From f5092af8b94a58c09ab74f7d3870229a6424776e Mon Sep 17 00:00:00 2001 From: Andrew Shannon Brown Date: Mon, 23 Jul 2018 17:09:29 -0700 Subject: [PATCH 001/292] Remove @ashanbrown from codeowners --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index 44429ee1..8b137891 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @ashanbrown + From fd63b2b84cd7806bbbacb094b0ac3ce2502fe94f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:40:01 -0700 Subject: [PATCH 002/292] log exception stacktraces at debug level --- lib/ldclient-rb/events.rb | 4 ++-- lib/ldclient-rb/ldclient.rb | 12 +++--------- lib/ldclient-rb/util.rb | 5 +++++ spec/ldclient_spec.rb | 11 ----------- 4 files changed, 10 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0c9a0ece..202fc235 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -142,7 +142,7 @@ def main_loop(queue, buffer, flush_workers) message.completed end rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -226,7 +226,7 @@ def trigger_flush(buffer, flush_workers) resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter) handle_response(resp) if !resp.nil? rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end buffer.clear if success # Reset our internal state, these events now belong to the flush worker diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c0e872d..3f0f6d9a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -162,7 +162,7 @@ def variation(key, user, default) @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) return value rescue => exn - @config.logger.warn { "[LDClient] Error evaluating feature flag: #{exn.inspect}. \nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating feature flag", exn) @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) return default end @@ -210,7 +210,7 @@ def all_flags(user) # TODO rescue if necessary Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - @config.logger.warn { "[LDClient] Error evaluating all flags: #{exn.inspect}. 
\nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating all flags", exn) return Hash.new end end @@ -226,12 +226,6 @@ def close @store.stop end - def log_exception(caller, exn) - error_traceback = "#{exn.inspect} #{exn}\n\t#{exn.backtrace.join("\n\t")}" - error = "[LDClient] Unexpected exception in #{caller}: #{error_traceback}" - @config.logger.error { error } - end - def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s @@ -252,7 +246,7 @@ def make_feature_event(flag, user, variation, value, default) } end - private :evaluate, :log_exception, :sanitize_user, :make_feature_event + private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 6ba70dbc..99ee2477 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,6 +1,11 @@ module LaunchDarkly module Util + def self.log_exception(logger, message, exc) + logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } + end + def self.http_error_recoverable?(status) if status >= 400 && status < 500 status == 400 || status == 408 || status == 429 diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 8e4b5eb5..68c57166 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -130,17 +130,6 @@ def event_processor end end - describe '#log_exception' do - it "log error data" do - expect(client.instance_variable_get(:@config).logger).to receive(:error) - begin - raise StandardError.new 'asdf' - rescue StandardError => exn - client.send(:log_exception, 'caller', exn) - end - end - end - describe 'with send_events: false' do let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } let(:client) { subject.new("secret", config) } From d4be186ed2026056dd9768fd2b265181f9353c72 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:48:15 -0700 Subject: [PATCH 003/292] re-add minimal unit test --- spec/util_spec.rb | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 spec/util_spec.rb diff --git a/spec/util_spec.rb b/spec/util_spec.rb new file mode 100644 index 00000000..25881aaa --- /dev/null +++ b/spec/util_spec.rb @@ -0,0 +1,17 @@ +require "spec_helper" + +describe LaunchDarkly::Util do + describe 'log_exception' do + let(:logger) { double() } + + it "logs error data" do + expect(logger).to receive(:warn) + expect(logger).to receive(:debug) + begin + raise StandardError.new 'asdf' + rescue StandardError => exn + LaunchDarkly::Util.log_exception(logger, "message", exn) + end + end + end +end From d73d66c19c03511905aa9eef827bb656b19791be Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:51:32 -0700 Subject: [PATCH 004/292] log exceptions at error level --- lib/ldclient-rb/util.rb | 2 +- spec/util_spec.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 99ee2477..707ba3ce 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -2,7 +2,7 @@ module LaunchDarkly module Util def self.log_exception(logger, message, exc) - logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } end diff --git a/spec/util_spec.rb b/spec/util_spec.rb index 25881aaa..50a72f76 100644 --- a/spec/util_spec.rb +++ b/spec/util_spec.rb @@ -5,7 +5,7 @@ let(:logger) { 
double() } it "logs error data" do - expect(logger).to receive(:warn) + expect(logger).to receive(:error) expect(logger).to receive(:debug) begin raise StandardError.new 'asdf' From ca15234e9214701061528d3ce702c20d34d3a9a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:30:19 -0700 Subject: [PATCH 005/292] add new version of all_flags that captures more metadata --- lib/ldclient-rb.rb | 1 + lib/ldclient-rb/flags_state.rb | 51 +++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 44 ++++++++++++---- spec/ldclient_spec.rb | 91 ++++++++++++++++++++++++++++++++++ 4 files changed, 176 insertions(+), 11 deletions(-) create mode 100644 lib/ldclient-rb/flags_state.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index ce9d0307..7264b220 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,6 +1,7 @@ require "ldclient-rb/version" require "ldclient-rb/util" require "ldclient-rb/evaluation" +require "ldclient-rb/flags_state" require "ldclient-rb/ldclient" require "ldclient-rb/cache_store" require "ldclient-rb/expiring_cache" diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb new file mode 100644 index 00000000..f68dc20b --- /dev/null +++ b/lib/ldclient-rb/flags_state.rb @@ -0,0 +1,51 @@ + +module LaunchDarkly + # + # A snapshot of the state of all feature flags with regard to a specific user, generated by + # calling the client's all_flags_state method. + # + class FeatureFlagsState + def initialize(valid) + @flag_values = {} + @flag_metadata = {} + @valid = valid + end + + # Used internally to build the state map. + def add_flag(flag, value, variation) + key = flag[:key] + @flag_values[key] = value + meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta[:variation] = variation if !variation.nil? + meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + @flag_metadata[key] = meta + end + + # Returns true if this object contains a valid snapshot of feature flag state, or false if the + # state could not be computed (for instance, because the client was offline or there was no user). + def valid? + @valid + end + + # Returns the value of an individual feature flag at the time the state was recorded. + # Returns nil if the flag returned the default value, or if there was no such flag. + def flag_value(key) + @flag_values[key] + end + + # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, + # its value will be nil. + def values_map + @flag_values + end + + # Returns a JSON string representation of the entire state map, in the format used by the + # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that + # will be used to "bootstrap" the JavaScript client. + def json_string + ret = @flag_values.clone + ret['$flagsState'] = @flag_metadata + ret.to_json + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 3f0f6d9a..5c64b7e7 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -193,26 +193,48 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user + # Returns all feature flag values for the given user. This method is deprecated - please use + # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not + # generate analytics events correctly if you pass the result of all_flags. # def all_flags(user) - sanitize_user(user) - return Hash.new if @config.offline? 
+ all_flags_state(user).values_map + end - unless user - @config.logger.error { "[LDClient] Must specify user in all_flags" } - return Hash.new + # + # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # including the flag values and also metadata that can be used on the front end. This method does not + # send analytics events back to LaunchDarkly. + # + def all_flags_state(user) + return FeatureFlagsState.new(false) if @config.offline? + + unless user && !user[:key].nil? + @config.logger.error { "[LDClient] User and user key must be specified in all_flags_state" } + return FeatureFlagsState.new(false) end + sanitize_user(user) + begin features = @store.all(FEATURES) - - # TODO rescue if necessary - Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - Util.log_exception(@config.logger, "Error evaluating all flags", exn) - return Hash.new + Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn) + return FeatureFlagsState.new(false) + end + + state = FeatureFlagsState.new(true) + features.each do |k, f| + begin + result = evaluate(f, user, @store, @config.logger) + state.add_flag(f, result[:value], result[:variation]) + rescue => exn + Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) + state.add_flag(f, nil, nil) + end end + + state end # diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 68c57166..9d13dee0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -99,6 +99,97 @@ def event_processor end end + describe '#all_flags' do + let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } + let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + + it "returns flag values" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "returns empty map for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags(nil) + expect(result).to eq({}) + end + + it "returns empty map for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({}) + expect(result).to eq({}) + end + + it "returns empty map if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end + end + + describe '#all_flags_state' do + let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } + let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + + it "returns flags state" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 
1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end + + it "returns empty state for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + describe '#secure_mode_hash' do it "will return the expected value for a known message and secret" do result = client.secure_mode_hash({key: :Message}) From ed19523fd0d93306204929248e179945fdabf10f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:37:43 -0700 Subject: [PATCH 006/292] add tests for FeatureFlagsState --- spec/flags_state_spec.rb | 56 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 spec/flags_state_spec.rb diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb new file mode 100644 index 00000000..9241028d --- /dev/null +++ b/spec/flags_state_spec.rb @@ -0,0 +1,56 @@ +require "spec_helper" + +describe LaunchDarkly::FeatureFlagsState do + subject { LaunchDarkly::FeatureFlagsState } + + it "can get flag value" do + state = subject.new(true) + flag = { key: 'key' } + state.add_flag(flag, 'value', 1) + + expect(state.flag_value('key')).to eq 'value' + end + + it "returns nil for unknown flag" do + state = subject.new(true) + + expect(state.flag_value('key')).to be nil + end + + it "can be converted to values map" do + state = subject.new(true) + flag1 = { key: 'key1' } + flag2 = { key: 'key2' } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end +end From 73f2d892fa166b5ccf2b68f268f77c04a49462ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 12:59:10 -0700 Subject: [PATCH 007/292] provide as_json method that returns a hash instead of just a string --- lib/ldclient-rb/flags_state.rb | 22 +++++++++++++++++----- lib/ldclient-rb/ldclient.rb | 4 ++-- spec/flags_state_spec.rb | 30 +++++++++++++++++++++--------- spec/ldclient_spec.rb | 16 ++++++++-------- 4 files changed, 48 insertions(+), 24 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb 
b/lib/ldclient-rb/flags_state.rb index f68dc20b..a5af6c5a 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -1,3 +1,4 @@ +require 'json' module LaunchDarkly # @@ -35,17 +36,28 @@ def flag_value(key) # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, # its value will be nil. + # + # Do not use this method if you are passing data to the front end to "bootstrap" the JavaScript client. + # Instead, use as_json. def values_map @flag_values end - # Returns a JSON string representation of the entire state map, in the format used by the - # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that - # will be used to "bootstrap" the JavaScript client. - def json_string + # Returns a hash that can be used as a JSON representation of the entire state map, in the format + # used by the LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end + # in order to "bootstrap" the JavaScript client. + # + # Do not rely on the exact shape of this data, as it may change in future to support the needs of + # the JavaScript client. + def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata - ret.to_json + ret + end + + # Same as as_json, but converts the JSON structure into a string. + def to_json(*a) + as_json.to_json(a) end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c64b7e7..c8addbca 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,8 +194,8 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not - # generate analytics events correctly if you pass the result of all_flags. + # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # events correctly if you pass the result of all_flags. 
# def all_flags(user) all_flags_state(user).values_map diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 9241028d..e6e1c17c 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -27,30 +27,42 @@ expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end - it "can be converted to JSON string" do + it "can be converted to JSON structure" do state = subject.new(true) flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } state.add_flag(flag1, 'value1', 0) state.add_flag(flag2, 'value2', 1) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + object = state.as_json + str = state.to_json + expect(object.to_json).to eq(str) + end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 9d13dee0..b5939ea1 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -145,21 +145,21 @@ def event_processor values = state.values_map expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) From ab896b1e801f944166c5525e6aa1d00cf333da0b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:01:32 -0700 Subject: [PATCH 008/292] state can be serialized with JSON.generate --- lib/ldclient-rb/flags_state.rb | 5 ++++- spec/flags_state_spec.rb | 16 +++++++++++++++- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index a5af6c5a..09f88975 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,7 +3,9 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. + # calling the client's all_flags_state method. Serializing this object to JSON using + # JSON.generate (or the to_json method) will produce the appropriate data structure for + # bootstrapping the LaunchDarkly JavaScript client. 
# class FeatureFlagsState def initialize(valid) @@ -52,6 +54,7 @@ def values_map def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata + ret['$valid'] = @valid ret end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index e6e1c17c..3d21029b 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -1,4 +1,5 @@ require "spec_helper" +require "json" describe LaunchDarkly::FeatureFlagsState do subject { LaunchDarkly::FeatureFlagsState } @@ -50,7 +51,8 @@ :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end @@ -65,4 +67,16 @@ str = state.to_json expect(object.to_json).to eq(str) end + + it "uses our custom serializer with JSON.generate" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + stringFromToJson = state.to_json + stringFromGenerate = JSON.generate(state) + expect(stringFromGenerate).to eq(stringFromToJson) + end end From 00347c66ae17167910d316617e061d85f6793681 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:02:41 -0700 Subject: [PATCH 009/292] add $valid --- spec/ldclient_spec.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b5939ea1..5dbb8195 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -161,7 +161,8 @@ def event_processor :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end From bdac27e1cf37e2c95c4455d705a99aaa2a948b28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 21 Aug 2018 11:46:14 -0700 Subject: [PATCH 010/292] add ability to filter for only client-side flags --- lib/ldclient-rb/ldclient.rb | 17 +++++++++++++++-- spec/ldclient_spec.rb | 16 ++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index c8addbca..e9873679 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,9 +194,12 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics # events correctly if you pass the result of all_flags. # + # @param user [Hash] The end user requesting the feature flags + # @return [Hash] a hash of feature flag keys to values + # def all_flags(user) all_flags_state(user).values_map end @@ -206,7 +209,13 @@ def all_flags(user) # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # - def all_flags_state(user) + # @param user [Hash] The end user requesting the feature flags + # @param options={} [Hash] Optional parameters to control how the state is generated + # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the + # client-side SDK should be included in the state. By default, all flags are included. 
+ # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # + def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? unless user && !user[:key].nil? @@ -224,7 +233,11 @@ def all_flags_state(user) end state = FeatureFlagsState.new(true) + client_only = options[:client_side_only] || false features.each do |k, f| + if client_only && !f[:clientSide] + next + end begin result = evaluate(f, user, @store, @config.logger) state.add_flag(f, result[:value], result[:variation]) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 5dbb8195..ae76a678 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -166,6 +166,22 @@ def event_processor }) end + it "can be filtered for only client-side flags" do + flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } + flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } + flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } + flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } + config.feature_store.init({ LaunchDarkly::FEATURES => { + flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 + }}) + + state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From cee4c18aa0a6330cd3e24f6c9b11914cae57d34d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 19:58:42 -0700 Subject: [PATCH 011/292] implement evaluation with explanations --- lib/ldclient-rb/evaluation.rb | 185 +++++++++++++++++---------- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/flags_state.rb | 3 +- lib/ldclient-rb/ldclient.rb | 124 ++++++++++-------- spec/evaluation_spec.rb | 221 +++++++++++++++++++-------------- spec/ldclient_spec.rb | 98 ++++++++++++--- 6 files changed, 403 insertions(+), 229 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b4dd796c..b803f4a2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,6 +2,31 @@ require "semantic" module LaunchDarkly + # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # an explanation of how it was calculated. + class EvaluationDetail + def initialize(value, variation, reason) + @value = value + @variation = variation + @reason = reason + end + + # @return [Object] The result of the flag evaluation. This will be either one of the flag's + # variations or the default value that was passed to the `variation` method. + attr_reader :value + + # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. + # 0 for the first variation - or `nil` if the default value was returned. + attr_reader :variation + + # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
+ attr_reader :reason + + def ==(other) + @value == other.value && @variation == other.variation && @reason == other.reason + end + end + module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] @@ -110,101 +135,109 @@ def self.comparator(converter) class EvaluationError < StandardError end - # Evaluates a feature flag, returning a hash containing the evaluation result and any events - # generated during prerequisite evaluation. Raises EvaluationError if the flag is not well-formed - # Will return nil, but not raise an exception, indicating that the rules (including fallthrough) did not match - # In that case, the caller should return the default value. - def evaluate(flag, user, store, logger) - if flag.nil? - raise EvaluationError, "Flag does not exist" - end + # Used internally to hold an evaluation result and the events that were generated from prerequisites. + EvalResult = Struct.new(:detail, :events) + + def error_result(errorKind, value = nil) + EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + end + # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns + # the default value. Error conditions produce a result with an error reason, not an exception. + def evaluate(flag, user, store, logger) if user.nil? || user[:key].nil? - raise EvaluationError, "Invalid user" + return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end events = [] if flag[:on] - res = eval_internal(flag, user, store, events, logger) - if !res.nil? - res[:events] = events - return res + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) + end + + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + end + + + def eval_internal(flag, user, store, events, logger) + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) + if !prereq_failure_reason.nil? + return get_off_value(flag, prereq_failure_reason) + end + + # Check user target matches + (flag[:targets] || []).each do |target| + (target[:values] || []).each do |value| + if value == user[:key] + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + end + end + end + + # Check custom rules + rules = flag[:rules] || [] + rules.each_index do |i| + rule = rules[i] + if rule_match_user(rule, user, store) + return get_value_for_variation_or_rollout(flag, rule, user, + { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }, logger) end end - offVariation = flag[:offVariation] - if !offVariation.nil? && offVariation < flag[:variations].length - value = flag[:variations][offVariation] - return { variation: offVariation, value: value, events: events } + # Check the fallthrough rule + if !flag[:fallthrough].nil? + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, + { kind: 'FALLTHROUGH' }, logger) end - { variation: nil, value: nil, events: events } + return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) end - def eval_internal(flag, user, store, events, logger) - failed_prereq = false - # Evaluate prerequisites, if any + def check_prerequisites(flag, user, store, events, logger) + failed_prereqs = [] + (flag[:prerequisites] || []).each do |prerequisite| - prereq_flag = store.get(FEATURES, prerequisite[:key]) + prereq_ok = true + prereq_key = prerequisite[:key] + prereq_flag = store.get(FEATURES, prereq_key) if prereq_flag.nil? 
|| !prereq_flag[:on] - failed_prereq = true + logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + prereq_ok = false + elsif !prereq_flag[:on] + prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) event = { kind: "feature", - key: prereq_flag[:key], - variation: prereq_res.nil? ? nil : prereq_res[:variation], - value: prereq_res.nil? ? nil : prereq_res[:value], + key: prereq_key, + variation: prereq_res.variation, + value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], trackEvents: prereq_flag[:trackEvents], debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.nil? || prereq_res[:variation] != prerequisite[:variation] - failed_prereq = true + if prereq_res.variation != prerequisite[:variation] + prereq_ok = false end rescue => exn - logger.error { "[LDClient] Error evaluating prerequisite: #{exn.inspect}" } - failed_prereq = true + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + prereq_ok = false end end - end - - if failed_prereq - return nil - end - # The prerequisites were satisfied. - # Now walk through the evaluation steps and get the correct - # variation index - eval_rules(flag, user, store) - end - - def eval_rules(flag, user, store) - # Check user target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| - if value == user[:key] - return { variation: target[:variation], value: get_variation(flag, target[:variation]) } - end + if !prereq_ok + failed_prereqs.push(prereq_key) end end - - # Check custom rules - (flag[:rules] || []).each do |rule| - return variation_for_user(rule, user, flag) if rule_match_user(rule, user, store) - end - # Check the fallthrough rule - if !flag[:fallthrough].nil? - return variation_for_user(flag[:fallthrough], user, flag) + if failed_prereqs.empty? + return nil end - - # Not even the fallthrough matched-- return the off variation or default - nil + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end def get_variation(flag, index) @@ -257,9 +290,9 @@ def clause_match_user_no_segments(clause, user) maybe_negate(clause, match_any(op, val, clause[:values])) end - def variation_for_user(rule, user, flag) + def variation_index_for_user(flag, rule, user) if !rule[:variation].nil? # fixed variation - return { variation: rule[:variation], value: get_variation(flag, rule[:variation]) } + return rule[:variation] elsif !rule[:rollout].nil? # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? 
"key" : rollout[:bucketBy] @@ -268,12 +301,12 @@ def variation_for_user(rule, user, flag) rollout[:variations].each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum - return { variation: variate[:variation], value: get_variation(flag, variate[:variation]) } + return variate[:variation] end end nil else # the rule isn't well-formed - raise EvaluationError, "Rule does not define a variation or rollout" + nil end end @@ -350,5 +383,31 @@ def match_any(op, value, values) end return false end + + :private + + def get_variation(flag, index, reason) + if index < 0 || index >= flag[:variations].length + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") + return error_result('MALFORMED_FLAG') + end + EvaluationDetail.new(flag[:variations][index], index, reason) + end + + def get_off_value(flag, reason) + if flag[:offVariation].nil? # off variation unspecified - return default value + return EvaluationDetail.new(nil, nil, reason) + end + get_variation(flag, flag[:offVariation], reason) + end + + def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) + index = variation_index_for_user(flag, vr, user) + if index.nil? + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + return error_result('MALFORMED_FLAG') + end + return get_variation(flag, index, reason) + end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 202fc235..e19d6b02 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -363,6 +363,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:reason] = event[:reason] if !event[:reason].nil? out when "identify" { diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 09f88975..05079920 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,12 +15,13 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation) + def add_flag(flag, value, variation, reason = nil) key = flag[:key] @flag_values[key] = value meta = { version: flag[:version], trackEvents: flag[:trackEvents] } meta[:variation] = variation if !variation.nil? meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index e9873679..8efd422a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -120,52 +120,11 @@ def initialized? # @return the variation to show the user, or the # default value if there's an an error def variation(key, user, default) - return default if @config.offline? - - if !initialized? - if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } - else - @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - end - - sanitize_user(user) if !user.nil? - feature = @store.get(FEATURES, key) - - if feature.nil? - @config.logger.info { "[LDClient] Unknown feature flag #{key}. 
Returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - - unless user - @config.logger.error { "[LDClient] Must specify user" } - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + evaluate_internal(key, user, default, false).value + end - begin - res = evaluate(feature, user, @store, @config.logger) - if !res[:events].nil? - res[:events].each do |event| - @event_processor.add_event(event) - end - end - value = res[:value] - if value.nil? - @config.logger.debug { "[LDClient] Result value is null in toggle" } - value = default - end - @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) - return value - rescue => exn - Util.log_exception(@config.logger, "Error evaluating feature flag", exn) - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + def variation_detail(key, user, default) + evaluate_internal(key, user, default, true) end # @@ -213,6 +172,8 @@ def all_flags(user) # @param options={} [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. + # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included + # in the state. By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +195,17 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false + with_reasons = options[:with_reasons] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result[:value], result[:variation]) + state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) end end @@ -261,27 +223,83 @@ def close @store.stop end + :private + + # @return [EvaluationDetail] + def evaluate_internal(key, user, default, include_reasons_in_events) + if @config.offline? + return error_result('CLIENT_NOT_READY', default) + end + + if !initialized? + if @store.initialized? + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } + else + @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) + return error_result('CLIENT_NOT_READY', default) + end + end + + sanitize_user(user) if !user.nil? + feature = @store.get(FEATURES, key) + + if feature.nil? + @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } + detail = error_result('FLAG_NOT_FOUND', default) + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user, + reason: include_reasons_in_events ? 
detail.reason : nil) + return detail + end + + unless user + @config.logger.error { "[LDClient] Must specify user" } + detail = error_result('USER_NOT_SPECIFIED', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + + begin + res = evaluate(feature, user, @store, @config.logger) + if !res.events.nil? + res.events.each do |event| + @event_processor.add_event(event) + end + end + detail = res.detail + if detail.variation.nil? + detail = EvaluationDetail.new(default, nil, detail.reason) + end + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + rescue => exn + Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) + detail = error_result('EXCEPTION', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + end + def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s end end - def make_feature_event(flag, user, variation, value, default) + def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", key: flag[:key], user: user, - variation: variation, - value: value, + variation: detail.variation, + value: detail.value, default: default, version: flag[:version], trackEvents: flag[:trackEvents], - debugEventsUntilDate: flag[:debugEventsUntilDate] + debugEventsUntilDate: flag[:debugEventsUntilDate], + reason: with_reasons ? detail.reason : nil } end - - private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index a8d980ae..d5ee1097 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -2,6 +2,9 @@ describe LaunchDarkly::Evaluation do subject { LaunchDarkly::Evaluation } + + include LaunchDarkly::Evaluation + let(:features) { LaunchDarkly::InMemoryFeatureStore.new } let(:user) { @@ -14,7 +17,13 @@ let(:logger) { LaunchDarkly::Config.default_logger } - include LaunchDarkly::Evaluation + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end describe "evaluate" do it "returns off variation if flag is off" do @@ -26,7 +35,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns nil if flag is off and off variation is unspecified" do @@ -37,7 +49,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: nil, value: nil, events: []}) + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation if prerequisite is not found" do @@ -50,7 +65,11 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', 
prerequisiteKeys: ['badfeature'] }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation and event if prerequisite of a prerequisite is not found" do @@ -73,11 +92,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns off variation and event if prerequisite is not met" do @@ -99,11 +122,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do @@ -125,11 +152,14 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 0, value: 'a', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "matches user from targets" do @@ -144,57 +174,96 @@ variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "matches user from rules" do - flag = { - key: 'feature0', - on: true, - rules: [ - { - clauses: [ - { - attribute: 'key', - op: 'in', - values: [ 'userkey' ] - } - ], - variation: 2 - } - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 
'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end end - describe "clause_match_user" do + describe "clause" do it "can match built-in attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "retrieves segment from segment store for segmentMatch operator" do @@ -208,23 +277,24 @@ user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "falls through with no errors if referenced segment is not 
found" do user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) expect { clause[:negate] = true - }.to change {clause_match_user(clause, user, features)}.from(true).to(false) + }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false) end end @@ -326,7 +396,8 @@ it "should return #{shouldBe} for #{value1} #{op} #{value2}" do user = { key: 'x', custom: { foo: value1 } } clause = { attribute: 'foo', op: op, values: [value2] } - expect(clause_match_user(clause, user, features)).to be shouldBe + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe end end end @@ -385,17 +456,6 @@ end end - def make_flag(key) - { - key: key, - rules: [], - variations: [ false, true ], - on: true, - fallthrough: { variation: 0 }, - version: 1 - } - end - def make_segment(key) { key: key, @@ -424,35 +484,30 @@ def make_user_matching_clause(user, attr) end describe 'segment matching' do - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] + def test_segment_match(segment) features.upsert(LaunchDarkly::SEGMENTS, segment) clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + evaluate(flag, user, features, logger).detail.value + end - result = clause_match_user(clause, user, features) - expect(result).to be true + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true end it 'explicitly excludes user' do segment = make_segment('segkey') segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it 'both includes and excludes user; include takes priority' do segment = make_segment('segkey') segment[:included] = [ user[:key] ] segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is absent' do @@ -462,11 +517,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is nil' do @@ -477,11 +528,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user with full rollout' do @@ -492,11 
+539,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with zero rollout" do @@ -507,11 +550,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it "matches user with multiple clauses" do @@ -522,11 +561,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with multiple clauses if a clause doesn't match" do @@ -538,11 +573,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index ae76a678..efaa1438 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -34,11 +34,18 @@ def event_processor end describe '#variation' do - it "will return the default value if the client is offline" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do result = offline_client.variation("doesntmatter", user, "default") expect(result).to eq "default" end + it "returns the default value for an unknown feature" do + expect(client.variation("badkey", user, "default")).to eq "default" + end + it "queues a feature request event for an unknown feature" do expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", user: user, value: "default", default: "default" @@ -46,56 +53,113 @@ def event_processor client.variation("badkey", user, "default") end + it "returns the value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(client.variation("key", user, "default")).to eq "value" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: user, variation: 0, - value: true, + value: "value", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], user, "default") + 
client.variation("key", user, "default") end it "queues a feature event for an existing feature when user is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: nil, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], nil, "default") + client.variation("key", nil, "default") end it "queues a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: bad_user, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 + )) + client.variation("key", bad_user, "default") + end + end + + describe '#variation_detail' do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + result = offline_client.variation_detail("doesntmatter", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'CLIENT_NOT_READY' }) + expect(result).to eq expected + end + + it "returns the default value for an unknown feature" do + result = client.variation_detail("badkey", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND'}) + expect(result).to eq expected + end + + it "queues a feature request event for an unknown feature" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: user, value: "default", default: "default", + reason: { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' } + )) + client.variation_detail("badkey", user, "default") + end + + it "returns a value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("value", 0, { kind: 'OFF' }) + expect(result).to eq expected + end + + it "queues a feature request event for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", + key: "key", + version: 100, + user: user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000, + reason: { kind: "OFF" } )) - client.variation(feature[:key], bad_user, "default") + client.variation_detail("key", user, "default") end end From d2c2ab81abd6e19934a2e444993cef1e1285e069 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 
20:03:36 -0700 Subject: [PATCH 012/292] misc cleanup --- lib/ldclient-rb/evaluation.rb | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b803f4a2..7a316aca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -156,21 +156,21 @@ def evaluate(flag, user, store, logger) return EvalResult.new(detail, events) end - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) end def eval_internal(flag, user, store, events, logger) prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason) + return get_off_value(flag, prereq_failure_reason, logger) end # Check user target matches (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }, logger) end end end @@ -240,13 +240,6 @@ def check_prerequisites(flag, user, store, events, logger) { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end - def get_variation(flag, index) - if index >= flag[:variations].length - raise EvaluationError, "Invalid variation index" - end - flag[:variations][index] - end - def rule_match_user(rule, user, store) return false if !rule[:clauses] @@ -386,7 +379,7 @@ def match_any(op, value, values) :private - def get_variation(flag, index, reason) + def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") return error_result('MALFORMED_FLAG') @@ -394,11 +387,11 @@ def get_variation(flag, index, reason) EvaluationDetail.new(flag[:variations][index], index, reason) end - def get_off_value(flag, reason) + def get_off_value(flag, reason, logger) if flag[:offVariation].nil? # off variation unspecified - return default value return EvaluationDetail.new(nil, nil, reason) end - get_variation(flag, flag[:offVariation], reason) + get_variation(flag, flag[:offVariation], reason, logger) end def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) @@ -407,7 +400,7 @@ def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return error_result('MALFORMED_FLAG') end - return get_variation(flag, index, reason) + return get_variation(flag, index, reason, logger) end end end From 64a00a1a9388e85cb26e5650da97fa2029198d64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:14:37 -0700 Subject: [PATCH 013/292] misc cleanup, more error checking --- lib/ldclient-rb/evaluation.rb | 6 +- spec/evaluation_spec.rb | 111 +++++++++++++++++++++++++++++++++- 2 files changed, 111 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7a316aca..7dfbc3db 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -132,9 +132,6 @@ def self.comparator(converter) end } - class EvaluationError < StandardError - end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. 
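# Illustrative sketch (not part of the diff): with EvaluationError removed, problems
# such as a malformed flag surface as an ERROR reason on the returned detail rather
# than as a raised exception. The values below mirror the MALFORMED_FLAG case
# exercised in the specs; nothing here introduces a new API.
require "ldclient-rb"

detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
  { kind: "ERROR", errorKind: "MALFORMED_FLAG" })
detail.value                # => nil (the caller substitutes its own default)
detail.reason[:errorKind]   # => "MALFORMED_FLAG"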
EvalResult = Struct.new(:detail, :events) @@ -268,9 +265,8 @@ def clause_match_user_no_segments(clause, user) return false if val.nil? op = OPERATORS[clause[:op].to_sym] - if op.nil? - raise EvaluationError, "Unsupported operator #{clause[:op]} in evaluation" + return false end if val.is_a? Enumerable diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index d5ee1097..9cb148ff 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -55,6 +55,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq([]) end + it "returns an error if off variation is too high" do + flag = { + key: 'feature', + on: false, + offVariation: 999, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if off variation is negative" do + flag = { + key: 'feature', + on: false, + offVariation: -1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "returns off variation if prerequisite is not found" do flag = { key: 'feature0', @@ -162,9 +194,69 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns an error if fallthrough variation is too high" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: 999 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough variation is negative" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: -1 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has no variation or rollout" do + flag = { + key: 'feature', + on: true, + fallthrough: { }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has a rollout with no variations" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { variations: [] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "matches user from targets" do flag = { - key: 'feature0', + key: 'feature', on: 
true, targets: [ { values: [ 'whoever', 'userkey' ], variation: 2 } @@ -259,6 +351,23 @@ def boolean_flag_with_clauses(clauses) expect(evaluate(flag, user, features, logger).detail.value).to be false end + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = boolean_flag_with_rules([rule0, rule1]) + expect(evaluate(flag, user, features, logger).detail.value).to be true + end + it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } From 46b642b0c0498bfba69577a544226a33f9095cd6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:49:58 -0700 Subject: [PATCH 014/292] don't keep evaluating prerequisites if one fails --- lib/ldclient-rb/evaluation.rb | 10 ++-------- spec/evaluation_spec.rb | 6 +++--- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7dfbc3db..51cf3c66 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -192,8 +192,6 @@ def eval_internal(flag, user, store, events, logger) end def check_prerequisites(flag, user, store, events, logger) - failed_prereqs = [] - (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -227,14 +225,10 @@ def check_prerequisites(flag, user, store, events, logger) end end if !prereq_ok - failed_prereqs.push(prereq_key) + return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } end end - - if failed_prereqs.empty? 
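# Illustrative sketch (not part of the diff): the reason built just above names only
# the first prerequisite that failed, as a single prerequisiteKey, rather than
# collecting a list. The values mirror the spec expectations that follow.
require "ldclient-rb"

detail = LaunchDarkly::EvaluationDetail.new("b", 1,
  { kind: "PREREQUISITE_FAILED", prerequisiteKey: "feature1" })
if detail.reason[:kind] == "PREREQUISITE_FAILED"
  puts "blocked by prerequisite flag: #{detail.reason[:prerequisiteKey]}"
end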
- return nil - end - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } + nil end def rule_match_user(rule, user, store) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 9cb148ff..7f0c82b4 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -98,7 +98,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['badfeature'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) result = evaluate(flag, user, features, logger) expect(result.detail).to eq(detail) expect(result.events).to eq([]) @@ -125,7 +125,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil @@ -155,7 +155,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil From 855c4e2be634b475957d46cda6870d1c52b326ed Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:28:28 -0700 Subject: [PATCH 015/292] doc comment --- lib/ldclient-rb/ldclient.rb | 40 ++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 8efd422a..1d5c23a1 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -115,7 +115,7 @@ def initialized? # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default=false the default value of the flag + # @param default the default value of the flag # # @return the variation to show the user, or the # default value if there's an an error @@ -123,6 +123,44 @@ def variation(key, user, default) evaluate_internal(key, user, default, false).value end + # + # Determines the variation of a feature flag for a user, like `variation`, but also + # provides additional information about how this value was calculated. + # + # The return value of `variation_detail` is an `EvaluationDetail` object, which has + # three properties: + # + # `value`: the value that was calculated for this user (same as the return value + # of `variation`) + # + # `variation`: the positional index of this value in the flag, e.g. 0 for the first + # variation - or `nil` if it is the default value + # + # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` + # property will be one of the following: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and + # `:ruleId` properties indicate the positional index and unique identifier of the rule + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation; the + # `:prerequisiteKey` property indicates the key of the prerequisite that failed + # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due + # to an unexpected error, and therefore returned the default value; the `:errorKind` + # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # + # The `reason` will also be included in analytics events, if you are capturing + # detailed event data for this flag. + # + # @param key [String] the unique feature key for the feature flag, as shown + # on the LaunchDarkly dashboard + # @param user [Hash] a hash containing parameters for the end user requesting the flag + # @param default the default value of the flag + # + # @return an `EvaluationDetail` object describing the result + # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end From a0f002f3c1e1cdb8313b5f116d9ba909e4d0e17d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:34:25 -0700 Subject: [PATCH 016/292] rename variation to variation_index --- lib/ldclient-rb/evaluation.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 51cf3c66..bd4544dc 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -5,9 +5,9 @@ module LaunchDarkly # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail - def initialize(value, variation, reason) + def initialize(value, variation_index, reason) @value = value - @variation = variation + @variation_index = variation_index @reason = reason end @@ -17,13 +17,13 @@ def initialize(value, variation, reason) # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. # 0 for the first variation - or `nil` if the default value was returned. - attr_reader :variation + attr_reader :variation_index # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
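# Illustrative sketch (not part of the diff): how a caller reads the renamed accessor.
# An offline client is used so the snippet runs without any network access; as the
# existing specs show, it reports CLIENT_NOT_READY and serves the default, which is
# enough to demonstrate the reader names. The SDK key, flag key, and user key are
# placeholders.
require "ldclient-rb"

client = LaunchDarkly::LDClient.new("fake-sdk-key", LaunchDarkly::Config.new(offline: true))
detail = client.variation_detail("any-flag-key", { key: "user-1" }, "my-default")
detail.value             # => "my-default"
detail.variation_index   # => nil
detail.reason            # => { kind: "ERROR", errorKind: "CLIENT_NOT_READY" }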
attr_reader :reason def ==(other) - @value == other.value && @variation == other.variation && @reason == other.reason + @value == other.value && @variation_index == other.variation_index && @reason == other.reason end end @@ -208,7 +208,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, - variation: prereq_res.variation, + variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], @@ -216,7 +216,7 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation != prerequisite[:variation] + if prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end rescue => exn diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1d5c23a1..177b91a2 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -133,8 +133,8 @@ def variation(key, user, default) # `value`: the value that was calculated for this user (same as the return value # of `variation`) # - # `variation`: the positional index of this value in the flag, e.g. 0 for the first - # variation - or `nil` if it is the default value + # `variation_index`: the positional index of this value in the flag, e.g. 0 for the + # first variation - or `nil` if the default value was returned # # `reason`: a hash describing the main reason why this value was selected. Its `:kind` # property will be one of the following: @@ -240,7 +240,7 @@ def all_flags_state(user, options={}) end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation.nil? + if detail.variation_index.nil? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) @@ -329,7 +329,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) kind: "feature", key: flag[:key], user: user, - variation: detail.variation, + variation: detail.variation_index, value: detail.value, default: default, version: flag[:version], From 4ec43db7e4b7d58ad04bf5f9dde015f0eed0a816 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:44:24 -0700 Subject: [PATCH 017/292] comment --- lib/ldclient-rb/ldclient.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 177b91a2..1c2d2257 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -211,7 +211,7 @@ def all_flags(user) # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state. By default, they are not included. + # in the state (see `variation_detail`). 
By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) From 9622e0116f5b4a513e705630a19603842d07cd75 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 23 Aug 2018 17:11:29 -0700 Subject: [PATCH 018/292] more test coverage, convenience method --- lib/ldclient-rb/evaluation.rb | 6 ++++++ lib/ldclient-rb/ldclient.rb | 2 +- spec/ldclient_spec.rb | 17 +++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index bd4544dc..4f6cbb0e 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -22,6 +22,12 @@ def initialize(value, variation_index, reason) # @return [Hash] An object describing the main factor that influenced the flag evaluation value. attr_reader :reason + # @return [boolean] True if the flag evaluated to the default value rather than to one of its + # variations. + def default_value? + variation_index.nil? + end + def ==(other) @value == other.value && @variation_index == other.variation_index && @reason == other.reason end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1c2d2257..a87344ed 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation_index.nil? + if detail.default_value? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index efaa1438..d76f7834 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -59,6 +59,13 @@ def event_processor expect(client.variation("key", user, "default")).to eq "value" end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + expect(client.variation("key", user, "default")).to eq "default" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) @@ -144,6 +151,16 @@ def event_processor expect(result).to eq expected end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'OFF' }) + expect(result).to eq expected + expect(result.default_value?).to be true + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) From 084d9eacf32a6cc36ff1a150dc3bef9190ba2b64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:25:31 -0700 Subject: [PATCH 019/292] fix event generation for a prerequisite that is off --- lib/ldclient-rb/evaluation.rb | 26 ++++++++++++-------------- spec/evaluation_spec.rb | 32 ++++++++++++++++++++++++++++++++ 2 
files changed, 44 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..aa4eb20d 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -153,17 +153,15 @@ def evaluate(flag, user, store, logger) end events = [] - - if flag[:on] - detail = eval_internal(flag, user, store, events, logger) - return EvalResult.new(detail, events) - end - - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) end - def eval_internal(flag, user, store, events, logger) + if !flag[:on] + return get_off_value(flag, { kind: 'OFF' }, logger) + end + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason, logger) @@ -203,14 +201,17 @@ def check_prerequisites(flag, user, store, events, logger) prereq_key = prerequisite[:key] prereq_flag = store.get(FEATURES, prereq_key) - if prereq_flag.nil? || !prereq_flag[:on] + if prereq_flag.nil? logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } prereq_ok = false - elsif !prereq_flag[:on] - prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) + # Note that if the prerequisite flag is off, we don't consider it a match no matter what its + # off variation was. But we still need to evaluate it in order to generate an event. + if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + prereq_ok = false + end event = { kind: "feature", key: prereq_key, @@ -222,9 +223,6 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation_index != prerequisite[:variation] - prereq_ok = false - end rescue => exn Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) prereq_ok = false diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 7f0c82b4..3af960c6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -135,6 +135,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns off variation and event if prerequisite is off" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: false, + # note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + features.upsert(LaunchDarkly::FEATURES, flag1) + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + trackEvents: nil, debugEventsUntilDate: nil + }] + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + it "returns off variation and event if prerequisite is not met" do flag = { key: 'feature0', From 02b5712c434c7a4e6524d6e3752c09be4437feca Mon Sep 17 00:00:00 
2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:27:26 -0700 Subject: [PATCH 020/292] fix private --- lib/ldclient-rb/evaluation.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..a16d9adb 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -373,7 +373,7 @@ def match_any(op, value, values) return false end - :private + private def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a87344ed..7e86662b 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -261,7 +261,7 @@ def close @store.stop end - :private + private # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) From 88676380bed1f147d04c8852f58ddb4f294e0eb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 4 Oct 2018 19:04:17 -0700 Subject: [PATCH 021/292] add option to reduce front-end metadata for untracked flags --- lib/ldclient-rb/flags_state.rb | 10 ++++++--- lib/ldclient-rb/ldclient.rb | 10 +++++++-- spec/flags_state_spec.rb | 3 +-- spec/ldclient_spec.rb | 41 ++++++++++++++++++++++++++++++++-- 4 files changed, 55 insertions(+), 9 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 05079920..a6036bde 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,13 +15,17 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation, reason = nil) + def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value - meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta = {} + if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + meta[:version] = flag[:version] + meta[:reason] = reason if !reason.nil? + end meta[:variation] = variation if !variation.nil? + meta[:trackEvents] = true if flag[:trackEvents] meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 7e86662b..4ad7928e 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -212,6 +212,10 @@ def all_flags(user) # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included # in the state (see `variation_detail`). By default, they are not included. + # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. 
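# Illustrative sketch (not part of the diff): passing the new option when bootstrapping
# flag state for a front end. `client` is assumed to be an already-initialized
# LaunchDarkly::LDClient; the user key is a placeholder.
state = client.all_flags_state({ key: "user-1" },
  { client_side_only: true, details_only_for_tracked_flags: true })
if state.valid?
  bootstrap_json = state.as_json   # metadata is trimmed for untracked flags
end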
# @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +238,18 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false with_reasons = options[:with_reasons] || false + details_only_if_tracked = options[:details_only_for_tracked_flags] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, + details_only_if_tracked) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil, details_only_if_tracked) end end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 3d21029b..bda55b11 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -42,8 +42,7 @@ '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index d76f7834..6b923775 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -233,8 +233,7 @@ def event_processor '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, @@ -263,6 +262,44 @@ def event_processor expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) end + it "can omit details for untracked flags" do + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => 1000 + } + }, + '$valid' => true + }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From 9ea43e022a331d7c5ad577aad0b6d68d59ca22bd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 8 Oct 2018 16:42:06 -0700 Subject: [PATCH 022/292] fix logic for whether a flag is tracked in all_flags_state --- lib/ldclient-rb/flags_state.rb | 6 +++++- spec/ldclient_spec.rb | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 
a6036bde..b761149c 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -19,7 +19,11 @@ def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = fal key = flag[:key] @flag_values[key] = value meta = {} - if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + with_details = !details_only_if_tracked || flag[:trackEvents] + if !with_details && flag[:debugEventsUntilDate] + with_details = flag[:debugEventsUntilDate] > (Time.now.to_f * 1000).to_i + end + if with_details meta[:version] = flag[:version] meta[:reason] = reason if !reason.nil? end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6b923775..262f53f9 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -263,13 +263,14 @@ def event_processor end it "can omit details for untracked flags" do + future_time = (Time.now.to_f * 1000).to_i + 100000 flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - state = client.all_flags_state({ key: 'userkey' }) + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true values = state.values_map @@ -282,8 +283,7 @@ def event_processor 'key3' => 'value3', '$flagsState' => { 'key1' => { - :variation => 0, - :version => 100 + :variation => 0 }, 'key2' => { :variation => 1, @@ -293,7 +293,7 @@ def event_processor 'key3' => { :variation => 1, :version => 300, - :debugEventsUntilDate => 1000 + :debugEventsUntilDate => future_time } }, '$valid' => true From cce8e84964835b8d6d02ddff612a1af1e179e1c9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 10:23:48 -0700 Subject: [PATCH 023/292] implement file data source --- ldclient-rb.gemspec | 1 + lib/ldclient-rb.rb | 1 + lib/ldclient-rb/config.rb | 10 +- lib/ldclient-rb/file_data_source.rb | 209 ++++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 33 +++-- spec/file_data_source_spec.rb | 167 ++++++++++++++++++++++ 6 files changed, 404 insertions(+), 17 deletions(-) create mode 100644 lib/ldclient-rb/file_data_source.rb create mode 100644 spec/file_data_source_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index a9bbfb23..9e7d5d04 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -40,4 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 7264b220..d3ee6ffc 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -18,3 +18,4 @@ require "ldclient-rb/events" require "ldclient-rb/redis_store" require "ldclient-rb/requestor" +require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 3b62b2a3..dc89d30a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -61,8 +61,11 @@ class Config # @option opts [Boolean] :inline_users_in_events 
(false) Whether to include full user details in every # analytics event. By default, events will only include the user key, except for one "index" event # that provides the full details for the user. - # @option opts [Object] :update_processor An object that will receive feature flag data from LaunchDarkly. - # Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from + # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object + # as parameters, and returns an object that can obtain feature flag data and put it into the feature + # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. # @return [type] [description] # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity def initialize(opts = {}) @@ -88,6 +91,7 @@ def initialize(opts = {}) @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false @update_processor = opts[:update_processor] + @update_processor_factory = opts[:update_processor_factory] end # @@ -218,6 +222,8 @@ def offline? attr_reader :update_processor + attr_reader :update_processor_factory + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb new file mode 100644 index 00000000..65ba0735 --- /dev/null +++ b/lib/ldclient-rb/file_data_source.rb @@ -0,0 +1,209 @@ +require 'concurrent/atomics' +require 'json' +require 'yaml' +require 'listen' +require 'pathname' + +module LaunchDarkly + + # + # Provides a way to use local files as a source of feature flag state. This would typically be + # used in a test environment, to operate using a predetermined feature flag state without an + # actual LaunchDarkly connection. + # + # To use this component, call `FileDataSource.factory`, and store its return value in the + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # to `factory`, set `paths` to the file path(s) of your data file(s): + # + # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # + # This will cause the client not to connect to LaunchDarkly to get feature flags. The + # client may still make network connections to send analytics events, unless you have disabled + # this with Config.send_events or Config.offline. + # + # Flag data files can be either JSON or YAML. They contain an object with three possible + # properties: + # + # - "flags": Feature flag definitions. + # - "flagValues": Simplified feature flags that contain only a value. + # - "segments": User segment definitions. + # + # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # and is subject to change. Rather than trying to construct these objects yourself, it is simpler + # to request existing flags directly from the LaunchDarkly server in JSON format, and use this + # output as the starting point for your file. 
In Linux you would do this: + # + # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # + # The output will look something like this (but with many more properties): + # + # { + # "flags": { + # "flag-key-1": { + # "key": "flag-key-1", + # "on": true, + # "variations": [ "a", "b" ] + # } + # }, + # "segments": { + # "segment-key-1": { + # "key": "segment-key-1", + # "includes": [ "user-key-1" ] + # } + # } + # } + # + # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + # set specific flag keys to specific values. For that, you can use a much simpler format: + # + # { + # "flagValues": { + # "my-string-flag-key": "value-1", + # "my-boolean-flag-key": true, + # "my-integer-flag-key": 3 + # } + # } + # + # Or, in YAML: + # + # flagValues: + # my-string-flag-key: "value-1" + # my-boolean-flag-key: true + # my-integer-flag-key: 1 + # + # It is also possible to specify both "flags" and "flagValues", if you want some flags + # to have simple values and others to have complex behavior. However, it is an error to use the + # same flag key or segment key more than once, either in a single file or across multiple files. + # + # If the data source encounters any error in any file-- malformed content, a missing file, or a + # duplicate key-- it will not load flags from any of the files. + # + class FileDataSource + def self.factory(options={}) + return Proc.new do |sdk_key, config| + FileDataSourceImpl.new(config.feature_store, config.logger, options) + end + end + end + + class FileDataSourceImpl + def initialize(feature_store, logger, options={}) + @feature_store = feature_store + @logger = logger + @paths = options[:paths] || [] + @auto_update = options[:auto_update] + @initialized = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + end + + def initialized? + @initialized.value + end + + def start + ready = Concurrent::Event.new + + # We will return immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking "initialized?" + ready.set + + load_all + + if @auto_update + # If we're going to watch files, then the start event will be set the first time we get + # a successful load. + @listener = start_listener + end + + ready + end + + def stop + @listener.stop if !@listener.nil? + end + + private + + def load_all + all_data = { + FEATURES => {}, + SEGMENTS => {} + } + @paths.each do |path| + begin + load_file(path, all_data) + rescue => exn + Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + return + end + end + @feature_store.init(all_data) + @initialized.make_true + end + + def load_file(path, all_data) + parsed = parse_content(IO.read(path)) + (parsed[:flags] || {}).each do |key, flag| + add_item(all_data, FEATURES, flag) + end + (parsed[:flagValues] || {}).each do |key, value| + add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) + end + (parsed[:segments] || {}).each do |key, segment| + add_item(all_data, SEGMENTS, segment) + end + end + + def parse_content(content) + if content.strip.start_with?("{") + JSON.parse(content, symbolize_names: true) + else + symbolize_all_keys(YAML.load(content)) + end + end + + def symbolize_all_keys(value) + # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and + # the SDK expects all objects to be formatted that way. 
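# Illustrative aside (not part of the diff): why the key conversion implemented just
# below is needed. Ruby's YAML parser returns string keys, while the feature store
# expects the symbol keys produced on the JSON path (symbolize_names: true). The
# sample document reuses a key from the doc comment above.
require "yaml"

YAML.load("flagValues:\n  my-boolean-flag-key: true\n")
# => { "flagValues" => { "my-boolean-flag-key" => true } }    (string keys)
# after symbolize_all_keys:
# => { :flagValues => { :"my-boolean-flag-key" => true } }    (symbol keys)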
+ if value.is_a?(Hash) + value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h + elsif value.is_a?(Array) + value.map{ |v| symbolize_all_keys(v) } + else + value + end + end + + def add_item(all_data, kind, item) + items = all_data[kind] || {} + if !items[item[:key]].nil? + raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" + end + items[item[:key]] = item + end + + def make_flag_with_value(key, value) + { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + } + end + + def start_listener + resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + path_set = resolved_paths.to_set + dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq + l = Listen.to(*dir_paths) do |modified, added, removed| + paths = modified + added + removed + if paths.any? { |p| path_set.include?(p) } + load_all + end + end + l.start + l + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 4ad7928e..94c24229 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -39,22 +39,11 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - requestor = Requestor.new(sdk_key, config) - - if @config.offline? - @update_processor = NullUpdateProcessor.new + if @config.update_processor + @update_processor = @config.update_processor else - if @config.update_processor.nil? - if @config.stream? - @update_processor = StreamProcessor.new(sdk_key, config, requestor) - else - @config.logger.info { "Disabling streaming API" } - @config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } - @update_processor = PollingProcessor.new(config, requestor) - end - else - @update_processor = @config.update_processor - end + factory = @config.update_processor || self.method(:create_default_update_processor) + @update_processor = factory.call(sdk_key, config) end ready = @update_processor.start @@ -269,6 +258,20 @@ def close private + def create_default_update_processor(sdk_key, config) + if config.offline? + return NullUpdateProcessor.new + end + requestor = Requestor.new(sdk_key, config) + if config.stream? + StreamProcessor.new(sdk_key, config, requestor) + else + config.logger.info { "Disabling streaming API" } + config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } + PollingProcessor.new(config, requestor) + end + end + # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) if @config.offline? 
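# Illustrative sketch (not part of the diff): the intended wiring of the factory into
# a client, following the update_processor_factory usage described in the doc comment
# for this file. The file path, SDK key, flag key, and user key are placeholders;
# send_events is disabled so the only I/O is reading the local file.
require "ldclient-rb"

factory = LaunchDarkly::FileDataSource.factory(paths: ["./flags.json"])
config  = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory)
client  = LaunchDarkly::LDClient.new("fake-sdk-key", config)
client.variation("my-string-flag-key", { key: "user-1" }, "fallback-value")
client.close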
diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb new file mode 100644 index 00000000..c163d385 --- /dev/null +++ b/spec/file_data_source_spec.rb @@ -0,0 +1,167 @@ +require "spec_helper" +require "tempfile" + +describe LaunchDarkly::FileDataSource do + let(:full_flag_1_key) { "flag1" } + let(:flag_value_1_key) { "flag2" } + let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:full_segment_1_key) { "seg1" } + let(:all_segment_keys) { [ full_segment_1_key ] } + + let(:flag_only_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + } + } +EOF +} + + let(:all_properties_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } + + let(:all_properties_yaml) { <<-EOF +--- +flags: + flag1: + key: flag1 + "on": true +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + include: ["user1"] +EOF + } + + let(:bad_file_path) { "no-such-file" } + + before do + @config = LaunchDarkly::Config.new + @store = @config.feature_store + end + + def make_temp_file(content) + file = Tempfile.new('flags') + IO.write(file, content) + file + end + + def with_data_source(options) + factory = LaunchDarkly::FileDataSource.factory(options) + ds = factory.call('', @config) + begin + yield ds + ensure + ds.stop + end + end + + it "doesn't load flags prior to start" do + file = make_temp_file('{"flagValues":{"key":"value"}}') + with_data_source({ paths: [ file.path ] }) do |ds| + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES)).to eq({}) + expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({}) + end + end + + it "loads flags on start - from JSON" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "loads flags on start - from YAML" do + file = make_temp_file(all_properties_yaml) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "sets start event and initialized on successful load" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(true) + end + end + + it "sets start event and does not set initialized on unsuccessful load" do + with_data_source({ paths: [ bad_file_path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + end + end + + it "does not reload modified file if auto-update is off" do + file = make_temp_file(flag_only_json) + + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + IO.write(file, all_properties_json) + sleep(0.5) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + end + end + + it "reloads modified file if auto-update is on" do + file = make_temp_file(flag_only_json) + + with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + event = 
ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + sleep(1) + IO.write(file, all_properties_json) + + max_time = 10 + ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } + expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" + end + end + + def wait_for_condition(max_time) + deadline = Time.now + max_time + while Time.now < deadline + return true if yield + sleep(0.1) + end + false + end +end From 22ebdeddf21c3d7cf9602add1442e934ead6b43d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 11:03:21 -0700 Subject: [PATCH 024/292] add poll interval param, tolerate single file path string, add doc comments --- lib/ldclient-rb/file_data_source.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 65ba0735..c61ddcf9 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -80,6 +80,20 @@ module LaunchDarkly # duplicate key-- it will not load flags from any of the files. # class FileDataSource + # + # Returns a factory for the file data source component. + # + # @param options [Hash] the configuration options + # @option options [Array] :paths The paths of the source files for loading flag data. These + # may be absolute paths or relative to the current working directory. + # @option options [Boolean] :auto_update True if the data source should watch for changes to + # the source file(s) and reload flags whenever there is a change. Note that auto-updating + # will only work if all of the files you specified have valid directory paths at startup time. + # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for + # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do + # not need to set this as there is a native OS mechanism for detecting file changes; on other + # platforms, the default interval is one second. + # def self.factory(options={}) return Proc.new do |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) @@ -92,7 +106,11 @@ def initialize(feature_store, logger, options={}) @feature_store = feature_store @logger = logger @paths = options[:paths] || [] + if @paths.is_a? String + @paths = [ @paths ] + end @auto_update = options[:auto_update] + @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -196,7 +214,11 @@ def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - l = Listen.to(*dir_paths) do |modified, added, removed| + opts = {} + if !@poll_interval.nil? + opts[:latency] = @poll_interval + end + l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? 
{ |p| path_set.include?(p) } load_all From b864390a2079c6588e3fae0d8f8cfce359136cb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:02:29 -0700 Subject: [PATCH 025/292] make listen dependency optional --- ldclient-rb.gemspec | 2 +- lib/ldclient-rb/file_data_source.rb | 23 ++++++++++++++++++++--- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9e7d5d04..0b8f4f9d 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -29,6 +29,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" + spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] @@ -40,5 +41,4 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" - spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index c61ddcf9..833d6ec3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -1,10 +1,21 @@ require 'concurrent/atomics' require 'json' require 'yaml' -require 'listen' require 'pathname' module LaunchDarkly + # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the + # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' + # gem has been provided by the host app. + @@have_listen = false + begin + require 'listen' + @@have_listen = true + rescue + end + def self.can_watch_files? + @@have_listen + end # # Provides a way to use local files as a source of feature flag state. This would typically be @@ -87,8 +98,10 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that auto-updating - # will only work if all of the files you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. In order to use this + # feature, you must install the 'listen' gem - it is not included by default to avoid adding + # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files + # you specified have valid directory paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do # not need to set this as there is a native OS mechanism for detecting file changes; on other @@ -110,6 +123,10 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] + if @auto_update && !LaunchDarkly::can_watch_files? + @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." 
} + @auto_update = false + end @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 789b5a4b54de8d84802af0579bacabbd07f92169 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:04:07 -0700 Subject: [PATCH 026/292] readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 1790b2d4..ead2bb6b 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,10 @@ else end ``` +Using flag data from a file +--------------------------- +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + Learn more ----------- From 31a62c59a8f2209dbd758ca27fe113825b2a2943 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 13:20:02 -0700 Subject: [PATCH 027/292] fix key handling and client integration, add tests --- lib/ldclient-rb/file_data_source.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- spec/file_data_source_spec.rb | 46 ++++++++++++++++++++++++++--- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 833d6ec3..10588b5d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -215,7 +215,7 @@ def add_item(all_data, kind, item) if !items[item[:key]].nil? raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key]] = item + items[item[:key].to_sym] = item end def make_flag_with_value(key, value) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 94c24229..f8a75780 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -42,7 +42,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.update_processor @update_processor = @config.update_processor else - factory = @config.update_processor || self.method(:create_default_update_processor) + factory = @config.update_processor_factory || self.method(:create_default_update_processor) @update_processor = factory.call(sdk_key, config) end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c163d385..cf5d52ad 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -3,17 +3,23 @@ describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } + let(:full_flag_1_value) { "on" } let(:flag_value_1_key) { "flag2" } - let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:flag_value_1) { "value2" } + let(:all_flag_keys) { [ full_flag_1_key.to_sym, flag_value_1_key.to_sym ] } let(:full_segment_1_key) { "seg1" } - let(:all_segment_keys) { [ full_segment_1_key ] } + let(:all_segment_keys) { [ full_segment_1_key.to_sym ] } let(:flag_only_json) { <<-EOF { "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } } } @@ -25,7 +31,11 @@ "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } }, "flagValues": { @@ -156,6 +166,34 @@ def with_data_source(options) end end + it "evaluates simplified flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ 
paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(flag_value_1_key, { key: 'user' }, '') + expect(value).to eq(flag_value_1) + ensure + client.close + end + end + + it "evaluates full flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(full_flag_1_key, { key: 'user' }, '') + expect(value).to eq(full_flag_1_value) + ensure + client.close + end + end + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From 778cb6dc5e4c2c367ccd2c1c7399a1338ec5196a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:08:46 -0700 Subject: [PATCH 028/292] debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..58c754ba 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: From 20dbef28105da9a1eca453ee86f2ff90267f4793 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:13:58 -0700 Subject: [PATCH 029/292] debugging --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 58c754ba..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done From f1c00b1616a6767dd350c44497ba71d6b03e4bff Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:47:10 -0700 Subject: [PATCH 030/292] add fallback polling logic, fix tests --- lib/ldclient-rb/file_data_source.rb | 85 ++++++++++++++++++++++------- spec/file_data_source_spec.rb | 23 +++++++- 2 files changed, 86 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 10588b5d..ae19bea8 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -13,7 +13,7 @@ module LaunchDarkly @@have_listen = true rescue end - def self.can_watch_files? + def self.have_listen? @@have_listen end @@ -23,10 +23,10 @@ def self.can_watch_files? # actual LaunchDarkly connection. 
# # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # `update_processor_class` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled @@ -98,14 +98,15 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. In order to use this - # feature, you must install the 'listen' gem - it is not included by default to avoid adding - # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files - # you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. Note that the default + # implementation of this feature is based on polling the filesystem, which may not perform + # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted + # dependencies to the SDK), its native file watching mechanism will be used instead. Note + # that auto-updating will only work if all of the files you specified have valid directory + # paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for - # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do - # not need to set this as there is a native OS mechanism for detecting file changes; on other - # platforms, the default interval is one second. + # file modifications - used only if auto_update is true, and if the native file-watching + # mechanism from 'listen' is not being used. # def self.factory(options={}) return Proc.new do |sdk_key, config| @@ -123,11 +124,8 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - if @auto_update && !LaunchDarkly::can_watch_files? - @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." } - @auto_update = false - end - @poll_interval = options[:poll_interval] + @use_listen = @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -229,12 +227,17 @@ def make_flag_with_value(key, value) def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + if @use_listen + start_listener_with_listen_gem(resolved_paths) + else + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + end + end + + def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - opts = {} - if !@poll_interval.nil? 
- opts[:latency] = @poll_interval - end + opts = { latency: @poll_interval } l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? { |p| path_set.include?(p) } @@ -244,5 +247,49 @@ def start_listener l.start l end + + # + # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # + class FileDataSourcePoller + def initialize(resolved_paths, interval, reloader) + @stopped = Concurrent::AtomicBoolean.new(false) + get_file_times = Proc.new do + ret = {} + resolved_paths.each do |path| + begin + ret[path] = File.mtime(path) + rescue + ret[path] = nil + end + end + ret + end + last_times = get_file_times.call + @thread = Thread.new do + while true + sleep interval + break if @stopped.value + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end + end + if changed + reloader.call + end + end + end + end + + def stop + @stopped.make_true + @thread.run # wakes it up if it's sleeping + end + end end end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index cf5d52ad..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -71,10 +71,18 @@ before do @config = LaunchDarkly::Config.new @store = @config.feature_store + @tmp_dir = Dir.mktmpdir + end + + after do + FileUtils.remove_dir(@tmp_dir) end def make_temp_file(content) - file = Tempfile.new('flags') + # Note that we don't create our files in the default temp file directory, but rather in an empty directory + # that we made. That's because (depending on the platform) the temp file directory may contain huge numbers + # of files, which can make the file watcher perform poorly enough to break the tests. 
+ file = Tempfile.new('flags', @tmp_dir) IO.write(file, content) file end @@ -149,10 +157,11 @@ def with_data_source(options) end end - it "reloads modified file if auto-update is on" do + def test_auto_reload(options) file = make_temp_file(flag_only_json) + options[:paths] = [ file.path ] - with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + with_data_source(options) do |ds| event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) @@ -166,6 +175,14 @@ def with_data_source(options) end end + it "reloads modified file if auto-update is on" do + test_auto_reload({ auto_update: true }) + end + + it "reloads modified file in polling mode" do + test_auto_reload({ auto_update: true, force_polling: true, poll_interval: 0.1 }) + end + it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) From 198b843bba00fe92e9cfa9ef658c2649ce09be2f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:02:22 -0700 Subject: [PATCH 031/292] rm debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done From c5d1823372044bd067049fed90fb8e1f13428d94 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:25:16 -0700 Subject: [PATCH 032/292] debugging --- spec/file_data_source_spec.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..194ebc2c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,6 +68,8 @@ let(:bad_file_path) { "no-such-file" } + Thread.report_on_exception = true + before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 9baffe35cf84bbfdbf77f01989437620f4124bc7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:38:15 -0700 Subject: [PATCH 033/292] debugging --- .circleci/config.yml | 2 +- spec/file_data_source_spec.rb | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 194ebc2c..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,8 +68,6 @@ let(:bad_file_path) { "no-such-file" } - Thread.report_on_exception = true - before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 4d8121592756df99aefbef4c0aeb78032f544046 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:47:03 -0700 Subject: [PATCH 034/292] debugging --- lib/ldclient-rb/file_data_source.rb | 2 ++ spec/file_data_source_spec.rb | 1 + 2 files changed, 3 insertions(+) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index ae19bea8..de8ef34e 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -238,8 +238,10 @@ def 
start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } + puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed + puts('*** got listener notification: #{paths}') if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..f06c19f9 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -168,6 +168,7 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) + puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 30d0cd270acf6518555e126bad28c689177ebb1d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:48:09 -0700 Subject: [PATCH 035/292] debugging --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index de8ef34e..9a63e56b 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -241,7 +241,7 @@ def start_listener_with_listen_gem(resolved_paths) puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts('*** got listener notification: #{paths}') + puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end From 8cb2ed9adc1a7ac486f077eeb37d0100fa9d9bb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:51:27 -0700 Subject: [PATCH 036/292] comment correction --- lib/ldclient-rb/file_data_source.rb | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 9a63e56b..71f3a8be 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -23,10 +23,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_class` property of your LaunchDarkly client configuration. In the options + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) + # factory = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(update_processor_factory: factory) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled From a10f973ad98f033bd480e2ca9568041e826cd02b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:29 -0700 Subject: [PATCH 037/292] documentation --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 71f3a8be..721eff75 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -107,7 +107,7 @@ class FileDataSource # paths at startup time. 
# @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching - # mechanism from 'listen' is not being used. + # mechanism from 'listen' is not being used. The default value is 1 second. # def self.factory(options={}) return Proc.new do |sdk_key, config| From 16cf9c086c06344d352b6e85bb6e02449af44cc1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:54 -0700 Subject: [PATCH 038/292] always use YAML parser --- lib/ldclient-rb/file_data_source.rb | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 721eff75..a607923d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -190,11 +190,10 @@ def load_file(path, all_data) end def parse_content(content) - if content.strip.start_with?("{") - JSON.parse(content, symbolize_names: true) - else - symbolize_all_keys(YAML.load(content)) - end + # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while + # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least + # for all the samples of actual flag data that we've tested). + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) From 27d954e7f5f84ba4b87573ff80e9304a4eedab3b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:21:29 -0700 Subject: [PATCH 039/292] report internal error that shouldn't happen --- lib/ldclient-rb/file_data_source.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index a607923d..fae68123 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -209,7 +209,8 @@ def symbolize_all_keys(value) end def add_item(all_data, kind, item) - items = all_data[kind] || {} + items = all_data[kind] + raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash if !items[item[:key]].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end From fd308a9de3142b8fd493a995411d320a42664932 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:24:28 -0700 Subject: [PATCH 040/292] add test for multiple files --- spec/file_data_source_spec.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index f06c19f9..c0af4c67 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -24,7 +24,19 @@ } } EOF -} + } + + let(:segment_only_json) { <<-EOF + { + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } let(:all_properties_json) { <<-EOF { @@ -143,6 +155,16 @@ def with_data_source(options) end end + it "can load multiple files" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(segment_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From 1d016bfc9349000c8ddffce20b48634e1e20d6b3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:26:10 -0700 Subject: [PATCH 041/292] fix duplicate key checking (string vs. symbol problem) --- lib/ldclient-rb/file_data_source.rb | 5 +++-- spec/file_data_source_spec.rb | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index fae68123..aebd9709 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -211,10 +211,11 @@ def symbolize_all_keys(value) def add_item(all_data, kind, item) items = all_data[kind] raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash - if !items[item[:key]].nil? + key = item[:key].to_sym + if !items[key].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key].to_sym] = item + items[key] = item end def make_flag_with_value(key, value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c0af4c67..10e49e3c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -160,11 +160,22 @@ def with_data_source(options) file2 = make_temp_file(segment_only_json) with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| ds.start + expect(@store.initialized?).to eq(true) expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) end end + it "does not allow duplicate keys" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(flag_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From c3e66d35c64909084d6d879fa485497fddf6c4a4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:44:09 -0700 Subject: [PATCH 042/292] Don't use 'listen' in JRuby 9.1 --- lib/ldclient-rb/file_data_source.rb | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index aebd9709..23834be4 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -99,12 +99,12 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that the default - # implementation of this feature is based on polling the filesystem, which may not perform - # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted - # dependencies to the SDK), its native file watching mechanism will be used instead. Note - # that auto-updating will only work if all of the files you specified have valid directory - # paths at startup time. + # the source file(s) and reload flags whenever there is a change. Auto-updating will only + # work if all of the files you specified have valid directory paths at startup time. + # Note that the default implementation of this feature is based on polling the filesystem, + # which may not perform well. If you install the 'listen' gem (not included by default, to + # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be + # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. @@ -125,7 +125,15 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - @use_listen = @auto_update && LaunchDarkly.have_listen? 
&& !options[:force_polling] # force_polling is used only for tests + if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). + # Therefore, on that platform we'll fall back to file polling instead. + if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") + @use_listen = false + else + @use_listen = true + end + end @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 1a36fd86ab5b867ad265e89f13d9c8e839278b39 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:50:22 -0700 Subject: [PATCH 043/292] rm debugging --- .circleci/config.yml | 2 +- lib/ldclient-rb/file_data_source.rb | 2 -- spec/file_data_source_spec.rb | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 23834be4..1549f6ec 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -248,10 +248,8 @@ def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } - puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 10e49e3c..60107e26 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -201,7 +201,6 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) - puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 78ba8150b1a486b2a568ff7ac59f8b589fdfe98e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:02:32 -0700 Subject: [PATCH 044/292] better error handling in poll thread --- lib/ldclient-rb/file_data_source.rb | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 1549f6ec..c5207afb 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -240,7 +240,7 @@ def start_listener if @use_listen start_listener_with_listen_gem(resolved_paths) else - FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) end end @@ -262,14 +262,14 @@ def start_listener_with_listen_gem(resolved_paths) # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. 
# class FileDataSourcePoller - def initialize(resolved_paths, interval, reloader) + def initialize(resolved_paths, interval, reloader, logger) @stopped = Concurrent::AtomicBoolean.new(false) get_file_times = Proc.new do ret = {} resolved_paths.each do |path| begin ret[path] = File.mtime(path) - rescue + rescue Errno::ENOENT ret[path] = nil end end @@ -280,17 +280,19 @@ def initialize(resolved_paths, interval, reloader) while true sleep interval break if @stopped.value - new_times = get_file_times.call - changed = false - last_times.each do |path, old_time| - new_time = new_times[path] - if !new_time.nil? && new_time != old_time - changed = true - break + begin + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end end - end - if changed - reloader.call + reloader.call if changed + rescue => exn + Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) end end end From 38f534fd3b5968a7d6f75cf5f214be768f810f9f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:51:09 -0700 Subject: [PATCH 045/292] don't use Thread.raise to stop PollingProcessor thread; add test for PollingProcessor.stop --- lib/ldclient-rb/polling.rb | 3 +- spec/polling_spec.rb | 81 ++++++++++++++++++++++++++------------ 2 files changed, 58 insertions(+), 26 deletions(-) diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 15965201..4ecd93f8 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,8 @@ def start def stop if @stopped.make_true if @worker && @worker.alive? - @worker.raise "shutting down client" + @worker.run # causes the thread to wake up if it's currently in a sleep + @worker.join end @config.logger.info { "[LDClient] Polling connection stopped" } end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 8183b8c3..690147d0 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -3,10 +3,17 @@ describe LaunchDarkly::PollingProcessor do subject { LaunchDarkly::PollingProcessor } - let(:store) { LaunchDarkly::InMemoryFeatureStore.new } - let(:config) { LaunchDarkly::Config.new(feature_store: store) } let(:requestor) { double() } - let(:processor) { subject.new(config, requestor) } + + def with_processor(store) + config = LaunchDarkly::Config.new(feature_store: store) + processor = subject.new(config, requestor) + begin + yield processor + ensure + processor.stop + end + end describe 'successful request' do flag = { key: 'flagkey', version: 1 } @@ -22,47 +29,60 @@ it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) - expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) + expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + end end it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(processor.initialized?).to be true - expect(store.initialized?).to be true + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + 
expect(processor.initialized?).to be true + expect(store.initialized?).to be true + end end end describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false - expect(store.initialized?).to be false + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + expect(store.initialized?).to be false + end end end describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be true - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be true + expect(processor.initialized?).to be false + end end def verify_recoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + end end it 'stops immediately for error 401' do @@ -85,5 +105,16 @@ def verify_recoverable_http_error(status) verify_recoverable_http_error(503) end end -end + describe 'stop' do + it 'stops promptly rather than continuing to wait for poll interval' do + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + sleep(1) # somewhat arbitrary, but should ensure that it has started polling + start_time = Time.now + processor.stop + end_time = Time.now + expect(end_time - start_time).to be <(LaunchDarkly::Config.default_poll_interval - 5) + end + end + end +end From 5a875c8db7fff721c60040334c6da2df1133c9d7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 10 Dec 2018 12:24:24 -0800 Subject: [PATCH 046/292] test on most recent patch version of each Ruby minor version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..7fe98354 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,22 +32,22 @@ jobs: test-2.2: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.2.9-jessie + - image: circleci/ruby:2.2.10-jessie - image: redis test-2.3: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.3.6-jessie + - image: circleci/ruby:2.3.7-jessie - image: redis test-2.4: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.4.4-stretch + - image: circleci/ruby:2.4.5-stretch - image: redis test-2.5: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.5.1-stretch + - image: circleci/ruby:2.5.3-stretch - image: redis test-jruby-9.2: <<: *ruby-docker-template From cf7d9002e1adac2335d50ccb20b278dfedce4ad6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:47:40 -0800 Subject: [PATCH 047/292] factor common logic out of 
RedisFeatureStore, add integrations module --- lib/ldclient-rb.rb | 2 + lib/ldclient-rb/in_memory_store.rb | 2 + lib/ldclient-rb/integrations.rb | 257 +++++++++++++++++++++++ lib/ldclient-rb/interfaces.rb | 102 +++++++++ lib/ldclient-rb/redis_store.rb | 327 +++++++++++++---------------- spec/feature_store_spec_base.rb | 2 +- spec/integrations_helpers_spec.rb | 276 ++++++++++++++++++++++++ spec/redis_feature_store_spec.rb | 28 +-- 8 files changed, 803 insertions(+), 193 deletions(-) create mode 100644 lib/ldclient-rb/integrations.rb create mode 100644 lib/ldclient-rb/interfaces.rb create mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index d3ee6ffc..a1d7ffd9 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,4 +1,5 @@ require "ldclient-rb/version" +require "ldclient-rb/interfaces" require "ldclient-rb/util" require "ldclient-rb/evaluation" require "ldclient-rb/flags_state" @@ -17,5 +18,6 @@ require "ldclient-rb/event_summarizer" require "ldclient-rb/events" require "ldclient-rb/redis_store" +require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index e3e85879..4814c85d 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -20,6 +20,8 @@ module LaunchDarkly # streaming API. # class InMemoryFeatureStore + include LaunchDarkly::Interfaces::FeatureStore + def initialize @items = Hash.new @lock = Concurrent::ReadWriteLock.new diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb new file mode 100644 index 00000000..21910b09 --- /dev/null +++ b/lib/ldclient-rb/integrations.rb @@ -0,0 +1,257 @@ +require "concurrent/atomics" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + + module Helpers + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + INITED_CACHE_KEY = "$inited" + + private_constant :INITED_CACHE_KEY + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + capacity = opts[:capacity] || 1000 + if expiration_seconds > 0 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? + @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? 
+ @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[INITED_CACHE_KEY] + if result.nil? + result = @core.initialized_internal? + @cache[INITED_CACHE_KEY] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + def all_cache_key(kind) + kind + end + + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. + # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. 
This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb new file mode 100644 index 00000000..09e7797d --- /dev/null +++ b/lib/ldclient-rb/interfaces.rb @@ -0,0 +1,102 @@ + +module LaunchDarkly + module Interfaces + # + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from + # the LaunchDarkly service. Implementations must support concurrent access and updates. + # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # An entity that can be stored in a feature store is a hash that can be converted to and from + # JSON, and that has at a minimum the following properties: `:key`, a string that is unique + # among entities of the same kind; `:version`, an integer that is higher for newer data; + # `:deleted`, a boolean (optional, defaults to false) that if true means this is a + # placeholder for a deleted entity. + # + # Examples of a "kind" are feature flags and segments; each of these is associated with an + # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are + # hashes with a single property, `:namespace`, which is a short string unique to that kind. + # + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new + # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # simpler. + # + module FeatureStore + # + # Initializes (or re-initializes) the store with the specified set of entities. Any + # existing entries will be removed. Implementations can assume that this data set is up to + # date-- there is no need to perform individual version comparisons between the existing + # objects and the supplied features. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init(all_data) + end + + # + # Returns the entity to which the specified key is mapped, if any. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found, or if the stored entity's + # `:deleted` property was true + # + def get(kind, key) + end + + # + # Returns all stored entities of the specified kind, not including deleted entities. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def all(kind) + end + + # + # Attempt to add an entity, or update an existing entity with the same key. An update + # should only succeed if the new item's `:version` is greater than the old one; + # otherwise, the method should do nothing. 
+ # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # + def upsert(kind, item) + end + + # + # Attempt to delete an entity if it exists. Deletion should only succeed if the + # `version` parameter is greater than the existing entity's `:version`; otherwise, the + # method should do nothing. + # + # @param kind [Object] the kind of entity to delete + # @param key [String] the unique key of the entity + # @param version [Integer] the entity must have a lower version than this to be deleted + # + def delete(kind, key, version) + end + + # + # Checks whether this store has been initialized. That means that `init` has been called + # either by this process, or (if the store can be shared) by another process. This + # method will be called frequently, so it should be efficient. You can assume that if it + # has returned true once, it can continue to return true, i.e. a store cannot become + # uninitialized again. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end +end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..99912f5f 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # @@ -13,6 +12,9 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # + # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # implementation class may change in the future. + # class RedisFeatureStore begin require "redis" @@ -22,6 +24,139 @@ class RedisFeatureStore REDIS_ENABLED = false end + include LaunchDarkly::Interfaces::FeatureStore + + # + # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
+ # + class RedisFeatureStoreCore + def initialize(opts) + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ +in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + # exposed for testing + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) + end + end + + private_constant :RedisFeatureStoreCore + # # Constructor for a RedisFeatureStore instance. 
# @@ -31,45 +166,17 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally - # @option opts [Object] :pool custom connection pool, used for testing only + # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) if !REDIS_ENABLED raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") end - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = RedisFeatureStore.default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || RedisFeatureStore.default_prefix - @logger = opts[:logger] || Config.default_logger - - expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 - if expiration_seconds > 0 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - @stopped = Concurrent::AtomicBoolean.new(false) - @inited = MemoizedValue.new { - query_inited - } - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ -and prefix: #{@prefix}") - end + @core = RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) end # @@ -77,178 +184,42 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - 'redis://localhost:6379/0' + LaunchDarkly::Integrations::Redis.default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - 'launchdarkly' + LaunchDarkly::Integrations::Redis.default_prefix end def get(kind, key) - f = @cache.nil? ? nil : @cache[cache_key(kind, key)] - if f.nil? - @logger.debug { "RedisFeatureStore: no cache hit for #{key} in '#{kind[:namespace]}', requesting from Redis" } - f = with_connection do |redis| - begin - get_redis(kind, redis, key.to_sym) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis in '#{kind[:namespace]}', with error: #{e}" } - nil - end - end - end - if f.nil? 
- @logger.debug { "RedisFeatureStore: #{key} not found in '#{kind[:namespace]}'" } - nil - elsif f[:deleted] - @logger.debug { "RedisFeatureStore: #{key} was deleted in '#{kind[:namespace]}', returning nil" } - nil - else - f - end + @wrapper.get(kind, key) end def all(kind) - fs = {} - with_connection do |redis| - begin - hashfs = redis.hgetall(items_key(kind)) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve all '#{kind[:namespace]}' items from Redis with error: #{e}; returning none" } - hashfs = {} - end - hashfs.each do |k, jsonItem| - f = JSON.parse(jsonItem, symbolize_names: true) - if !f[:deleted] - fs[k.to_sym] = f - end - end - end - fs + @wrapper.all(kind) end def delete(kind, key, version) - update_with_versioning(kind, { key: key, version: version, deleted: true }) + @wrapper.delete(kind, key, version) end def init(all_data) - @cache.clear if !@cache.nil? - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - begin - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - items.each { |key, item| - put_cache(kind, key.to_sym, item) - } - rescue => e - @logger.error { "RedisFeatureStore: could not initialize '#{kind[:namespace]}' in Redis, error: #{e}" } - end - end - end - @inited.set(true) - @logger.info { "RedisFeatureStore: initialized with #{count} items" } + @wrapper.init(all_data) end def upsert(kind, item) - update_with_versioning(kind, item) + @wrapper.upsert(kind, item) end def initialized? - @inited.get + @wrapper.initialized? end def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - @cache.clear if !@cache.nil? - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(kind, redis, key) - begin - json_item = redis.hget(items_key(kind), key) - if json_item - item = JSON.parse(json_item, symbolize_names: true) - put_cache(kind, key, item) - item - else - nil - end - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis, error: #{e}" } - nil - end - end - - def put_cache(kind, key, value) - @cache[cache_key(kind, key)] = value if !@cache.nil? - end - - def update_with_versioning(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(kind, redis, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - begin - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - else - put_cache(kind, key.to_sym, new_item) - end - rescue => e - @logger.error { "RedisFeatureStore: could not store #{key} in Redis, error: #{e}" } - end - else - action = new_item[:deleted] ? 
"delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ - in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - end - - def query_inited - with_connection { |redis| redis.exists(items_key(FEATURES)) } + @wrapper.stop end end end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index d6c1cedc..0e0f1ca9 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,6 +1,6 @@ require "spec_helper" -RSpec.shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method| let(:feature0) { { diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb new file mode 100644 index 00000000..24404a72 --- /dev/null +++ b/spec/integrations_helpers_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d27cdb39..d5ccfb65 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -10,12 +10,12 @@ $null_log.level = ::Logger::FATAL -def create_redis_store() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 60) +def create_redis_store(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) end -def create_redis_store_uncached() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 0) +def create_redis_store_uncached(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) end @@ -32,9 +32,10 @@ def create_redis_store_uncached() include_examples "feature_store", method(:create_redis_store_uncached) end - def add_concurrent_modifier(store, other_client, flag, start_version, end_version) + def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) + test_hook = Object.new version_counter = start_version - expect(store).to receive(:before_update_transaction) { |base_key, key| + expect(test_hook).to receive(:before_update_transaction) { |base_key, key| if version_counter <= end_version new_flag = flag.clone new_flag[:version] = version_counter @@ -42,18 +43,18 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio version_counter = version_counter + 1 end }.at_least(:once) + test_hook end it "handles upsert race condition against external client with lower version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 2, 4) - my_ver = { key: "foo", version: 10 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) @@ -64,15 +65,14 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio end it "handles upsert race condition against external client with higher version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 3, 3) - my_ver = { key: "foo", version: 2 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) From 4d34bc4811d1ee4ae7c65aa58e5d9c6e0ec0b28a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:53:42 -0800 Subject: [PATCH 048/292] fix method reference --- lib/ldclient-rb/integrations.rb | 6 +++--- lib/ldclient-rb/redis_store.rb | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 21910b09..8f806fbb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -9,7 +9,7 @@ module 
Redis # # @return [String] the default Redis URL # - def default_redis_url + def self.default_redis_url 'redis://localhost:6379/0' end @@ -18,7 +18,7 @@ def default_redis_url # # @return [String] the default key prefix # - def default_prefix + def self.default_prefix 'launchdarkly' end @@ -40,7 +40,7 @@ def default_prefix # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def new_feature_store(opts) + def self.new_feature_store(opts) return RedisFeatureStore.new(opts) end end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 99912f5f..6a429ce0 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -36,13 +36,13 @@ def initialize(opts) @redis_opts[:url] = opts[:redis_url] end if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url end max_connections = opts[:max_connections] || 16 @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do Redis.new(@redis_opts) end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -184,14 +184,14 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - LaunchDarkly::Integrations::Redis.default_redis_url + LaunchDarkly::Integrations::Redis::default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - LaunchDarkly::Integrations::Redis.default_prefix + LaunchDarkly::Integrations::Redis::default_prefix end def get(kind, key) From 19182adce2bfa73a4e7fb9fd7edccd604edc7ac6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:14:04 -0800 Subject: [PATCH 049/292] misc cleanup --- lib/ldclient-rb/integrations.rb | 32 ++-- lib/ldclient-rb/interfaces.rb | 2 +- lib/ldclient-rb/redis_store.rb | 4 +- spec/integrations_helpers_spec.rb | 276 ------------------------------ 4 files changed, 25 insertions(+), 289 deletions(-) delete mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 8f806fbb..4d49d1c4 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,7 +1,13 @@ require "concurrent/atomics" module LaunchDarkly + # + # Tools for connecting the LaunchDarkly client to other software. + # module Integrations + # + # Integration with [Redis](https://redis.io/). + # module Redis # # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of @@ -45,7 +51,10 @@ def self.new_feature_store(opts) end end - module Helpers + # + # Support code that may be useful for integrations. + # + module Util # # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} # pattern that delegates part of its behavior to another object, while providing optional caching @@ -59,10 +68,6 @@ module Helpers class CachingStoreWrapper include LaunchDarkly::Interfaces::FeatureStore - INITED_CACHE_KEY = "$inited" - - private_constant :INITED_CACHE_KEY - # # Creates a new store wrapper instance. 
# @@ -75,8 +80,8 @@ def initialize(core, opts) @core = core expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 @cache = ExpiringCache.new(capacity, expiration_seconds) else @cache = nil @@ -146,10 +151,10 @@ def initialized? if @cache.nil? result = @core.initialized_internal? else - result = @cache[INITED_CACHE_KEY] + result = @cache[inited_cache_key] if result.nil? result = @core.initialized_internal? - @cache[INITED_CACHE_KEY] = result + @cache[inited_cache_key] = result end end @@ -163,12 +168,19 @@ def stop private + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. def all_cache_key(kind) kind end - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" end def item_if_not_deleted(item) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 09e7797d..6226cbe1 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -20,7 +20,7 @@ module Interfaces # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new - # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # module FeatureStore diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 6a429ce0..97cec272 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -12,7 +12,7 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # - # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may change in the future. # class RedisFeatureStore @@ -176,7 +176,7 @@ def initialize(opts = {}) end @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) end # diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb deleted file mode 100644 index 24404a72..00000000 --- a/spec/integrations_helpers_spec.rb +++ /dev/null @@ -1,276 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do - subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } - - THINGS = { namespace: "things" } - - shared_examples "tests" do |cached| - opts = cached ? { expiration: 30 } : { expiration: 0 } - - it "gets item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? 
itemv1 : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets deleted item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1, deleted: true } - itemv2 = { key: key, version: 2, deleted: false } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets missing item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - item = { key: key, version: 1 } - - expect(wrapper.get(THINGS, key)).to eq nil - - core.force_set(THINGS, item) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result - end - - it "gets all items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) - - core.force_remove(THINGS, item2[:key]) - expect(wrapper.all(THINGS)).to eq (cached ? - { item1[:key] => item1, item2[:key] => item2 } : - { item1[:key] => item1 }) - end - - it "gets all items filtering out deleted items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1, deleted: true } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) - end - - it "upserts item successfully" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv1 - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - # if we have a cache, verify that the new item is now cached by writing a different value - # to the underlying data - Get should still return the cached item - if cached - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) - end - - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "deletes item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2, deleted: true } - itemv3 = { key: key, version: 3 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - wrapper.delete(THINGS, key, 2) - expect(core.data[THINGS][key]).to eq itemv2 - - core.force_set(THINGS, itemv3) # make a change that bypasses the cache - - expect(wrapper.get(THINGS, key)).to eq (cached ? 
nil : itemv3) - end - end - - context "cached" do - include_examples "tests", true - - cached_opts = { expiration: 30 } - - it "get uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.get(THINGS, item1[:key])).to eq item1 - end - - it "get all uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) - end - - it "upsert doesn't update cache if unsuccessful" do - # This is for an upsert where the data in the store has a higher version. In an uncached - # store, this is just a no-op as far as the wrapper is concerned so there's nothing to - # test here. In a cached store, we need to verify that the cache has been refreshed - # using the data that was found in the store. - core = MockCore.new - wrapper = subject.new(core, cached_opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same - - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "initialized? 
can cache false result" do - core = MockCore.new - wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - sleep(0.5) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - # From this point on it should remain true and the method should not be called - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - end - - context "uncached" do - include_examples "tests", false - - uncached_opts = { expiration: 0 } - - it "queries internal initialized state only if not already inited" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - core.inited = false - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - - it "does not query internal initialized state if init has been called" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - wrapper.init({}) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 1 - end - end - - class MockCore - def initialize - @data = {} - @inited = false - @inited_query_count = 0 - end - - attr_reader :data - attr_reader :inited_query_count - attr_accessor :inited - - def force_set(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - @data[kind][item[:key]] = item - end - - def force_remove(kind, key) - @data[kind].delete(key) if @data.has_key?(kind) - end - - def init_internal(all_data) - @data = all_data - @inited = true - end - - def get_internal(kind, key) - items = @data[kind] - items.nil? ? nil : items[key] - end - - def get_all_internal(kind) - @data[kind] - end - - def upsert_internal(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - old_item = @data[kind][item[:key]] - return old_item if !old_item.nil? && old_item[:version] >= item[:version] - @data[kind][item[:key]] = item - item - end - - def initialized_internal? - @inited_query_count = @inited_query_count + 1 - @inited - end - end -end From 5941638a33c7ecf703a565eabab0584871da8670 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:23:17 -0800 Subject: [PATCH 050/292] comment --- lib/ldclient-rb/integrations.rb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 4d49d1c4..2df5e04c 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -240,6 +240,9 @@ def get_all_internal(kind) # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. 
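For illustration (a hypothetical call sequence, not part of this changeset; a wrapper instance is assumed, `LaunchDarkly::FEATURES` is the flag kind, and the key/version values are placeholders), a delete issued to the wrapper reaches the core only as an upsert of a tombstone item:

    # These two wrapper calls are equivalent from the core's point of view:
    wrapper.delete(LaunchDarkly::FEATURES, "my-flag", 42)
    wrapper.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 42, deleted: true })
    # Either way the core sees upsert_internal(FEATURES, { key: "my-flag", version: 42, deleted: true })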
+ # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update # @return [Hash] the entity as it now exists in the store after the update From b4cf610105cba3f2e540d5c933b4826bb8a85b77 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:25:04 -0800 Subject: [PATCH 051/292] re-add test file --- spec/integrations_util_spec.rb | 276 +++++++++++++++++++++++++++++++++ 1 file changed, 276 insertions(+) create mode 100644 spec/integrations_util_spec.rb diff --git a/spec/integrations_util_spec.rb b/spec/integrations_util_spec.rb new file mode 100644 index 00000000..e7890802 --- /dev/null +++ b/spec/integrations_util_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Util::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Util::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end From 3f9ef3b9166832d2f3bb551d348822697d4a7d38 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:05:59 -0800 Subject: [PATCH 052/292] misc cleanup --- lib/ldclient-rb.rb | 3 +- lib/ldclient-rb/impl.rb | 10 + .../impl/integrations/redis_impl.rb | 153 ++++++++++ lib/ldclient-rb/integrations.rb | 262 +----------------- lib/ldclient-rb/integrations/redis.rb | 48 ++++ .../integrations/util/store_wrapper.rb | 222 +++++++++++++++ lib/ldclient-rb/interfaces.rb | 3 + lib/ldclient-rb/redis_store.rb | 153 +--------- .../store_wrapper_spec.rb} | 0 9 files changed, 454 insertions(+), 400 deletions(-) create mode 100644 lib/ldclient-rb/impl.rb create mode 100644 lib/ldclient-rb/impl/integrations/redis_impl.rb create mode 100644 lib/ldclient-rb/integrations/redis.rb create mode 100644 lib/ldclient-rb/integrations/util/store_wrapper.rb rename spec/{integrations_util_spec.rb => integrations/store_wrapper_spec.rb} (100%) diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index a1d7ffd9..e355a304 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -17,7 +17,6 @@ require "ldclient-rb/non_blocking_thread_pool" require "ldclient-rb/event_summarizer" require "ldclient-rb/events" -require "ldclient-rb/redis_store" -require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" +require "ldclient-rb/integrations" diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb new file mode 100644 index 00000000..85079baf --- /dev/null +++ b/lib/ldclient-rb/impl.rb @@ -0,0 +1,10 @@ + +module LaunchDarkly + # + # Low-level implementation classes. Everything in this module should be considered non-public + # and subject to change with any release. + # + module Impl + # code is in ldclient-rb/impl/ + end +end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb new file mode 100644 index 00000000..325b936e --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -0,0 +1,153 @@ +require "concurrent/atomics" +require "json" + +require "ldclient-rb/integrations/util/store_wrapper" +require "ldclient-rb/redis_store" # eventually that file should be moved inside this one + +module LaunchDarkly + module Impl + module Integrations + module Redis + # + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
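A rough sketch of the intended composition (placeholder option values; applications would normally get this wiring through RedisFeatureStore or the LaunchDarkly::Integrations::Redis factory rather than building it by hand, and the redis and connection_pool gems must be installed):

    opts  = { redis_url: "redis://localhost:6379/0", prefix: "launchdarkly", expiration: 15 }
    core  = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts)
    store = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
    # `store` now behaves as a LaunchDarkly::Interfaces::FeatureStore; caching and
    # filtering of deleted items are handled by the wrapper, not by the Redis code.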
+ # + class RedisFeatureStoreCore + begin + require "redis" + require "connection_pool" + REDIS_ENABLED = true + rescue ScriptError, StandardError + REDIS_ENABLED = false + end + + def initialize(opts) + if !REDIS_ENABLED + raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + end + + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + ::Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? 
nil : JSON.parse(json_item, symbolize_names: true) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 2df5e04c..02b2d435 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,4 @@ -require "concurrent/atomics" +require "ldclient-rb/integrations/redis" module LaunchDarkly # @@ -8,265 +8,19 @@ module Integrations # # Integration with [Redis](https://redis.io/). # + # @since 5.5.0 + # module Redis - # - # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of - # Redis running at `localhost` with its default port. - # - # @return [String] the default Redis URL - # - def self.default_redis_url - 'redis://localhost:6379/0' - end - - # - # Default value for the `prefix` option for {new_feature_store}. - # - # @return [String] the default key prefix - # - def self.default_prefix - 'launchdarkly' - end - - # - # Creates a Redis-backed persistent feature store. - # - # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). - # - # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly - # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object - # - def self.new_feature_store(opts) - return RedisFeatureStore.new(opts) - end + # code is in ldclient-rb/impl/integrations/redis_impl end # - # Support code that may be useful for integrations. + # Support code that may be helpful in creating integrations. + # + # @since 5.5.0 # module Util - # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} - # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every feature store implementation. - # This makes it easier to create new database integrations by implementing only the database-specific - # logic. - # - # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner - # implementation object. - # - class CachingStoreWrapper - include LaunchDarkly::Interfaces::FeatureStore - - # - # Creates a new store wrapper instance. 
- # - # @param core [Object] an object that implements the {FeatureStoreCore} methods - # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # - def initialize(core, opts) - @core = core - - expiration_seconds = opts[:expiration] || 15 - if expiration_seconds > 0 - capacity = opts[:capacity] || 1000 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - - @inited = Concurrent::AtomicBoolean.new(false) - end - - def init(all_data) - @core.init_internal(all_data) - @inited.make_true - - if !@cache.nil? - @cache.clear - all_data.each do |kind, items| - @cache[kind] = items_if_not_deleted(items) - items.each do |key, item| - @cache[item_cache_key(kind, key)] = [item] - end - end - end - end - - def get(kind, key) - if !@cache.nil? - cache_key = item_cache_key(kind, key) - cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values - return item_if_not_deleted(cached[0]) if !cached.nil? - end - - item = @core.get_internal(kind, key) - - if !@cache.nil? - @cache[cache_key] = [item] - end - - item_if_not_deleted(item) - end - - def all(kind) - if !@cache.nil? - items = @cache[all_cache_key(kind)] - return items if !items.nil? - end - - items = items_if_not_deleted(@core.get_all_internal(kind)) - @cache[all_cache_key(kind)] = items if !@cache.nil? - items - end - - def upsert(kind, item) - new_state = @core.upsert_internal(kind, item) - - if !@cache.nil? - @cache[item_cache_key(kind, item[:key])] = [new_state] - @cache.delete(all_cache_key(kind)) - end - end - - def delete(kind, key, version) - upsert(kind, { key: key, version: version, deleted: true }) - end - - def initialized? - return true if @inited.value - - if @cache.nil? - result = @core.initialized_internal? - else - result = @cache[inited_cache_key] - if result.nil? - result = @core.initialized_internal? - @cache[inited_cache_key] = result - end - end - - @inited.make_true if result - result - end - - def stop - @core.stop - end - - private - - # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - # The result of a call to get_all_internal is cached using the "kind" object as a key. - def all_cache_key(kind) - kind - end - - # The result of initialized_internal? is cached using this key. - def inited_cache_key - "$inited" - end - - def item_if_not_deleted(item) - (item.nil? || item[:deleted]) ? nil : item - end - - def items_if_not_deleted(items) - items.select { |key, item| !item[:deleted] } - end - end - - # - # This module describes the methods that you must implement on your own object in order to - # use {CachingStoreWrapper}. - # - module FeatureStoreCore - # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, - # but the wrapper will take care of updating the cache if caching is enabled. - # - # @param all_data [Hash] a hash where each key is one of the data kind objects, and each - # value is in turn a hash of string keys to entities - # - def init_internal(all_data) - end - - # - # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} - # except that 1. 
the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @param key [String] the unique key of the entity to get - # @return [Hash] the entity; nil if the key was not found - # - def get_internal(kind, key) - end - - # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} - # except that 1. the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @return [Hash] a hash where each key is the entity's `:key` property and each value - # is the entity - # - def get_all_internal(kind) - end - - # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} - # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. - # the method is expected to return the final state of the entity (i.e. either the `item` - # parameter if the update succeeded, or the previously existing entity in the store if the - # update failed; this is used for the caching logic). - # - # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} - # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. - # - # @param kind [Object] the kind of entity to add or update - # @param item [Hash] the entity to add or update - # @return [Hash] the entity as it now exists in the store after the update - # - def upsert_internal(kind, item) - end - - # - # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern - # for efficiency, because the wrapper will use caching and memoization in order to call the method - # as little as possible. - # - # @return [Boolean] true if the store is in an initialized state - # - def initialized_internal? - end - - # - # Performs any necessary cleanup to shut down the store when the client is being shut down. - # - def stop - end - end + # code is in ldclient-rb/integrations/util/ end end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb new file mode 100644 index 00000000..54221f76 --- /dev/null +++ b/lib/ldclient-rb/integrations/redis.rb @@ -0,0 +1,48 @@ +require "ldclient-rb/impl/integrations/redis_impl" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def self.default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. 
Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). + # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + end +end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb new file mode 100644 index 00000000..58ecb2c4 --- /dev/null +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -0,0 +1,222 @@ +require "concurrent/atomics" + +require "ldclient-rb/expiring_cache" + +module LaunchDarkly + module Integrations + module Util + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? 
+ @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? + @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[inited_cache_key] + if result.nil? + result = @core.initialized_internal? + @cache[inited_cache_key] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. + def all_cache_key(kind) + kind + end + + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. 
+ # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 6226cbe1..510e1636 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,5 +1,8 @@ module LaunchDarkly + # + # Mixins that define the required methods of various pluggable components used by the client. + # module Interfaces # # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 97cec272..32a9507d 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,9 @@ require "concurrent/atomics" require "json" +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/integrations/redis_impl" + module LaunchDarkly # # An implementation of the LaunchDarkly client's feature store that uses a Redis @@ -16,146 +19,12 @@ module LaunchDarkly # implementation class may change in the future. # class RedisFeatureStore - begin - require "redis" - require "connection_pool" - REDIS_ENABLED = true - rescue ScriptError, StandardError - REDIS_ENABLED = false - end - include LaunchDarkly::Interfaces::FeatureStore - # - # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
- # - class RedisFeatureStoreCore - def initialize(opts) - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix - @logger = opts[:logger] || Config.default_logger - @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented - - @stopped = Concurrent::AtomicBoolean.new(false) - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ - and prefix: #{@prefix}") - end - end - - def init_internal(all_data) - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - end - end - @logger.info { "RedisFeatureStore: initialized with #{count} items" } - end - - def get_internal(kind, key) - with_connection do |redis| - get_redis(redis, kind, key) - end - end - - def get_all_internal(kind) - fs = {} - with_connection do |redis| - hashfs = redis.hgetall(items_key(kind)) - hashfs.each do |k, json_item| - f = JSON.parse(json_item, symbolize_names: true) - fs[k.to_sym] = f - end - end - fs - end - - def upsert_internal(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - final_item = new_item - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(redis, kind, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - end - else - final_item = old_item - action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ -in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - final_item - end - - def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } - end - - def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(redis, kind, key) - json_item = redis.hget(items_key(kind), key) - json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) - end - end - - private_constant :RedisFeatureStoreCore + # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating + # to RedisFeatureStoreCore where the actual database logic is. 
This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # away from exposing these concrete classes and use factory methods instead. # # Constructor for a RedisFeatureStore instance. @@ -171,12 +40,8 @@ def get_redis(redis, kind, key) # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) - if !REDIS_ENABLED - raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") - end - - @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end # diff --git a/spec/integrations_util_spec.rb b/spec/integrations/store_wrapper_spec.rb similarity index 100% rename from spec/integrations_util_spec.rb rename to spec/integrations/store_wrapper_spec.rb From fa831f9a3fc6db3bf1eabff4030eaa13ae11d03c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:18:27 -0800 Subject: [PATCH 053/292] misc cleanup --- lib/ldclient-rb/impl/integrations/redis_impl.rb | 3 --- lib/ldclient-rb/integrations.rb | 1 + lib/ldclient-rb/integrations/redis.rb | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 325b936e..497b01c5 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -1,9 +1,6 @@ require "concurrent/atomics" require "json" -require "ldclient-rb/integrations/util/store_wrapper" -require "ldclient-rb/redis_store" # eventually that file should be moved inside this one - module LaunchDarkly module Impl module Integrations diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 02b2d435..c48074a0 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,5 @@ require "ldclient-rb/integrations/redis" +require "ldclient-rb/integrations/util/store_wrapper" module LaunchDarkly # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 54221f76..b81097c6 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/impl/integrations/redis_impl" +require "ldclient-rb/redis_store" # eventually we will just refer to impl/integrations/redis_impl directly module LaunchDarkly module Integrations From ea68da433cc5eaeaeac8c557364c94a20a21d93f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:33:52 -0800 Subject: [PATCH 054/292] initial DynamoDB implementation --- .circleci/config.yml | 11 + ldclient-rb.gemspec | 1 + .../impl/integrations/dynamodb_impl.rb | 231 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 10 + lib/ldclient-rb/integrations/dynamodb.rb | 31 +++ .../dynamodb_feature_store_spec.rb | 77 ++++++ 6 files changed, 361 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/dynamodb_impl.rb create mode 100644 lib/ldclient-rb/integrations/dynamodb.rb create mode 100644 spec/integrations/dynamodb_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..f19ae7bc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -34,26 +34,31 @@ jobs: docker: - image: 
circleci/ruby:2.2.9-jessie - image: redis + - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie - image: redis + - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - image: circleci/ruby:2.4.4-stretch - image: redis + - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk - image: redis + - image: amazon/dynamodb-local # The following very slow job uses an Ubuntu container to run the Ruby versions that # CircleCI doesn't provide Docker images for. @@ -63,8 +68,11 @@ jobs: environment: - RUBIES: "jruby-9.1.17.0" steps: + - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - run: sudo apt-get -q update - run: sudo apt-get -qy install redis-server + - run: sudo apt-cache policy docker-ce + - run: sudo apt-get -qy install docker-ce - checkout - run: name: install all Ruby versions @@ -84,6 +92,9 @@ jobs: bundle install; mv Gemfile.lock "Gemfile.lock.$i" done + - run: + command: docker run -p 8000:8000 amazon/dynamodb-local + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..8b1f4cc7 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -21,6 +21,7 @@ Gem::Specification.new do |spec| spec.require_paths = ["lib"] spec.extensions = 'ext/mkrf_conf.rb' + spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb new file mode 100644 index 00000000..8eb1dd2a --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -0,0 +1,231 @@ +require "concurrent/atomics" +require "json" + +module LaunchDarkly + module Impl + module Integrations + module DynamoDB + # + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # + class DynamoDBFeatureStoreCore + begin + require "aws-sdk-dynamodb" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + begin + require "aws-sdk" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + AWS_SDK_ENABLED = false + end + end + + PARTITION_KEY = "namespace" + SORT_KEY = "key" + + VERSION_ATTRIBUTE = "version" + ITEM_JSON_ATTRIBUTE = "item" + + def initialize(table_name, opts) + if !AWS_SDK_ENABLED + raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + end + + @table_name = table_name + @prefix = opts[:prefix] + @logger = opts[:logger] || Config.default_logger + + @stopped = Concurrent::AtomicBoolean.new(false) + + if !opts[:existing_client].nil? + @client = opts[:existing_client] + else + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + end + + @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
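          # After the batch writes below, the table should contain exactly the items in all_data
          # (plus the "$inited" marker); any leftover items from a previous init get deleted.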
+ unused_old_keys = read_existing_keys(all_data.keys) + + requests = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + requests.push({ put_request: { item: marshal_item(kind, item) } }) + unused_old_keys.delete([ namespace_for_kind(kind), item[:key] ]) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + del_item = make_keys_hash(tuple[0], tuple[1]) + requests.push({ delete_request: { key: del_item } }) + end + + # Now set the special key that we check in initialized_internal? + inited_item = make_keys_hash(inited_key, inited_key) + requests.push({ put_request: { item: inited_item } }) + + DynamoDBUtil.batch_write_requests(@client, @table_name, requests) + + @logger.info { "Initialized table #{@table_name} with #{num_items} items" } + end + + def get_internal(kind, key) + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + req = make_query_for_kind(kind) + while true + resp = @client.query(req) + resp.items.each do |item| + item_out = unmarshal_item(item) + items_out[item_out[:key].to_sym] = item_out + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + items_out + end + + def upsert_internal(kind, new_item) + encoded_item = marshal_item(kind, new_item) + begin + @client.put_item({ + table_name: @table_name, + item: encoded_item, + condition_expression: "attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY, + "#version" => VERSION_ATTRIBUTE + }, + expression_attribute_values: { + ":version" => new_item[:version] + } + }) + new_item + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException + # The item was not updated because there's a newer item in the database. + # We must now read the item that's in the database and return it, so CachingStoreWrapper can cache it. + get_internal(kind, new_item[:key]) + end + end + + def initialized_internal? + resp = get_item_by_keys(inited_key, inited_key) + !resp.item.nil? && resp.item.length > 0 + end + + def stop + # AWS client doesn't seem to have a close method + end + + private + + def prefixed_namespace(base_str) + (@prefix.nil? || @prefix == "") ? 
base_str : "#{@prefix}:#{base_str}" + end + + def namespace_for_kind(kind) + prefixed_namespace(kind[:namespace]) + end + + def inited_key + prefixed_namespace("$inited") + end + + def make_keys_hash(namespace, key) + { + PARTITION_KEY => namespace, + SORT_KEY => key + } + end + + def make_query_for_kind(kind) + { + table_name: @table_name, + consistent_read: true, + key_conditions: { + PARTITION_KEY => { + comparison_operator: "EQ", + attribute_value_list: [ namespace_for_kind(kind) ] + } + } + } + end + + def get_item_by_keys(namespace, key) + @client.get_item({ + table_name: @table_name, + key: make_keys_hash(namespace, key) + }) + end + + def read_existing_keys(kinds) + keys = Set.new + kinds.each do |kind| + req = make_query_for_kind(kind).merge({ + projection_expression: "#namespace, #key", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY + } + }) + while true + resp = @client.query(req) + resp.items.each do |item| + namespace = item[PARTITION_KEY] + key = item[SORT_KEY] + keys.add([ namespace, key ]) + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + end + keys + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class DynamoDBUtil + # + # Calls client.batch_write_item as many times as necessary to submit all of the given requests. + # The requests array is consumed. + # + def self.batch_write_requests(client, table, requests) + batch_size = 25 + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + client.batch_write_item({ request_items: { table => chunk } }) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index c48074a0..029c4243 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -6,6 +7,15 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). + # + # @since 5.5.0 + # + module DynamoDB + # code is in ldclient-rb/impl/integrations/dynamodb_impl + end + # # Integration with [Redis](https://redis.io/). # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb new file mode 100644 index 00000000..553f54e9 --- /dev/null +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -0,0 +1,31 @@ +require "ldclient-rb/impl/integrations/dynamodb_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module DynamoDB + # + # Creates a DynamoDB-backed persistent feature store. + # + # To use this method, you must first have the `aws_sdk` gem installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
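      #
      # For illustration only, a minimal setup might look roughly like this (the table name,
      # region, and SDK key are placeholders, not defaults):
      #
      # @example
      #   store = LaunchDarkly::Integrations::DynamoDB.new_feature_store("my-ld-table",
      #     dynamodb_opts: { region: "us-east-1" })
      #   config = LaunchDarkly::Config.new(feature_store: store)
      #   client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)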
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb new file mode 100644 index 00000000..4a0e3cbf --- /dev/null +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -0,0 +1,77 @@ +require "feature_store_spec_base" +require "aws-sdk-dynamodb" +require "spec_helper" + + +$table_name = 'LD_DYNAMODB_TEST_TABLE' +$endpoint = 'http://localhost:8000' +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$dynamodb_opts = { + credentials: Aws::Credentials.new("key", "secret"), + region: "us-east-1", + endpoint: $endpoint +} + +$base_opts = { + dynamodb_opts: $dynamodb_opts, + prefix: $my_prefix, + logger: $null_log +} + +def create_dynamodb_store(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_dynamodb_store_uncached(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 0 })) +end + +def create_table_if_necessary + client = create_test_client + begin + client.describe_table({ table_name: $table_name }) + return # no error, table exists + rescue Blahbhbhba + # fall through to code below - we'll create the table + end + + req = { + table_name: $table_name, + key_schema: [ + { attribute_name: "namespace", key_type: "HASH" }, + { attribute_name: "key", key_type: "RANGE" } + ], + attribute_definitions: [ + { attribute_name: "namespace", attribute_type: "S" }, + { attribute_name: "key", attribute_type: "S" } + ] + } + client.create_table(req) + + # When DynamoDB creates a table, it may not be ready to use immediately +end + +def create_test_client + Aws::DynamoDB::Client.new($dynamodb_opts) +end + + +describe "DynamoDB feature store" do + + # These tests will all fail if there isn't a local DynamoDB instance running. 
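  # (The CircleCI config in this same commit runs one via the amazon/dynamodb-local Docker
  # image, e.g. `docker run -p 8000:8000 amazon/dynamodb-local`, which matches $endpoint above.)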
+ + create_table_if_necessary + + context "with local cache" do + include_examples "feature_store", method(:create_dynamodb_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_dynamodb_store_uncached) + end +end From bde227450dee5c868e099fbc5c20de7c80b272ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:56:38 -0800 Subject: [PATCH 055/292] fix exception name --- spec/integrations/dynamodb_feature_store_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4a0e3cbf..98e32ed6 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -36,7 +36,7 @@ def create_table_if_necessary begin client.describe_table({ table_name: $table_name }) return # no error, table exists - rescue Blahbhbhba + rescue Aws::DynamoDB::Errors::ResourceNotFoundException # fall through to code below - we'll create the table end From 4e493172c97a5cbf745176167d3b4a5aec637e45 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:59:17 -0800 Subject: [PATCH 056/292] fix test setup --- spec/integrations/dynamodb_feature_store_spec.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 98e32ed6..38104fb3 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -49,7 +49,11 @@ def create_table_if_necessary attribute_definitions: [ { attribute_name: "namespace", attribute_type: "S" }, { attribute_name: "key", attribute_type: "S" } - ] + ], + provisioned_throughput: { + read_capacity_units: 1, + write_capacity_units: 1 + } } client.create_table(req) From c71bbec59a1b76f933c21f9acc7d55860d1b4303 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:32:30 -0800 Subject: [PATCH 057/292] comments --- lib/ldclient-rb/integrations.rb | 6 ++++++ lib/ldclient-rb/integrations/dynamodb.rb | 6 +++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 029c4243..bfaed2eb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -10,6 +10,9 @@ module Integrations # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # + # Note that in order to use this integration, you must first install one of the AWS SDK gems: either + # `aws-sdk-dynamodb`, or the full `aws-sdk`. + # # @since 5.5.0 # module DynamoDB @@ -19,6 +22,9 @@ module DynamoDB # # Integration with [Redis](https://redis.io/). # + # Note that in order to use this integration, you must first install the `redis` and `connection-pool` + # gems. + # # @since 5.5.0 # module Redis diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 553f54e9..66d3b583 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -7,9 +7,9 @@ module DynamoDB # # Creates a DynamoDB-backed persistent feature store. # - # To use this method, you must first have the `aws_sdk` gem installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). 
+ # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or + # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property + # of your client configuration ({LaunchDarkly::Config}). # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) From cfe3b188df3ef64139310bc73dce03e9891c5883 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:48:04 -0800 Subject: [PATCH 058/292] readme --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ead2bb6b..43819554 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,11 @@ else end ``` +Database integrations +--------------------- + +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + Using flag data from a file --------------------------- For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. @@ -153,9 +158,9 @@ About LaunchDarkly * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Python Twisted](http://docs.launchdarkly.com/docs/python-twisted-sdk-reference "LaunchDarkly Python Twisted SDK") * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") + * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") From 69cf890825ab41a5529242b0f4cb90f46bb81a5b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 12:08:41 -0800 Subject: [PATCH 059/292] fix doc comment --- lib/ldclient-rb/integrations/dynamodb.rb | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 66d3b583..c9ded019 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -12,14 +12,12 @@ module DynamoDB # of your client configuration ({LaunchDarkly::Config}). 
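      #
      # As a rough sketch (the table name and client options are placeholders), an
      # already-constructed DynamoDB client can also be supplied via `:existing_client`:
      #
      # @example
      #   ddb = Aws::DynamoDB::Client.new(region: "us-east-1")
      #   store = LaunchDarkly::Integrations::DynamoDB.new_feature_store("my-ld-table",
      #     existing_client: ddb)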
# # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(table_name, opts) From 321eb6eeb247764437233f8478b5ac3c1f9e6492 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 14:45:13 -0800 Subject: [PATCH 060/292] greatly improve documentation comments --- lib/ldclient-rb.rb | 7 + lib/ldclient-rb/cache_store.rb | 1 + lib/ldclient-rb/config.rb | 291 ++++++++++++------ lib/ldclient-rb/evaluation.rb | 64 +++- lib/ldclient-rb/event_summarizer.rb | 3 + lib/ldclient-rb/events.rb | 16 + lib/ldclient-rb/expiring_cache.rb | 1 + lib/ldclient-rb/file_data_source.rb | 8 +- lib/ldclient-rb/flags_state.rb | 5 +- lib/ldclient-rb/impl.rb | 2 + .../integrations/util/store_wrapper.rb | 3 + lib/ldclient-rb/interfaces.rb | 42 +++ lib/ldclient-rb/ldclient.rb | 116 ++++--- lib/ldclient-rb/memoized_value.rb | 2 + lib/ldclient-rb/newrelic.rb | 1 + lib/ldclient-rb/non_blocking_thread_pool.rb | 6 +- lib/ldclient-rb/polling.rb | 1 + lib/ldclient-rb/requestor.rb | 3 +- lib/ldclient-rb/simple_lru_cache.rb | 1 + lib/ldclient-rb/stream.rb | 8 + lib/ldclient-rb/user_filter.rb | 1 + lib/ldclient-rb/util.rb | 1 + lib/sse_client/sse_client.rb | 7 + scripts/gendocs.sh | 9 + 24 files changed, 446 insertions(+), 153 deletions(-) create mode 100755 scripts/gendocs.sh diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index e355a304..e5477ecb 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,3 +1,10 @@ + +# +# Namespace for the LaunchDarkly Ruby SDK. +# +module LaunchDarkly +end + require "ldclient-rb/version" require "ldclient-rb/interfaces" require "ldclient-rb/util" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..a0a50fbf 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -7,6 +7,7 @@ module LaunchDarkly # # @see https://github.com/plataformatec/faraday-http-cache # @see https://github.com/ruby-concurrency/thread_safe + # @private # class ThreadSafeMemoryStore # diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index dc89d30a..e16e998a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -8,66 +8,35 @@ module LaunchDarkly # # class Config + # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # # Constructor for creating custom LaunchDarkly configurations. 
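    #
    # As an illustrative sketch only (the values shown are arbitrary, `YOUR_SDK_KEY` is a
    # placeholder, and the Redis store assumes the gems described in
    # {LaunchDarkly::Integrations::Redis} are installed):
    #
    # @example
    #   config = LaunchDarkly::Config.new(
    #     flush_interval: 10,
    #     private_attribute_names: ["email"],
    #     feature_store: LaunchDarkly::Integrations::Redis.new_feature_store(prefix: "launchdarkly")
    #   )
    #   client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)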
# # @param opts [Hash] the configuration options - # @option opts [Logger] :logger A logger to use for messages from the - # LaunchDarkly client. Defaults to the Rails logger in a Rails - # environment, or stdout otherwise. - # @option opts [String] :base_uri ("https://app.launchdarkly.com") The base - # URL for the LaunchDarkly server. Most users should use the default value. - # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") The - # URL for the LaunchDarkly streaming events server. Most users should use the default value. - # @option opts [String] :events_uri ("https://events.launchdarkly.com") The - # URL for the LaunchDarkly events server. Most users should use the default value. - # @option opts [Integer] :capacity (10000) The capacity of the events - # buffer. The client buffers up to this many events in memory before - # flushing. If the capacity is exceeded before the buffer is flushed, - # events will be discarded. - # @option opts [Float] :flush_interval (30) The number of seconds between - # flushes of the event buffer. - # @option opts [Float] :read_timeout (10) The read timeout for network - # connections in seconds. - # @option opts [Float] :connect_timeout (2) The connect timeout for network - # connections in seconds. - # @option opts [Object] :cache_store A cache store for the Faraday HTTP caching - # library. Defaults to the Rails cache in a Rails environment, or a - # thread-safe in-memory store otherwise. - # @option opts [Object] :feature_store A store for feature flags and related data. Defaults to an in-memory - # cache, or you can use RedisFeatureStore. - # @option opts [Boolean] :use_ldd (false) Whether you are using the LaunchDarkly relay proxy in - # daemon mode. In this configuration, the client will not use a streaming connection to listen - # for updates, but instead will get feature state from a Redis instance. The `stream` and - # `poll_interval` options will be ignored if this option is set to true. - # @option opts [Boolean] :offline (false) Whether the client should be initialized in - # offline mode. In offline mode, default values are returned for all flags and no - # remote network requests are made. - # @option opts [Float] :poll_interval (30) The number of seconds between polls for flag updates - # if streaming is off. - # @option opts [Boolean] :stream (true) Whether or not the streaming API should be used to receive flag updates. - # Streaming should only be disabled on the advice of LaunchDarkly support. - # @option opts [Boolean] all_attributes_private (false) If true, all user attributes (other than the key) - # will be private, not just the attributes specified in `private_attribute_names`. - # @option opts [Array] :private_attribute_names Marks a set of attribute names private. Any users sent to - # LaunchDarkly with this configuration active will have attributes with these names removed. - # @option opts [Boolean] :send_events (true) Whether or not to send events back to LaunchDarkly. - # This differs from `offline` in that it affects only the sending of client-side events, not - # streaming or polling for events from the server. - # @option opts [Integer] :user_keys_capacity (1000) The number of user keys that the event processor - # can remember at any one time, so that duplicate user details will not be sent in analytics events. - # @option opts [Float] :user_keys_flush_interval (300) The interval in seconds at which the event - # processor will reset its set of known user keys. 
- # @option opts [Boolean] :inline_users_in_events (false) Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. - # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from - # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. - # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object - # as parameters, and returns an object that can obtain feature flag data and put it into the feature - # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. - # @return [type] [description] - # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # @option opts [Logger] :logger See {#logger}. + # @option opts [String] :base_uri ("https://app.launchdarkly.com") See {#base_uri}. + # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") See {#stream_uri}. + # @option opts [String] :events_uri ("https://events.launchdarkly.com") See {#events_uri}. + # @option opts [Integer] :capacity (10000) See {#capacity}. + # @option opts [Float] :flush_interval (30) See {#flush_interval}. + # @option opts [Float] :read_timeout (10) See {#read_timeout}. + # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. + # @option opts [Object] :cache_store See {#cache_store}. + # @option opts [Object] :feature_store See {#feature_store}. + # @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. + # @option opts [Boolean] :offline (false) See {#offline?}. + # @option opts [Float] :poll_interval (30) See {#poll_interval}. + # @option opts [Boolean] :stream (true) See {#stream?}. + # @option opts [Boolean] all_attributes_private (false) See {#all_attributes_private}. + # @option opts [Array] :private_attribute_names See {#private_attribute_names}. + # @option opts [Boolean] :send_events (true) See {#send_events}. + # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. + # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. + # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. + # @option opts [Object] :update_processor See {#update_processor}. + # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @stream_uri = (opts[:stream_uri] || Config.default_stream_uri).chomp("/") @@ -95,43 +64,56 @@ def initialize(opts = {}) end # - # The base URL for the LaunchDarkly server. + # The base URL for the LaunchDarkly server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly server. attr_reader :base_uri # - # The base URL for the LaunchDarkly streaming server. + # The base URL for the LaunchDarkly streaming server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly streaming server. attr_reader :stream_uri # - # The base URL for the LaunchDarkly events server. + # The base URL for the LaunchDarkly events server. 
This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly events server. attr_reader :events_uri # # Whether streaming mode should be enabled. Streaming mode asynchronously updates - # feature flags in real-time using server-sent events. + # feature flags in real-time using server-sent events. Streaming is enabled by default, and + # should only be disabled on the advice of LaunchDarkly support. + # @return [Boolean] # - # @return [Boolean] True if streaming mode should be enabled def stream? @stream end # - # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, we do - # not use polling or streaming to get feature flag updates from the server, but instead - # read them from a Redis instance that is updated by the proxy. + # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not + # use polling or streaming to get feature flag updates from the server, but instead reads them + # from the {#feature_store feature store}, which is assumed to be a database that is populated by + # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) + # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # All other properties related to streaming or polling are ignored if this option is set to true. + # + # @return [Boolean] # - # @return [Boolean] True if using the LaunchDarkly relay proxy in daemon mode def use_ldd? @use_ldd end - # TODO docs + # + # Whether the client should be initialized in offline mode. In offline mode, default values are + # returned for all flags and no remote network requests are made. + # @return [Boolean] + # def offline? @offline end @@ -139,20 +121,23 @@ def offline? # # The number of seconds between flushes of the event buffer. Decreasing the flush interval means # that the event buffer is less likely to reach capacity. + # @return [Float] # - # @return [Float] The configured number of seconds between flushes of the event buffer. attr_reader :flush_interval # # The number of seconds to wait before polling for feature flag updates. This option has no - # effect unless streaming is disabled + # effect unless streaming is disabled. + # @return [Float] + # attr_reader :poll_interval # # The configured logger for the LaunchDarkly client. The client library uses the log to - # print warning and error messages. + # print warning and error messages. If not specified, this defaults to the Rails logger + # in a Rails environment, or stdout otherwise. + # @return [Logger] # - # @return [Logger] The configured logger attr_reader :logger # @@ -161,114 +146,208 @@ def offline? # the buffer is flushed, events will be discarded. # Increasing the capacity means that events are less likely to be discarded, # at the cost of consuming more memory. + # @return [Integer] # - # @return [Integer] The configured capacity of the event buffer attr_reader :capacity # - # The store for the Faraday HTTP caching library. Stores should respond to - # 'read' and 'write' requests. + # A store for HTTP caching. This must support the semantics used by the + # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults + # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. 
+ # @return [Object] # - # @return [Object] The configured store for the Faraday HTTP caching library. attr_reader :cache_store # - # The read timeout for network connections in seconds. + # The read timeout for network connections in seconds. This does not apply to the streaming + # connection, which uses a longer timeout since the server does not send data constantly. + # @return [Float] # - # @return [Float] The read timeout in seconds. attr_reader :read_timeout # # The connect timeout for network connections in seconds. + # @return [Float] # - # @return [Float] The connect timeout in seconds. attr_reader :connect_timeout # - # A store for feature flag configuration rules. + # A store for feature flags and related data. The client uses it to store all data received + # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to + # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. + # + # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # @return [LaunchDarkly::Interfaces::FeatureStore] # attr_reader :feature_store - # The proxy configuration string + # + # The proxy configuration string. + # @return [String] # attr_reader :proxy + # + # True if all user attributes (other than the key) should be considered private. This means + # that the attribute values will not be sent to LaunchDarkly in analytics events and will not + # appear on the LaunchDarkly dashboard. + # @return [Boolean] + # @see #private_attribute_names + # attr_reader :all_attributes_private + # + # A list of user attribute names that should always be considered private. This means that the + # attribute values will not be sent to LaunchDarkly in analytics events and will not appear on + # the LaunchDarkly dashboard. + # + # You can also specify the same behavior for an individual flag evaluation by storing an array + # of attribute names in the `:privateAttributeNames` property (note camelcase name) of the + # user object. + # + # @return [Array] + # @see #all_attributes_private + # attr_reader :private_attribute_names # - # Whether to send events back to LaunchDarkly. + # Whether to send events back to LaunchDarkly. This differs from {#offline?} in that it affects + # only the sending of client-side events, not streaming or polling for events from the server. + # @return [Boolean] # attr_reader :send_events # - # The number of user keys that the event processor can remember at any one time, so that - # duplicate user details will not be sent in analytics events. + # The number of user keys that the event processor can remember at any one time. This reduces the + # amount of duplicate user details sent in analytics events. + # @return [Integer] + # @see #user_keys_flush_interval # attr_reader :user_keys_capacity # # The interval in seconds at which the event processor will reset its set of known user keys. + # @return [Float] + # @see #user_keys_capacity # attr_reader :user_keys_flush_interval # - # Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. + # Whether to include full user details in every analytics event. By default, events will only + # include the user key, except for one "index" event that provides the full details for the user. + # The only reason to change this is if you are using the Analytics Data Stream. 
+ # @return [Boolean] # attr_reader :inline_users_in_events + # + # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, + # the client uses its standard polling or streaming implementation; this is customizable for + # testing purposes. + # @return [LaunchDarkly::Interfaces::UpdateProcessor] + # @deprecated The preferred way to set this is now with {#update_processor_factory}. + # attr_reader :update_processor + # + # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly + # By default, the client uses its standard polling or streaming implementation; this is + # customizable for testing purposes. + # + # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It + # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # + # @return [lambda] + # @see FileDataSource + # attr_reader :update_processor_factory - + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. - # # @return [Config] The default LaunchDarkly configuration. + # def self.default Config.new end + # + # The default value for {#capacity}. + # @return [Integer] 10000 + # def self.default_capacity 10000 end + # + # The default value for {#base_uri}. + # @return [String] "https://app.launchdarkly.com" + # def self.default_base_uri "https://app.launchdarkly.com" end + # + # The default value for {#stream_uri}. + # @return [String] "https://stream.launchdarkly.com" + # def self.default_stream_uri "https://stream.launchdarkly.com" end + # + # The default value for {#events_uri}. + # @return [String] "https://events.launchdarkly.com" + # def self.default_events_uri "https://events.launchdarkly.com" end + # + # The default value for {#cache_store}. + # @return [Object] the Rails cache if in Rails, or a simple in-memory implementation otherwise + # def self.default_cache_store defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : ThreadSafeMemoryStore.new end + # + # The default value for {#flush_interval}. + # @return [Float] 10 + # def self.default_flush_interval 10 end + # + # The default value for {#read_timeout}. + # @return [Float] 10 + # def self.default_read_timeout 10 end + # + # The default value for {#connect_timeout}. + # @return [Float] 10 + # def self.default_connect_timeout 2 end + # + # The default value for {#proxy}. + # @return [String] nil + # def self.default_proxy nil end + # + # The default value for {#logger}. + # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) Rails.logger @@ -279,34 +358,66 @@ def self.default_logger end end + # + # The default value for {#stream?}. + # @return [Boolean] true + # def self.default_stream true end + # + # The default value for {#use_ldd?}. + # @return [Boolean] false + # def self.default_use_ldd false end + # + # The default value for {#feature_store}. + # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} + # def self.default_feature_store InMemoryFeatureStore.new end + # + # The default value for {#offline?}. + # @return [Boolean] false + # def self.default_offline false end + # + # The default value for {#poll_interval}. + # @return [Float] 30 + # def self.default_poll_interval 30 end + # + # The default value for {#send_events}. 
+ # @return [Boolean] true + # def self.default_send_events true end + # + # The default value for {#user_keys_capacity}. + # @return [Integer] 1000 + # def self.default_user_keys_capacity 1000 end + # + # The default value for {#user_keys_flush_interval}. + # @return [Float] 300 + # def self.default_user_keys_flush_interval 300 end diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f73eb1ed..f873a6e3 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,7 +2,7 @@ require "semantic" module LaunchDarkly - # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail def initialize(value, variation_index, reason) @@ -11,19 +11,66 @@ def initialize(value, variation_index, reason) @reason = reason end - # @return [Object] The result of the flag evaluation. This will be either one of the flag's - # variations or the default value that was passed to the `variation` method. + # + # The result of the flag evaluation. This will be either one of the flag's variations, or the + # default value that was passed to {LDClient#variation_detail}. It is the same as the return + # value of {LDClient#variation}. + # + # @return [Object] + # attr_reader :value - # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. - # 0 for the first variation - or `nil` if the default value was returned. + # + # The index of the returned value within the flag's list of variations. The first variation is + # 0, the second is 1, etc. This is `nil` if the default value was returned. + # + # @return [int|nil] + # attr_reader :variation_index - # @return [Hash] An object describing the main factor that influenced the flag evaluation value. + # + # An object describing the main factor that influenced the flag evaluation value. + # + # This object is currently represented as a Hash, which may have the following keys: + # + # `:kind`: The general category of reason. Possible values: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation + # * `'ERROR'`: the flag could not be evaluated, so the default value was returned + # + # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the + # matched rule (0 for the first rule). + # + # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. + # + # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of + # the prerequisite flag that failed. + # + # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: + # + # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had + # successfully initialized + # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag + # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. 
a + # rule specified a nonexistent variation + # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied + # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation + # + # @return [Hash] + # attr_reader :reason - # @return [boolean] True if the flag evaluated to the default value rather than to one of its - # variations. + # + # Tests whether the flag evaluation returned a default value. This is the same as checking + # whether {#variation_index} is nil. + # + # @return [Boolean] + # def default_value? variation_index.nil? end @@ -33,6 +80,7 @@ def ==(other) end end + # @private module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] diff --git a/lib/ldclient-rb/event_summarizer.rb b/lib/ldclient-rb/event_summarizer.rb index 1c55b524..c48a400f 100644 --- a/lib/ldclient-rb/event_summarizer.rb +++ b/lib/ldclient-rb/event_summarizer.rb @@ -1,11 +1,14 @@ module LaunchDarkly + # @private EventSummary = Struct.new(:start_date, :end_date, :counters) # Manages the state of summarizable information for the EventProcessor, including the # event counters and user deduplication. Note that the methods of this class are # deliberately not thread-safe; the EventProcessor is responsible for enforcing # synchronization across both the summarizer and the event queue. + # + # @private class EventSummarizer def initialize clear diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index e19d6b02..cbae5ac5 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -9,6 +9,10 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + private_constant :MAX_FLUSH_WORKERS + private_constant :CURRENT_SCHEMA_VERSION + + # @private class NullEventProcessor def add_event(event) end @@ -20,6 +24,7 @@ def stop end end + # @private class EventMessage def initialize(event) @event = event @@ -27,12 +32,15 @@ def initialize(event) attr_reader :event end + # @private class FlushMessage end + # @private class FlushUsersMessage end + # @private class SynchronousMessage def initialize @reply = Concurrent::Semaphore.new(0) @@ -47,12 +55,15 @@ def wait_for_completion end end + # @private class TestSyncMessage < SynchronousMessage end + # @private class StopMessage < SynchronousMessage end + # @private class EventProcessor def initialize(sdk_key, config, client = nil) @queue = Queue.new @@ -99,6 +110,7 @@ def wait_until_inactive end end + # @private class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @@ -252,8 +264,10 @@ def handle_response(res) end end + # @private FlushPayload = Struct.new(:events, :summary) + # @private class EventBuffer def initialize(capacity, logger) @capacity = capacity @@ -290,6 +304,7 @@ def clear end end + # @private class EventPayloadSendTask def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) @@ -327,6 +342,7 @@ def run(sdk_key, config, client, payload, formatter) end end + # @private class EventOutputFormatter def initialize(config) @inline_users = config.inline_users_in_events diff --git a/lib/ldclient-rb/expiring_cache.rb b/lib/ldclient-rb/expiring_cache.rb index 6d8c48f8..fa6051c9 100644 --- a/lib/ldclient-rb/expiring_cache.rb +++ b/lib/ldclient-rb/expiring_cache.rb @@ -6,6 +6,7 @@ module LaunchDarkly # * made thread-safe # * removed many unused methods # * reading a key does not reset its expiration time, only writing + # @private class ExpiringCache def 
initialize(max_size, ttl) @max_size = max_size diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index da80f26a..120276fc 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -7,12 +7,15 @@ module LaunchDarkly # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' # gem has been provided by the host app. + # @private @@have_listen = false begin require 'listen' @@have_listen = true rescue LoadError end + + # @private def self.have_listen? @@have_listen end @@ -45,7 +48,7 @@ def self.have_listen? # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all # # The output will look something like this (but with many more properties): # @@ -92,6 +95,8 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource + include LaunchDarkly::Interfaces::UpdateProcessor + # # Returns a factory for the file data source component. # @@ -116,6 +121,7 @@ def self.factory(options={}) end end + # @private class FileDataSourceImpl def initialize(feature_store, logger, options={}) @feature_store = feature_store diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index b761149c..4efe1404 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,8 +3,8 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. Serializing this object to JSON using - # JSON.generate (or the to_json method) will produce the appropriate data structure for + # calling the {LDClient#all_flags_state}. Serializing this object to JSON using + # `JSON.generate` (or the `to_json` method) will produce the appropriate data structure for # bootstrapping the LaunchDarkly JavaScript client. # class FeatureFlagsState @@ -15,6 +15,7 @@ def initialize(valid) end # Used internally to build the state map. + # @private def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 85079baf..3df0d7e3 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -4,6 +4,8 @@ module LaunchDarkly # Low-level implementation classes. Everything in this module should be considered non-public # and subject to change with any release. # + # @since 5.5.0 + # module Impl # code is in ldclient-rb/impl/ end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 58ecb2c4..46a648c1 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -153,6 +153,7 @@ module FeatureStoreCore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init_internal(all_data) end @@ -214,6 +215,8 @@ def initialized_internal? 
# # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # def stop end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 510e1636..c9c38cfe 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -35,6 +35,7 @@ module FeatureStore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init(all_data) end @@ -67,6 +68,7 @@ def all(kind) # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update + # @return [void] # def upsert(kind, item) end @@ -79,6 +81,7 @@ def upsert(kind, item) # @param kind [Object] the kind of entity to delete # @param key [String] the unique key of the entity # @param version [Integer] the entity must have a lower version than this to be deleted + # @return [void] # def delete(kind, key, version) end @@ -98,6 +101,45 @@ def initialized? # # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # + def stop + end + end + + # + # Mixin that defines the required methods of an update processor implementation. This is + # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # + # The client has its own standard implementation, which uses either a streaming connection or + # polling depending on your configuration. Normally you will not need to use another one + # except for testing purposes. {FileDataSource} provides one such test fixture. + # + module UpdateProcessor + # + # Checks whether the processor has finished initializing. Initialization is considered done + # once it has received one complete data set from LaunchDarkly. + # + # @return [Boolean] true if initialization is complete + # + def initialized? + end + + # + # Puts the processor into an active state. Normally this means it will make its first + # connection attempt to LaunchDarkly. If `start` has already been called, calling it again + # should simply return the same value as the first call. + # + # @return [Concurrent::Event] an Event which will be set once initialization is complete + # + def start + end + + # + # Puts the processor into an inactive state and releases all of its resources. + # This state should be considered permanent (`start` does not have to work after `stop`). + # def stop end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f8a75780..ffd82084 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -10,7 +10,6 @@ module LaunchDarkly # A client for LaunchDarkly. Client instances are thread-safe. Users # should create a single client instance for the lifetime of the application. # - # class LDClient include Evaluation # @@ -18,7 +17,6 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # - # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object # @@ -57,15 +55,41 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) end end + # + # Tells the client that all pending analytics events should be delivered as soon as possible. 
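+  # For example (a minimal sketch; the user key shown is hypothetical):
+  #
+  #     client.identify(key: "user@example.com")
+  #     client.flush  # push the queued event now instead of waiting for the next automatic flush
+  #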
+ # + # When the LaunchDarkly client generates analytics events (from {#variation}, {#variation_detail}, + # {#identify}, or {#track}), they are queued on a worker thread. The event thread normally + # sends all queued events to LaunchDarkly at regular intervals, controlled by the + # {Config#flush_interval} option. Calling `flush` triggers a send without waiting for the + # next interval. + # + # Flushing is asynchronous, so this method will return before it is complete. However, if you + # call {#close}, events are guaranteed to be sent before that method returns. + # def flush @event_processor.flush end - def toggle?(key, user, default = False) + # + # @param key [String] the feature flag key + # @param user [Hash] the user properties + # @param default [Boolean] (false) the value to use if the flag cannot be evaluated + # @return [Boolean] the flag value + # @deprecated Use {#variation} instead. + # + def toggle?(key, user, default = false) @config.logger.warn { "[LDClient] toggle? is deprecated. Use variation instead" } variation(key, user, default) end + # + # Creates a hash string that can be used by the JavaScript SDK to identify a user. + # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # + # @param user [Hash] the user properties + # @return [String] a hash string + # def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end @@ -78,13 +102,13 @@ def initialized? # # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a +:key+ . + # the user hash should contain a `:key`. # # @example Basic user hash # {key: "user@example.com"} # - # For authenticated users, the +:key+ should be the unique identifier for - # your user. For anonymous users, the +:key+ should be a session identifier + # For authenticated users, the `:key` should be the unique identifier for + # your user. For anonymous users, the `:key` should be a session identifier # or cookie. In either case, the only requirement is that the key # is unique to a user. # @@ -93,7 +117,7 @@ def initialized? # @example More complete user hash # {key: "user@example.com", ip: "127.0.0.1", country: "US"} # - # The user hash can contain arbitrary custom attributes stored in a +:custom+ sub-hash: + # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: # # @example A user hash with custom attributes # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} @@ -113,66 +137,61 @@ def variation(key, user, default) end # - # Determines the variation of a feature flag for a user, like `variation`, but also + # Determines the variation of a feature flag for a user, like {#variation}, but also # provides additional information about how this value was calculated. # - # The return value of `variation_detail` is an `EvaluationDetail` object, which has - # three properties: - # - # `value`: the value that was calculated for this user (same as the return value - # of `variation`) - # - # `variation_index`: the positional index of this value in the flag, e.g. 0 for the - # first variation - or `nil` if the default value was returned - # - # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` - # property will be one of the following: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and - # `:ruleId` properties indicate the positional index and unique identifier of the rule - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation; the - # `:prerequisiteKey` property indicates the key of the prerequisite that failed - # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due - # to an unexpected error, and therefore returned the default value; the `:errorKind` - # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # The return value of `variation_detail` is an {EvaluationDetail} object, which has + # three properties: the result value, the positional index of this value in the flag's + # list of variations, and an object describing the main reason why this value was + # selected. See {EvaluationDetail} for more on these properties. # - # The `reason` will also be included in analytics events, if you are capturing - # detailed event data for this flag. + # Calling `variation_detail` instead of `variation` also causes the "reason" data to + # be included in analytics events, if you are capturing detailed event data for this flag. # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag # @param default the default value of the flag # - # @return an `EvaluationDetail` object describing the result + # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end # - # Registers the user + # Registers the user. This method simply creates an analytics event containing the user + # properties, so that LaunchDarkly will know about that user if it does not already. # - # @param [Hash] The user to register + # Calling {#variation} or {#variation_detail} also sends the user information to + # LaunchDarkly (if events are enabled), so you only need to use {#identify} if you + # want to identify the user without evaluating a flag. # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. + # + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @return [void] + # def identify(user) sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end # - # Tracks that a user performed an event + # Tracks that a user performed an event. This method creates a "custom" analytics event + # containing the specified event name (key), user properties, and optional data. + # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. # # @param event_name [String] The name of the event - # @param user [Hash] The user that performed the event. 
This should be the same user hash used in calls to {#toggle?} + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @param data [Hash] A hash containing any additional data associated with the event - # # @return [void] + # def track(event_name, user, data) sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) @@ -181,7 +200,7 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of all_flags. + # events correctly if you pass the result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values @@ -191,21 +210,21 @@ def all_flags(user) end # - # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # Returns a {FeatureFlagsState} object that encapsulates the state of all feature flags for a given user, # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # # @param user [Hash] The end user requesting the feature flags - # @param options={} [Hash] Optional parameters to control how the state is generated + # @param options [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state (see `variation_detail`). By default, they are not included. + # in the state (see {#variation_detail}). By default, they are not included. # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is - # normally only used for event generation - such as flag versions and evaluation reasons - should be - # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size - # of the JSON data if you are passing the flag state to the front end. - # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. + # @return [FeatureFlagsState] a {FeatureFlagsState} object which can be serialized to JSON # def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? @@ -246,7 +265,7 @@ def all_flags_state(user, options={}) end # - # Releases all network connections and other resources held by the client, making it no longer usable + # Releases all network connections and other resources held by the client, making it no longer usable. # # @return [void] def close @@ -351,6 +370,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) # # Used internally when the client is offline. 
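+  # For example, the client substitutes this no-op implementation for the usual streaming or
+  # polling data source when it is constructed in offline mode (the SDK key below is a placeholder):
+  #
+  #     config = LaunchDarkly::Config.new(offline: true)
+  #     client = LaunchDarkly::LDClient.new("sdk-key", config)
+  #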
+ # @private # class NullUpdateProcessor def start diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/memoized_value.rb index 3ba766a6..ddddb7e0 100644 --- a/lib/ldclient-rb/memoized_value.rb +++ b/lib/ldclient-rb/memoized_value.rb @@ -2,6 +2,8 @@ module LaunchDarkly # Simple implementation of a thread-safe memoized value whose generator function will never be # run more than once, and whose value can be overridden by explicit assignment. + # Note that we no longer use this class and it will be removed in a future version. + # @private class MemoizedValue def initialize(&generator) @generator = generator diff --git a/lib/ldclient-rb/newrelic.rb b/lib/ldclient-rb/newrelic.rb index ed6eb4e4..5c9b7d48 100644 --- a/lib/ldclient-rb/newrelic.rb +++ b/lib/ldclient-rb/newrelic.rb @@ -1,4 +1,5 @@ module LaunchDarkly + # @private class LDNewRelic begin require "newrelic_rpm" diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/non_blocking_thread_pool.rb index 81b7ea14..28ec42a9 100644 --- a/lib/ldclient-rb/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/non_blocking_thread_pool.rb @@ -3,10 +3,10 @@ require "concurrent/executors" require "thread" -# Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather -# than blocking. Also provides a way to wait for all jobs to finish without shutting down. - module LaunchDarkly + # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather + # than blocking. Also provides a way to wait for all jobs to finish without shutting down. + # @private class NonBlockingThreadPool def initialize(capacity) @capacity = capacity diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4ecd93f8..4c6769f3 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -2,6 +2,7 @@ require "thread" module LaunchDarkly + # @private class PollingProcessor def initialize(config, requestor) @config = config diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 25cce121..3e244fbe 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -3,7 +3,7 @@ require "faraday/http_cache" module LaunchDarkly - + # @private class UnexpectedResponseError < StandardError def initialize(status) @status = status @@ -14,6 +14,7 @@ def status end end + # @private class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/simple_lru_cache.rb b/lib/ldclient-rb/simple_lru_cache.rb index 64b1a709..4eda4e27 100644 --- a/lib/ldclient-rb/simple_lru_cache.rb +++ b/lib/ldclient-rb/simple_lru_cache.rb @@ -2,6 +2,7 @@ module LaunchDarkly # A non-thread-safe implementation of a LRU cache set with only add and reset methods. 
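+  # A minimal usage sketch (the capacity value is arbitrary; only the constructor and `add` are shown):
+  #
+  #     cache = SimpleLRUCacheSet.new(1000)
+  #     cache.add("user-key")
+  #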
# Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb + # @private class SimpleLRUCacheSet def initialize(capacity) @values = {} diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 2151e945..660d7063 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -3,18 +3,26 @@ require "sse_client" module LaunchDarkly + # @private PUT = :put + # @private PATCH = :patch + # @private DELETE = :delete + # @private INDIRECT_PUT = :'indirect/put' + # @private INDIRECT_PATCH = :'indirect/patch' + # @private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes + # @private KEY_PATHS = { FEATURES => "/flags/", SEGMENTS => "/segments/" } + # @private class StreamProcessor def initialize(sdk_key, config, requestor) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index 449d8d2e..8cbf67ca 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -2,6 +2,7 @@ require "set" module LaunchDarkly + # @private class UserFilter def initialize(config) @all_attributes_private = config.all_attributes_private diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 707ba3ce..e303e18a 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,6 @@ module LaunchDarkly + # @private module Util def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb index 9f285360..5b7e0fd9 100644 --- a/lib/sse_client/sse_client.rb +++ b/lib/sse_client/sse_client.rb @@ -3,6 +3,13 @@ require "thread" require "uri" +# +# A lightweight Server-Sent Events implementation based on the `socketry` gem. +# +# This module will be moved to a separate gem in the future. +# +# @private +# module SSE # # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh new file mode 100755 index 00000000..6280355e --- /dev/null +++ b/scripts/gendocs.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +gem install --conservative yard +gem install --conservative redcarpet # provides Markdown formatting + +# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" +PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" + +yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md From 59759545c5e227f810655598f16e825b4903315e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:04:00 -0800 Subject: [PATCH 061/292] comment fixes --- lib/ldclient-rb/file_data_source.rb | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 120276fc..adc32ab6 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -25,8 +25,8 @@ def self.have_listen? # used in a test environment, to operate using a predetermined feature flag state without an # actual LaunchDarkly connection. # - # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. 
In the options + # To use this component, call {FileDataSource#factory}, and store its return value in the + # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # # factory = FileDataSource.factory(paths: [ myFilePath ]) @@ -34,21 +34,23 @@ def self.have_listen? # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled - # this with Config.send_events or Config.offline. + # this with {Config#send_events} or {Config#offline?}. # # Flag data files can be either JSON or YAML. They contain an object with three possible # properties: # - # - "flags": Feature flag definitions. - # - "flagValues": Simplified feature flags that contain only a value. - # - "segments": User segment definitions. + # - `flags`: Feature flag definitions. + # - `flagValues`: Simplified feature flags that contain only a value. + # - `segments`: User segment definitions. # - # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application # and is subject to change. Rather than trying to construct these objects yourself, it is simpler # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` # # The output will look something like this (but with many more properties): # @@ -95,8 +97,6 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource - include LaunchDarkly::Interfaces::UpdateProcessor - # # Returns a factory for the file data source component. # @@ -113,6 +113,7 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. + # @return an object that can be stored in {Config#update_processor_factory} # def self.factory(options={}) return Proc.new do |sdk_key, config| From 414af9957bdf1897c399d8131bcfe04d027b0c89 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:08:43 -0800 Subject: [PATCH 062/292] change name of "update processor" to "data source" --- lib/ldclient-rb/config.rb | 28 ++++++++++++++-------------- lib/ldclient-rb/file_data_source.rb | 12 +++++------- lib/ldclient-rb/interfaces.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 18 +++++++++--------- spec/file_data_source_spec.rb | 4 ++-- spec/ldclient_spec.rb | 8 ++++---- 6 files changed, 40 insertions(+), 42 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index e16e998a..64ad7378 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -34,8 +34,9 @@ class Config # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. 
- # @option opts [Object] :update_processor See {#update_processor}. - # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # @option opts [Object] :data_source See {#data_source}. + # @option opts [Object] :update_processor Obsolete synonym for `data_source`. + # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -59,6 +60,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false + @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] @update_processor = opts[:update_processor] @update_processor_factory = opts[:update_processor_factory] end @@ -245,22 +247,20 @@ def offline? # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, # the client uses its standard polling or streaming implementation; this is customizable for # testing purposes. - # @return [LaunchDarkly::Interfaces::UpdateProcessor] - # @deprecated The preferred way to set this is now with {#update_processor_factory}. # - attr_reader :update_processor - - # - # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly - # By default, the client uses its standard polling or streaming implementation; this is - # customizable for testing purposes. - # - # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It - # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # This may be set to either an object that conforms to {LaunchDarkly::Interfaces::DataSource}, + # or a lambda (or Proc) that takes two parameters-- SDK key and {Config}-- and returns such an + # object. # - # @return [lambda] + # @return [LaunchDarkly::Interfaces::DataSource|lambda] # @see FileDataSource # + attr_reader :data_source + + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor + + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor_factory # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index adc32ab6..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -26,11 +26,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call {FileDataSource#factory}, and store its return value in the - # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options + # {Config#data_source} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # factory = FileDataSource.factory(paths: [ myFilePath ]) - # config = LaunchDarkly::Config.new(update_processor_factory: factory) + # file_source = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(data_source: file_source) # # This will cause the client not to connect to LaunchDarkly to get feature flags. 
The # client may still make network connections to send analytics events, unless you have disabled @@ -113,12 +113,10 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. - # @return an object that can be stored in {Config#update_processor_factory} + # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return Proc.new do |sdk_key, config| - FileDataSourceImpl.new(config.feature_store, config.logger, options) - end + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index c9c38cfe..912472b5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -108,17 +108,17 @@ def stop end # - # Mixin that defines the required methods of an update processor implementation. This is - # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # Mixin that defines the required methods of a data source implementation. This is the + # component that delivers feature flag data from LaunchDarkly to the LDClient by putting # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one # except for testing purposes. {FileDataSource} provides one such test fixture. # - module UpdateProcessor + module DataSource # - # Checks whether the processor has finished initializing. Initialization is considered done + # Checks whether the data source has finished initializing. Initialization is considered done # once it has received one complete data set from LaunchDarkly. # # @return [Boolean] true if initialization is complete @@ -127,7 +127,7 @@ def initialized? end # - # Puts the processor into an active state. Normally this means it will make its first + # Puts the data source into an active state. Normally this means it will make its first # connection attempt to LaunchDarkly. If `start` has already been called, calling it again # should simply return the same value as the first call. # @@ -137,7 +137,7 @@ def start end # - # Puts the processor into an inactive state and releases all of its resources. + # Puts the data source into an inactive state and releases all of its resources. # This state should be considered permanent (`start` does not have to work after `stop`). # def stop diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ffd82084..868c65bd 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -37,19 +37,19 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - if @config.update_processor - @update_processor = @config.update_processor + data_source_or_factory = @config.data_source || self.method(:create_default_data_source) + if data_source_or_factory.respond_to? 
:call + @data_source = data_source_or_factory.call(sdk_key, config) else - factory = @config.update_processor_factory || self.method(:create_default_update_processor) - @update_processor = factory.call(sdk_key, config) + @data_source = data_source_or_factory end - ready = @update_processor.start + ready = @data_source.start if wait_for_sec > 0 ok = ready.wait(wait_for_sec) if !ok @config.logger.error { "[LDClient] Timeout encountered waiting for LaunchDarkly client initialization" } - elsif !@update_processor.initialized? + elsif !@data_source.initialized? @config.logger.error { "[LDClient] LaunchDarkly client initialization failed" } end end @@ -97,7 +97,7 @@ def secure_mode_hash(user) # Returns whether the client has been initialized and is ready to serve feature flag requests # @return [Boolean] true if the client has been initialized def initialized? - @config.offline? || @config.use_ldd? || @update_processor.initialized? + @config.offline? || @config.use_ldd? || @data_source.initialized? end # @@ -270,14 +270,14 @@ def all_flags_state(user, options={}) # @return [void] def close @config.logger.info { "[LDClient] Closing LaunchDarkly client..." } - @update_processor.stop + @data_source.stop @event_processor.stop @store.stop end private - def create_default_update_processor(sdk_key, config) + def create_default_data_source(sdk_key, config) if config.offline? return NullUpdateProcessor.new end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 60107e26..28a0c06f 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -219,7 +219,7 @@ def test_auto_reload(options) it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin @@ -233,7 +233,7 @@ def test_auto_reload(options) it "evaluates full flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 262f53f9..b3a9592c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,8 +7,8 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:update_processor) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, update_processor: update_processor}) } + let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } + let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } let(:client) do subject.new("secret", config) end @@ -357,7 +357,7 @@ def event_processor end describe 'with send_events: false' do - let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } + let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, data_source: null_data}) } let(:client) { subject.new("secret", config) } it "uses a NullEventProcessor" do @@ -367,7 +367,7 @@ def event_processor end describe 'with send_events: true' do 
- let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, update_processor: update_processor}) } + let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, data_source: null_data}) } let(:client_with_events) { subject.new("secret", config_with_events) } it "does not use a NullEventProcessor" do From fdb0291849c5faca7c4b8b5a644f342945b8fbb0 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 11:37:38 -0800 Subject: [PATCH 063/292] default dynamodb_opts to {} --- lib/ldclient-rb/impl/integrations/dynamodb_impl.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 8eb1dd2a..ebaa0445 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -41,7 +41,7 @@ def initialize(table_name, opts) if !opts[:existing_client].nil? @client = opts[:existing_client] else - @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") From 65ee009c9cef4ae3066b5faa41b67119a9c85ba5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 12:47:15 -0800 Subject: [PATCH 064/292] fix Unicode handling in polling requests --- lib/ldclient-rb/requestor.rb | 2 +- .../sse_shared.rb => http_util.rb} | 44 ++++++---- spec/requestor_spec.rb | 82 ++++++++++--------- spec/sse_client/sse_client_spec.rb | 24 +++++- spec/sse_client/streaming_http_spec.rb | 3 +- 5 files changed, 99 insertions(+), 56 deletions(-) rename spec/{sse_client/sse_shared.rb => http_util.rb} (56%) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..8922e82c 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -20,7 +20,7 @@ def initialize(sdk_key, config) @sdk_key = sdk_key @config = config @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store + builder.use :http_cache, store: @config.cache_store, serializer: Marshal builder.adapter :net_http_persistent end diff --git a/spec/sse_client/sse_shared.rb b/spec/http_util.rb similarity index 56% rename from spec/sse_client/sse_shared.rb rename to spec/http_util.rb index 3ecabb57..434cafc8 100644 --- a/spec/sse_client/sse_shared.rb +++ b/spec/http_util.rb @@ -4,23 +4,28 @@ require "webrick/https" class StubHTTPServer + attr_reader :requests + def initialize @port = 50000 begin - @server = create_server(@port) + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE @port += 1 retry end + @requests = [] end - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) end def start @@ -38,6 +43,19 @@ def base_uri def setup_response(uri_path, &action) @server.mount_proc(uri_path, action) end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? 
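+      # fill in the canned response body, then copy any extra headers supplied by the caller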
+ res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end end class StubProxyServer < StubHTTPServer @@ -49,19 +67,15 @@ def initialize @request_count = 0 end - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ ProxyContentHandler: proc do |req,res| if !@connect_status.nil? res.status = @connect_status end @request_count += 1 end - ) + })) end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..7f2b8ad7 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,58 @@ +require "http_util" require "spec_helper" -require "faraday" describe LaunchDarkly::Requestor do describe ".request_all_flags" do describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" - }) - ) - } it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + config = LaunchDarkly::Config.new(base_uri: server.base_uri.to_s, proxy: proxy.base_uri.to_s) + r = LaunchDarkly::Requestor.new("sdk-key", config) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) + end end - - requestor.request_all_data() end end describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" - }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) + it "sends headers" do + content = '{"flags": {}}' + sdk_key = 'sdk-key' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new(sdk_key, LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + r.request_all_data + expect(server.requests.length).to eq 1 + req = server.requests[0] + expect(req.header['authorization']).to eq [sdk_key] + expect(req.header['user-agent']).to eq ["RubyClient/" + LaunchDarkly::VERSION] + end + end + + it "receives data" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to 
eq(JSON.parse(content, symbolize_names: true)) + end + end + + it "handles Unicode content" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + # Note that the ETag header here is important because without it, the HTTP cache will not be used, + # and the cache is what required a fix to handle Unicode properly. See: + # https://github.com/launchdarkly/ruby-client/issues/90 + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) end - requestor.request_all_data() end end end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..3adca889 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server @@ -70,6 +70,28 @@ def with_client(client) end end + it "handles Unicode correctly (assuming UTF-8)" do + please = "proszę" + thank_you = "dziękuję" + events_body = <<-EOT +event: #{please} +data: #{thank_you} + +EOT + with_server do |server| + server.setup_ok_response("/", events_body, "text/event-stream") + + event_sink = Queue.new + client = subject.new(server.base_uri) do |c| + c.on_event { |event| event_sink << event } + end + + with_client(client) do |client| + expect(event_sink.pop).to eq(SSE::SSEEvent.new(please.to_sym, thank_you, nil)) + end + end + end + it "reconnects after error response" do events_body = <<-EOT event: go diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..fbe60b96 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server @@ -119,6 +119,7 @@ def with_connection(cxn) end it "throws error if proxy responds with error status" do + body = "hi" with_server do |server| server.setup_response("/") do |req,res| res.body = body From 86820ea710d8698b21b78ac093487c918e26bcbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:31:44 -0800 Subject: [PATCH 065/292] initial Consul implementation --- .circleci/config.yml | 16 +++ ldclient-rb.gemspec | 1 + .../impl/integrations/consul_impl.rb | 132 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 12 ++ lib/ldclient-rb/integrations/consul.rb | 37 +++++ .../integrations/consul_feature_store_spec.rb | 37 +++++ 6 files changed, 235 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/consul_impl.rb create mode 100644 lib/ldclient-rb/integrations/consul.rb create mode 100644 spec/integrations/consul_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..45540d63 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -33,30 +33,35 @@ jobs: <<: *ruby-docker-template docker: - image: circleci/ruby:2.2.9-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - 
image: circleci/ruby:2.4.4-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk + - image: consul - image: redis - image: amazon/dynamodb-local @@ -93,8 +98,19 @@ jobs: mv Gemfile.lock "Gemfile.lock.$i" done - run: + name: start DynamoDB command: docker run -p 8000:8000 amazon/dynamodb-local background: true + - run: + name: download Consul + command: wget https://releases.hashicorp.com/consul/0.8.0/consul_0.8.0_linux_amd64.zip + - run: + name: extract Consul + command: unzip consul_0.8.0_linux_amd64.zip + - run: + name: start Consul + command: ./consul agent -dev + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..35fbf45c 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -25,6 +25,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" + spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" spec.add_development_dependency "rake", "~> 10.0" diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb new file mode 100644 index 00000000..48d308c2 --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -0,0 +1,132 @@ +require "json" + +module LaunchDarkly + module Impl + module Integrations + module Consul + # + # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. + # + class ConsulFeatureStoreCore + begin + require "diplomat" + CONSUL_ENABLED = true + rescue ScriptError, StandardError + CONSUL_ENABLED = false + end + + def initialize(opts) + if !CONSUL_ENABLED + raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") + end + + @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' + @logger = opts[:logger] || Config.default_logger + @client = Diplomat::Kv.new(configuration: opts[:consul_config]) + + @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + unused_old_keys = set() + unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + + ops = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + value = item.to_json + key = item_key(kind, item[:key]) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => value } }) + unused_old_keys.delete(key) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) + end + + # Now set the special key that we check in initialized_internal? 
+ ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + + ConsulUtil.batch_operations(ops) + + @logger.info { "Initialized database with #{num_items} items" } + end + + def get_internal(kind, key) + + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + + items_out + end + + def upsert_internal(kind, new_item) + + end + + def initialized_internal? + + end + + def stop + # There's no way to close the Consul client + end + + private + + def item_key(kind, key) + kind_key(kind) + '/' + key + end + + def kind_key(kind) + @prefix + kind[:namespace] + end + + def inited_key + @prefix + '$inited' + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class ConsulUtil + # + # Submits as many transactions as necessary to submit all of the given operations. + # The ops array is consumed. + # + def self.batch_write_requests(ops) + batch_size = 64 # Consul can only do this many at a time + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + Diplomat::Kv.txn(chunk) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index bfaed2eb..8c9f6249 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -7,6 +8,17 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [Consul](https://www.consul.io/). + # + # Note that in order to use this integration, you must first install the gem `diplomat`. + # + # @since 5.5.0 + # + module Consul + # code is in ldclient-rb/impl/integrations/consul_impl + end + # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb new file mode 100644 index 00000000..7450d3b9 --- /dev/null +++ b/lib/ldclient-rb/integrations/consul.rb @@ -0,0 +1,37 @@ +require "ldclient-rb/impl/integrations/consul_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module Consul + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Consul-backed persistent feature store. + # + # To use this method, you must first install the gem `diplomat`. Then, put the object returned by + # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). 
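+      #
+      # A minimal usage sketch (the prefix value is arbitrary and the SDK key is a placeholder):
+      #
+      #     store = LaunchDarkly::Integrations::Consul.new_feature_store(prefix: "my-app")
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #     client = LaunchDarkly::LDClient.new("sdk-key", config)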
+ # + # @param opts [Hash] the configuration options + # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default + # Consul client configuration + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb new file mode 100644 index 00000000..1aa6f919 --- /dev/null +++ b/spec/integrations/consul_feature_store_spec.rb @@ -0,0 +1,37 @@ +require "feature_store_spec_base" +#require "diplomat" +require "spec_helper" + + +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} + +def create_consul_store(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_consul_store_uncached(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 0 })) +end + + +describe "Consul feature store" do + + # These tests will all fail if there isn't a local Consul instance running. 
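+  # A local agent started with `consul agent -dev` (as in the CI configuration in this repo) is sufficient.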
+ + context "with local cache" do + include_examples "feature_store", method(:create_consul_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_consul_store_uncached) + end +end From 485a73dfdc23c25ff56db7fcdaf5ccf417df1579 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:45:12 -0800 Subject: [PATCH 066/292] use new SSE gem --- ldclient-rb.gemspec | 5 +- lib/ldclient-rb/stream.rb | 24 ++- lib/sse_client.rb | 4 - lib/sse_client/backoff.rb | 38 ---- lib/sse_client/sse_client.rb | 178 ----------------- lib/sse_client/sse_events.rb | 67 ------- lib/sse_client/streaming_http.rb | 199 ------------------- spec/sse_client/sse_client_spec.rb | 177 ----------------- spec/sse_client/sse_events_spec.rb | 100 ---------- spec/sse_client/sse_shared.rb | 82 -------- spec/sse_client/streaming_http_spec.rb | 263 ------------------------- spec/stream_spec.rb | 36 ++-- 12 files changed, 34 insertions(+), 1139 deletions(-) delete mode 100644 lib/sse_client.rb delete mode 100644 lib/sse_client/backoff.rb delete mode 100644 lib/sse_client/sse_client.rb delete mode 100644 lib/sse_client/sse_events.rb delete mode 100644 lib/sse_client/streaming_http.rb delete mode 100644 spec/sse_client/sse_client_spec.rb delete mode 100644 spec/sse_client/sse_events_spec.rb delete mode 100644 spec/sse_client/sse_shared.rb delete mode 100644 spec/sse_client/streaming_http_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..9f7c5089 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -13,7 +13,7 @@ Gem::Specification.new do |spec| spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" spec.homepage = "https://github.com/launchdarkly/ruby-client" - spec.license = "Apache 2.0" + spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } @@ -40,6 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" spec.add_runtime_dependency "hashdiff", "~> 0.2" - spec.add_runtime_dependency "http_tools", '~> 0.4.5' - spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 660d7063..adc4bf59 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "sse_client" +require "ld-eventsource" module LaunchDarkly # @private @@ -54,15 +54,18 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = SSE::SSEClient.new(@config.stream_uri + "/all", opts) do |conn| - conn.on_event { |event| process_message(event, event.type) } + @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + conn.on_event { |event| process_message(event) } conn.on_error { |err| - status = err[:status_code] - message = Util.http_error_message(status, "streaming connection", "will retry") - @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop + case err + when LaunchDarklySSE::HTTPError + status = err.status + message = Util.http_error_message(status, "streaming connection", "will retry") + @config.logger.error { "[LDClient] #{message}" } + if 
!Util.http_error_recoverable?(status) + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop + end end } end @@ -79,7 +82,8 @@ def stop private - def process_message(message, method) + def process_message(message) + method = message.type @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) diff --git a/lib/sse_client.rb b/lib/sse_client.rb deleted file mode 100644 index dd24c3a6..00000000 --- a/lib/sse_client.rb +++ /dev/null @@ -1,4 +0,0 @@ -require "sse_client/streaming_http" -require "sse_client/sse_events" -require "sse_client/backoff" -require "sse_client/sse_client" diff --git a/lib/sse_client/backoff.rb b/lib/sse_client/backoff.rb deleted file mode 100644 index 73e0754f..00000000 --- a/lib/sse_client/backoff.rb +++ /dev/null @@ -1,38 +0,0 @@ - -module SSE - # - # A simple backoff algorithm that can be reset at any time, or reset itself after a given - # interval has passed without errors. - # - class Backoff - def initialize(base_interval, max_interval, auto_reset_interval = 60) - @base_interval = base_interval - @max_interval = max_interval - @auto_reset_interval = auto_reset_interval - @attempts = 0 - @last_good_time = nil - @jitter_rand = Random.new - end - - attr_accessor :base_interval - - def next_interval - if !@last_good_time.nil? && (Time.now.to_i - @last_good_time) >= @auto_reset_interval - @attempts = 0 - end - @last_good_time = nil - if @attempts == 0 - @attempts += 1 - return 0 - end - @last_good_time = nil - target = ([@base_interval * (2 ** @attempts), @max_interval].min).to_f - @attempts += 1 - (target / 2) + @jitter_rand.rand(target / 2) - end - - def mark_success - @last_good_time = Time.now.to_i if @last_good_time.nil? - end - end -end diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb deleted file mode 100644 index 5b7e0fd9..00000000 --- a/lib/sse_client/sse_client.rb +++ /dev/null @@ -1,178 +0,0 @@ -require "concurrent/atomics" -require "logger" -require "thread" -require "uri" - -# -# A lightweight Server-Sent Events implementation based on the `socketry` gem. -# -# This module will be moved to a separate gem in the future. -# -# @private -# -module SSE - # - # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with - # read timeouts, and http_tools for HTTP response parsing. The overall logic is based on - # [https://github.com/Tonkpils/celluloid-eventsource]. - # - class SSEClient - DEFAULT_CONNECT_TIMEOUT = 10 - DEFAULT_READ_TIMEOUT = 300 - DEFAULT_RECONNECT_TIME = 1 - MAX_RECONNECT_TIME = 30 - - def initialize(uri, options = {}) - @uri = URI(uri) - @stopped = Concurrent::AtomicBoolean.new(false) - - @headers = options[:headers] ? options[:headers].clone : {} - @connect_timeout = options[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT - @read_timeout = options[:read_timeout] || DEFAULT_READ_TIMEOUT - @logger = options[:logger] || default_logger - - if options[:proxy] - @proxy = options[:proxy] - else - proxyUri = @uri.find_proxy - if !proxyUri.nil? && (proxyUri.scheme == 'http' || proxyUri.scheme == 'https') - @proxy = proxyUri - end - end - - reconnect_time = options[:reconnect_time] || DEFAULT_RECONNECT_TIME - @backoff = Backoff.new(reconnect_time, MAX_RECONNECT_TIME) - - @on = { event: ->(_) {}, error: ->(_) {} } - @last_id = nil - - yield self if block_given? 
- - Thread.new do - run_stream - end - end - - def on(event_name, &action) - @on[event_name.to_sym] = action - end - - def on_event(&action) - @on[:event] = action - end - - def on_error(&action) - @on[:error] = action - end - - def close - if @stopped.make_true - @cxn.close if !@cxn.nil? - @cxn = nil - end - end - - private - - def default_logger - log = ::Logger.new($stdout) - log.level = ::Logger::WARN - log - end - - def run_stream - while !@stopped.value - @cxn = nil - begin - @cxn = connect - # There's a potential race if close was called in the middle of the previous line, i.e. after we - # connected but before @cxn was set. Checking the variable again is a bit clunky but avoids that. - return if @stopped.value - read_stream(@cxn) if !@cxn.nil? - rescue Errno::EBADF - # don't log this - it probably means we closed our own connection deliberately - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - begin - @cxn.close if !@cxn.nil? - rescue StandardError => e - @logger.error { "Unexpected error while closing stream: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - end - end - - # Try to establish a streaming connection. Returns the StreamingHTTPConnection object if successful. - def connect - loop do - return if @stopped.value - interval = @backoff.next_interval - if interval > 0 - @logger.warn { "Will retry connection after #{'%.3f' % interval} seconds" } - sleep(interval) - end - begin - cxn = open_connection(build_headers) - if cxn.status != 200 - body = cxn.read_all # grab the whole response body in case it has error details - cxn.close - @on[:error].call({status_code: cxn.status, body: body}) - next - elsif cxn.headers["content-type"] && cxn.headers["content-type"].start_with?("text/event-stream") - return cxn # we're good to proceed - end - @logger.error { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" } - rescue Errno::EBADF - raise - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - cxn.close if !cxn.nil? - end - # if unsuccessful, continue the loop to connect again - end - end - - # Just calls the StreamingHTTPConnection constructor - factored out for test purposes - def open_connection(headers) - StreamingHTTPConnection.new(@uri, @proxy, headers, @connect_timeout, @read_timeout) - end - - # Pipe the output of the StreamingHTTPConnection into the EventParser, and dispatch events as - # they arrive. - def read_stream(cxn) - event_parser = EventParser.new(cxn.read_lines) - event_parser.items.each do |item| - return if @stopped.value - case item - when SSEEvent - dispatch_event(item) - when SSESetRetryInterval - @backoff.base_interval = event.milliseconds.t-Of / 1000 - end - end - end - - def dispatch_event(event) - @last_id = event.id - - # Tell the Backoff object that as of the current time, we have succeeded in getting some data. It - # uses that information so it can automatically reset itself if enough time passes between failures. - @backoff.mark_success - - # Pass the event to the caller - @on[:event].call(event) - end - - def build_headers - h = { - 'Accept' => 'text/event-stream', - 'Cache-Control' => 'no-cache' - } - h['Last-Event-Id'] = @last_id if !@last_id.nil? 
- h.merge(@headers) - end - end -end diff --git a/lib/sse_client/sse_events.rb b/lib/sse_client/sse_events.rb deleted file mode 100644 index 762cc2b0..00000000 --- a/lib/sse_client/sse_events.rb +++ /dev/null @@ -1,67 +0,0 @@ - -module SSE - # Server-Sent Event type used by SSEClient and EventParser. - SSEEvent = Struct.new(:type, :data, :id) - - SSESetRetryInterval = Struct.new(:milliseconds) - - # - # Accepts lines of text via an iterator, and parses them into SSE messages. - # - class EventParser - def initialize(lines) - @lines = lines - reset_buffers - end - - # Generator that parses the input interator and returns instances of SSEEvent or SSERetryInterval. - def items - Enumerator.new do |gen| - @lines.each do |line| - line.chomp! - if line.empty? - event = maybe_create_event - reset_buffers - gen.yield event if !event.nil? - else - case line - when /^(\w+): ?(.*)$/ - item = process_field($1, $2) - gen.yield item if !item.nil? - end - end - end - end - end - - private - - def reset_buffers - @id = nil - @type = nil - @data = "" - end - - def process_field(name, value) - case name - when "event" - @type = value.to_sym - when "data" - @data << "\n" if !@data.empty? - @data << value - when "id" - @id = value - when "retry" - if /^(?\d+)$/ =~ value - return SSESetRetryInterval.new(num.to_i) - end - end - nil - end - - def maybe_create_event - return nil if @data.empty? - SSEEvent.new(@type || :message, @data, @id) - end - end -end diff --git a/lib/sse_client/streaming_http.rb b/lib/sse_client/streaming_http.rb deleted file mode 100644 index eeb80e82..00000000 --- a/lib/sse_client/streaming_http.rb +++ /dev/null @@ -1,199 +0,0 @@ -require "concurrent/atomics" -require "http_tools" -require "socketry" - -module SSE - # - # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming. - # The socket is created and managed by Socketry, which we use so that we can have a read timeout. - # - class StreamingHTTPConnection - attr_reader :status, :headers - - def initialize(uri, proxy, headers, connect_timeout, read_timeout) - @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout) - @socket.write(build_request(uri, headers)) - @reader = HTTPResponseReader.new(@socket, read_timeout) - @status = @reader.status - @headers = @reader.headers - @closed = Concurrent::AtomicBoolean.new(false) - end - - def close - if @closed.make_true - @socket.close if @socket - @socket = nil - end - end - - # Generator that returns one line of the response body at a time (delimited by \r, \n, - # or \r\n) until the response is fully consumed or the socket is closed. - def read_lines - @reader.read_lines - end - - # Consumes the entire response body and returns it. - def read_all - @reader.read_all - end - - private - - # Build an HTTP request line and headers. - def build_request(uri, headers) - ret = "GET #{uri.request_uri} HTTP/1.1\r\n" - ret << "Host: #{uri.host}\r\n" - headers.each { |k, v| - ret << "#{k}: #{v}\r\n" - } - ret + "\r\n" - end - end - - # - # Used internally to send the HTTP request, including the proxy dialogue if necessary. 
- # - class HTTPConnectionFactory - def self.connect(uri, proxy, connect_timeout, read_timeout) - if !proxy - return open_socket(uri, connect_timeout) - end - - socket = open_socket(proxy, connect_timeout) - socket.write(build_proxy_request(uri, proxy)) - - # temporarily create a reader just for the proxy connect response - proxy_reader = HTTPResponseReader.new(socket, read_timeout) - if proxy_reader.status != 200 - raise ProxyError, "proxy connection refused, status #{proxy_reader.status}" - end - - # start using TLS at this point if appropriate - if uri.scheme.downcase == 'https' - wrap_socket_in_ssl_socket(socket) - else - socket - end - end - - private - - def self.open_socket(uri, connect_timeout) - if uri.scheme.downcase == 'https' - Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - else - Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - end - end - - # Build a proxy connection header. - def self.build_proxy_request(uri, proxy) - ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n" - ret << "Host: #{uri.host}:#{uri.port}\r\n" - if proxy.user || proxy.password - encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":")) - ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n" - end - ret << "\r\n" - ret - end - - def self.wrap_socket_in_ssl_socket(socket) - io = IO.try_convert(socket) - ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new) - ssl_sock.connect - Socketry::SSL::Socket.new.from_socket(ssl_sock) - end - end - - class ProxyError < StandardError - def initialize(message) - super - end - end - - # - # Used internally to read the HTTP response, either all at once or as a stream of text lines. - # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and - # chunks of the body via callbacks. - # - class HTTPResponseReader - DEFAULT_CHUNK_SIZE = 10000 - - attr_reader :status, :headers - - def initialize(socket, read_timeout) - @socket = socket - @read_timeout = read_timeout - @parser = HTTPTools::Parser.new - @buffer = "" - @done = false - @lock = Mutex.new - - # Provide callbacks for the Parser to give us the headers and body. This has to be done - # before we start piping any data into the parser. - have_headers = false - @parser.on(:header) do - have_headers = true - end - @parser.on(:stream) do |data| - @lock.synchronize { @buffer << data } # synchronize because we're called from another thread in Socketry - end - @parser.on(:finish) do - @lock.synchronize { @done = true } - end - - # Block until the status code and headers have been successfully read. - while !have_headers - raise EOFError if !read_chunk_into_buffer - end - @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }] - @status = @parser.status_code - end - - def read_lines - Enumerator.new do |gen| - loop do - line = read_line - break if line.nil? - gen.yield line - end - end - end - - def read_all - while read_chunk_into_buffer - end - @buffer - end - - private - - # Attempt to read some more data from the socket. Return true if successful, false if EOF. - # A read timeout will result in an exception from Socketry's readpartial method. 
- def read_chunk_into_buffer - # If @done is set, it means the Parser has signaled end of response body - @lock.synchronize { return false if @done } - data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout) - return false if data == :eof - @parser << data - # We are piping the content through the parser so that it can handle things like chunked - # encoding for us. The content ends up being appended to @buffer via our callback. - true - end - - # Extract the next line of text from the read buffer, refilling the buffer as needed. - def read_line - loop do - @lock.synchronize do - i = @buffer.index(/[\r\n]/) - if !i.nil? - i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n") - return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8) - end - end - return nil if !read_chunk_into_buffer - end - end - end -end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb deleted file mode 100644 index 54f1f5c7..00000000 --- a/spec/sse_client/sse_client_spec.rb +++ /dev/null @@ -1,177 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of SSEClient against a real server -# -describe SSE::SSEClient do - subject { SSE::SSEClient } - - def with_client(client) - begin - yield client - ensure - client.close - end - end - - it "sends expected headers" do - with_server do |server| - requests = Queue.new - server.setup_response("/") do |req,res| - requests << req - res.content_type = "text/event-stream" - res.status = 200 - end - - headers = { - "Authorization" => "secret" - } - - with_client(subject.new(server.base_uri, headers: headers)) do |client| - received_req = requests.pop - expect(received_req.header).to eq({ - "accept" => ["text/event-stream"], - "cache-control" => ["no-cache"], - "host" => ["127.0.0.1"], - "authorization" => ["secret"] - }) - end - end - end - - it "receives messages" do - events_body = <<-EOT -event: go -data: foo -id: 1 - -event: stop -data: bar - -EOT - with_server do |server| - server.setup_response("/") do |req,res| - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", "1")) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:stop, "bar", nil)) - end - end - end - - it "reconnects after error response" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.status = 500 - res.body = "sorry" - res.keep_alive = false - else - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - end - - event_sink = Queue.new - error_sink = Queue.new - client = subject.new(server.base_uri, reconnect_time: 0.25) do |c| - c.on_event { |event| event_sink << event } - c.on_error { |error| error_sink << error } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(error_sink.pop).to eq({ status_code: 500, body: "sorry" }) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects after read timeout" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - sleep(2) - end - 
res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects if stream returns EOF" do - events_body_1 = <<-EOT -event: go -data: foo - -EOT - events_body_2 = <<-EOT -event: go -data: bar - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.body = events_body_1 - else - res.body = events_body_2 - end - res.content_type = "text/event-stream" - res.status = 200 - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "bar", nil)) - expect(attempt).to be >= 2 - end - end - end -end diff --git a/spec/sse_client/sse_events_spec.rb b/spec/sse_client/sse_events_spec.rb deleted file mode 100644 index 438cfa7a..00000000 --- a/spec/sse_client/sse_events_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -require "spec_helper" - -describe SSE::EventParser do - subject { SSE::EventParser } - - it "parses an event with all fields" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:abc, "def", "1") - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with only data" do - lines = [ - "data: def\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with multi-line data" do - lines = [ - "data: def\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def\nghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "ignores comments" do - lines = [ - ":", - "data: def\r\n", - ":", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses reconnect interval" do - lines = [ - "retry: 2500\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_item = SSE::SSESetRetryInterval.new(2500) - output = ep.items.to_a - expect(output).to eq([ expected_item ]) - end - - it "parses multiple events" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event_1 = SSE::SSEEvent.new(:abc, "def", "1") - expected_event_2 = SSE::SSEEvent.new(:message, "ghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event_1, expected_event_2 ]) - end - - it "ignores events with no data" do - lines = [ - "event: nothing\r\n", - "\r\n", - "event: nada\r\n", - "\r\n" - ] - ep = subject.new(lines) - - output = ep.items.to_a - expect(output).to eq([]) - end -end diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null 
@@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb deleted file mode 100644 index 7dfac9bd..00000000 --- a/spec/sse_client/streaming_http_spec.rb +++ /dev/null @@ -1,263 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of HTTP requests against a real server -# -describe SSE::StreamingHTTPConnection do - subject { SSE::StreamingHTTPConnection } - - def with_connection(cxn) - begin - yield cxn - ensure - cxn.close - end - end - - it "makes HTTP connection and sends request" do - with_server do |server| - requests = Queue.new - server.setup_response("/foo") do |req,res| - requests << req - res.status = 200 - end - headers = { - "Accept" => "text/plain" - } - with_connection(subject.new(server.base_uri.merge("/foo?bar"), nil, headers, 30, 30)) do - received_req = requests.pop - expect(received_req.unparsed_uri).to eq("/foo?bar") - expect(received_req.header).to eq({ - "accept" => ["text/plain"], - "host" => [server.base_uri.host] - }) - end - end - end - - it "receives response status" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res.status = 204 - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq(204) - end - end - end - - it "receives response headers" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res["Content-Type"] = "application/json" - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.headers["content-type"]).to eq("application/json") - end - end - end - - it "can read response as lines" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - lines = cxn.read_lines - expect(lines.next).to eq("This is\n") - expect(lines.next).to eq("a response\n") - end - end - end - - it "can read entire response body" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end 
- with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("This is\na response\n") - end - end - end - - it "enforces read timeout" do - with_server do |server| - server.setup_response("/") do |req,res| - sleep(2) - res.status = 200 - end - expect { subject.new(server.base_uri, nil, {}, 30, 0.25) }.to raise_error(Socketry::TimeoutError) - end - end - - it "connects to HTTP server through proxy" do - body = "hi" - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(server.base_uri, proxy.base_uri, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("hi") - expect(proxy.request_count).to eq(1) - end - end - end - end - - it "throws error if proxy responds with error status" do - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - proxy.connect_status = 403 - expect { subject.new(server.base_uri, proxy.base_uri, {}, 30, 30) }.to raise_error(SSE::ProxyError) - end - end - end - - # The following 2 tests were originally written to connect to an embedded HTTPS server made with - # WEBrick. Unfortunately, some unknown problem prevents WEBrick's self-signed certificate feature - # from working in JRuby 9.1 (but not in any other Ruby version). Therefore these tests currently - # hit an external URL. - - it "connects to HTTPS server" do - with_connection(subject.new(URI("https://app.launchdarkly.com"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - end - end - - it "connects to HTTPS server through proxy" do - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(URI("https://app.launchdarkly.com"), proxy.base_uri, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - expect(proxy.request_count).to eq(1) - end - end - end -end - -# -# Tests of response parsing functionality without a real HTTP request -# -describe SSE::HTTPResponseReader do - subject { SSE::HTTPResponseReader } - - let(:simple_response) { <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -Content-Type: text/event-stream - -line1\r -line2 -\r -EOT - } - - def make_chunks(str) - # arbitrarily split content into 5-character blocks - str.scan(/.{1,5}/m).to_enum - end - - def mock_socket_without_timeout(chunks) - mock_socket(chunks) { :eof } - end - - def mock_socket_with_timeout(chunks) - mock_socket(chunks) { raise Socketry::TimeoutError } - end - - def mock_socket(chunks) - sock = double - allow(sock).to receive(:readpartial) do - begin - chunks.next - rescue StopIteration - yield - end - end - sock - end - - it "parses status code" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.status).to eq(200) - end - - it "parses headers" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.headers).to eq({ - 'cache-control' => 'no-cache', - 'content-type' => 'text/event-stream' - }) - end - - it "can read entire response body" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("line1\r\nline2\n\r\n") - end - - it "can read response body as lines" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_lines.to_a).to 
eq([ - "line1\r\n", - "line2\n", - "\r\n" - ]) - end - - it "handles chunked encoding" do - chunked_response = <<-EOT -HTTP/1.1 200 OK -Content-Type: text/plain -Transfer-Encoding: chunked - -6\r -things\r -A\r - and stuff\r -0\r -\r -EOT - socket = mock_socket_without_timeout(make_chunks(chunked_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("things and stuff") - end - - it "raises error if response ends without complete headers" do - malformed_response = <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -EOT - socket = mock_socket_without_timeout(make_chunks(malformed_response)) - expect { subject.new(socket, 0) }.to raise_error(EOFError) - end - - it "throws timeout if thrown by socket read" do - socket = mock_socket_with_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - lines = reader.read_lines - lines.next - lines.next - lines.next - expect { lines.next }.to raise_error(Socketry::TimeoutError) - end -end diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index df27e173..0ab9d3ec 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1,5 +1,5 @@ +require "ld-eventsource" require "spec_helper" -require 'ostruct' describe LaunchDarkly::StreamProcessor do subject { LaunchDarkly::StreamProcessor } @@ -8,52 +8,52 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { OpenStruct.new({data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}'}) } - let(:patch_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:patch_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:delete_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "version": 2}'}) } - let(:delete_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "version": 2}'}) } - let(:indirect_patch_flag_message) { OpenStruct.new({data: "/flags/key"}) } - let(:indirect_patch_segment_message) { OpenStruct.new({data: "/segments/key"}) } + let(:put_message) { LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } + let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } it "will accept PUT methods" do - processor.send(:process_message, put_message, LaunchDarkly::PUT) + processor.send(:process_message, put_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) + 
processor.send(:process_message, patch_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) + processor.send(:process_message, patch_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_flag_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_flag_message) + processor.send(:process_message, delete_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_seg_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_seg_message) + processor.send(:process_message, delete_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will accept INDIRECT PATCH method for flags" do flag = { key: 'key', version: 1 } allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) - processor.send(:process_message, indirect_patch_flag_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_flag_message); expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) end it "will accept INDIRECT PATCH method for segments" do segment = { key: 'key', version: 1 } allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) - processor.send(:process_message, indirect_patch_segment_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_segment_message); expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, put_message, "get") + processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 85674397211e249ffad3a9d8c2b9607aa32f180f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:32:39 -0800 Subject: [PATCH 067/292] numerous Ruby SDK documentation fixes --- CONTRIBUTING.md | 12 --- README.md | 75 ++++++++--------- lib/ldclient-rb/config.rb | 2 +- lib/ldclient-rb/impl.rb | 5 +- .../impl/integrations/dynamodb_impl.rb | 3 - lib/ldclient-rb/in_memory_store.rb | 7 +- lib/ldclient-rb/integrations/dynamodb.rb | 20 ++++- lib/ldclient-rb/integrations/redis.rb | 11 ++- lib/ldclient-rb/interfaces.rb | 7 +- lib/ldclient-rb/ldclient.rb | 82 +++++++++++++------ lib/ldclient-rb/redis_store.rb | 2 +- lib/ldclient-rb/version.rb | 1 + scripts/gendocs.sh | 3 + 13 files changed, 140 insertions(+), 90 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 96147068..c6b8dd20 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,15 +2,3 @@ Contributing to LaunchDarkly SDK for Ruby ========================================= We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. 
- -Dependencies ------------- -[ld-em-eventsource](https://github.com/launchdarkly/em-eventsource) - - -Style ------ - -Our pull requests have [Hound CI](https://houndci.com/) set up to do style checking. -We also run [Rubocop](https://github.com/bbatsov/rubocop). - diff --git a/README.md b/README.md index 43819554..4812690f 100644 --- a/README.md +++ b/README.md @@ -15,37 +15,37 @@ This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1 Quick setup ----------- -0. Install the Ruby SDK with `gem` +1. Install the Ruby SDK with `gem` -```shell + ```shell gem install ldclient-rb ``` -1. Require the LaunchDarkly client: +2. Require the LaunchDarkly client: -```ruby + ```ruby require 'ldclient-rb' ``` -2. Create a new LDClient with your SDK key: +3. Create a new LDClient with your SDK key: -```ruby + ```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` ### Ruby on Rails -0. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` +1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` -1. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: +2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: -```ruby + ```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` -2. You may want to include a function in your ApplicationController +3. You may want to include a function in your ApplicationController -```ruby + ```ruby def launchdarkly_settings if current_user.present? { @@ -72,31 +72,44 @@ Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") end ``` -3. In your controllers, access the client using +4. In your controllers, access the client using -```ruby + ```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` Note that this gem will automatically switch to using the Rails logger it is detected. +Your first feature flag +----------------------- + +1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). +2. In your application code, use the feature's key to check whether the flag is on for each user: + +```ruby +if client.variation("your.flag.key", {key: "user@test.com"}, false) + # application code to show the feature +else + # the code to run if the feature is off +end +``` + HTTPS proxy ------------- -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. +----------- + +The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) How to set the HTTPS_PROXY environment variable on Mac/Linux systems: ``` export HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - How to set the HTTPS_PROXY environment variable on Windows systems: ``` set HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - If your proxy requires authentication then you can prefix the URN with your login information: ``` export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 @@ -106,34 +119,22 @@ or set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 ``` - -Your first feature flag ------------------------ - -1. 
Create a new feature flag on your [dashboard](https://app.launchdarkly.com) -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the `LaunchDarkly::Integrations` module and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ----------- -Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). +Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). + +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). Testing ------- @@ -143,10 +144,10 @@ We run integration tests for all our SDKs using a centralized test harness. This Contributing ------------ -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) +See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..34f4f67b 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -346,7 +346,7 @@ def self.default_proxy # # The default value for {#logger}. - # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # @return [Logger] the Rails logger if in Rails, or a default Logger at WARN level otherwise # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 3df0d7e3..b0d63ebe 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -1,10 +1,11 @@ module LaunchDarkly # - # Low-level implementation classes. Everything in this module should be considered non-public - # and subject to change with any release. + # Internal implementation classes. Everything in this module should be considered unsupported + # and subject to change. # # @since 5.5.0 + # @private # module Impl # code is in ldclient-rb/impl/ diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index ebaa0445..a76fae52 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -1,4 +1,3 @@ -require "concurrent/atomics" require "json" module LaunchDarkly @@ -36,8 +35,6 @@ def initialize(table_name, opts) @prefix = opts[:prefix] @logger = opts[:logger] || Config.default_logger - @stopped = Concurrent::AtomicBoolean.new(false) - if !opts[:existing_client].nil? @client = opts[:existing_client] else diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..f2843c1e 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,18 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + + # @private FEATURES = { namespace: "features" }.freeze + # @private SEGMENTS = { namespace: "segments" }.freeze # # Default implementation of the LaunchDarkly client's feature store, using an in-memory - # cache. This object holds feature flags and related data received from the - # streaming API. + # cache. This object holds feature flags and related data received from LaunchDarkly. + # Database-backed implementations are available in {LaunchDarkly::Integrations}. # class InMemoryFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index c9ded019..ecd87fce 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,12 +5,30 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent feature store. + # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
# # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property # of your client configuration ({LaunchDarkly::Config}). # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # Note that the specified table must already exist in DynamoDB. It must have a partition key called + # "namespace", and a sort key called "key" (both strings). The SDK does not create the table + # automatically because it has no way of knowing what additional properties (such as permissions + # and throughput) you would want it to have. + # + # By default, the DynamoDB client will try to get your AWS credentials and region name from + # environment variables and/or local configuration files, as described in the AWS SDK documentation. + # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an + # already-configured DynamoDB client in `existing_client`. + # + # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index b81097c6..34509181 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -23,11 +23,18 @@ def self.default_prefix end # - # Creates a Redis-backed persistent feature store. + # Creates a Redis-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). + # client configuration. + # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..094ce0dd 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -17,9 +17,10 @@ module Interfaces # `:deleted`, a boolean (optional, defaults to false) that if true means this is a # placeholder for a deleted entity. # - # Examples of a "kind" are feature flags and segments; each of these is associated with an - # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are - # hashes with a single property, `:namespace`, which is a short string unique to that kind. 
+ # To represent the different kinds of objects that can be stored, such as feature flags and + # segments, the SDK will provide a "kind" object; this is a hash with a single property, + # `:namespace`, which is a short string unique to that kind. This string can be used as a + # collection name or a key prefix. # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..5788d276 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -17,10 +17,17 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # + # The client will immediately attempt to connect to LaunchDarkly and retrieve + # your feature flag data. If it cannot successfully do so within the time limit + # specified by `wait_for_sec`, the constructor will return a client that is in + # an uninitialized state. See {#initialized?} for more details. + # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object + # @param wait_for_sec [Float] maximum time (in seconds) to wait for initialization # # @return [LDClient] The LaunchDarkly client instance + # def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key @config = config @@ -85,7 +92,7 @@ def toggle?(key, user, default = false) # # Creates a hash string that can be used by the JavaScript SDK to identify a user. - # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # For more information, see [Secure mode](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). # # @param user [Hash] the user properties # @return [String] a hash string @@ -94,44 +101,61 @@ def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end - # Returns whether the client has been initialized and is ready to serve feature flag requests + # + # Returns whether the client has been initialized and is ready to serve feature flag requests. + # + # If this returns false, it means that the client did not succeed in connecting to + # LaunchDarkly within the time limit that you specified in the constructor. It could + # still succeed in connecting at a later time (on another thread), or it could have + # given up permanently (for instance, if your SDK key is invalid). In the meantime, + # any call to {#variation} or {#variation_detail} will behave as follows: + # + # 1. It will check whether the feature store already contains data (that is, you + # are using a database-backed store and it was populated by a previous run of this + # application). If so, it will use the last known feature flag data. + # + # 2. Failing that, it will return the value that you specified for the `default` + # parameter of {#variation} or {#variation_detail}. + # # @return [Boolean] true if the client has been initialized + # def initialized? @config.offline? || @config.use_ldd? || @data_source.initialized? end # - # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a `:key`. + # Determines the variation of a feature flag to present to a user. 
# - # @example Basic user hash - # {key: "user@example.com"} + # At a minimum, the user hash should contain a `:key`, which should be the unique + # identifier for your user (or, for an anonymous user, a session identifier or + # cookie). # - # For authenticated users, the `:key` should be the unique identifier for - # your user. For anonymous users, the `:key` should be a session identifier - # or cookie. In either case, the only requirement is that the key - # is unique to a user. + # Other supported user attributes include IP address, country code, and an arbitrary hash of + # custom attributes. For more about the supported user properties and how they work in + # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/docs/targeting-users). + # + # The optional `:privateAttributeNames` user property allows you to specify a list of + # attribute names that should not be sent back to LaunchDarkly. + # [Private attributes](https://docs.launchdarkly.com/docs/private-user-attributes) + # can also be configured globally in {Config}. # - # You can also pass IP addresses and country codes in the user hash. + # @example Basic user hash + # {key: "my-user-id"} # # @example More complete user hash - # {key: "user@example.com", ip: "127.0.0.1", country: "US"} - # - # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: - # - # @example A user hash with custom attributes - # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} + # {key: "my-user-id", ip: "127.0.0.1", country: "US", custom: {customer_rank: 1000}} # - # Attribute values in the custom hash can be integers, booleans, strings, or - # lists of integers, booleans, or strings. + # @example User with a private attribute + # {key: "my-user-id", email: "email@example.com", privateAttributeNames: ["email"]} # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag + # + # @return the variation to show the user, or the default value if there's an an error # - # @return the variation to show the user, or the - # default value if there's an an error def variation(key, user, default) evaluate_internal(key, user, default, false).value end @@ -148,10 +172,14 @@ def variation(key, user, default) # Calling `variation_detail` instead of `variation` also causes the "reason" data to # be included in analytics events, if you are capturing detailed event data for this flag. # + # For more information, see the reference guide on + # [Evaluation reasons](https://docs.launchdarkly.com/v2.0/docs/evaluation-reasons). + # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag # # @return [EvaluationDetail] an object describing the result # @@ -198,9 +226,11 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user. This method is deprecated - please use - # {#all_flags_state} instead. 
Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of `all_flags`. + # Returns all feature flag values for the given user. + # + # @deprecated Please use {#all_flags_state} instead. Current versions of the + # client-side SDK will not generate analytics events correctly if you pass the + # result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 32a9507d..6ab7dd96 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -16,7 +16,7 @@ module LaunchDarkly # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific - # implementation class may change in the future. + # implementation class may be changed or removed in the future. # class RedisFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/version.rb b/lib/ldclient-rb/version.rb index a70241bf..b526a871 100644 --- a/lib/ldclient-rb/version.rb +++ b/lib/ldclient-rb/version.rb @@ -1,3 +1,4 @@ module LaunchDarkly + # The current gem version. VERSION = "5.4.1" end diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 6280355e..1e545955 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,5 +1,8 @@ #!/bin/bash +# Use this script to generate documentation locally in ./doc so it can be proofed before release. +# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb + gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting From 63c3680ab438609184e1593309636314ded27141 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:36:03 -0800 Subject: [PATCH 068/292] fix bundler version --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..4d0800f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,7 +20,7 @@ ruby-docker-template: &ruby-docker-template fi - run: ruby -v - run: gem install bundler - - run: bundle install + - run: bundle install -v 1.17.3 - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: @@ -88,7 +88,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 008331b1cff9cda159dfaf7f0cb65873afeaec5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:41:19 -0800 Subject: [PATCH 069/292] fix build --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d0800f1..a672a100 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,8 +19,8 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler - - run: bundle install -v 1.17.3 + - run: gem install bundler -v 1.17.3 + - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: From c09c166ccd78055cc2dcb7778cc4779d97350796 Mon Sep 17 
00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:23:48 -0800 Subject: [PATCH 070/292] make some dependencies less strict and remove some unused ones --- ldclient-rb.gemspec | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..46dac190 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -34,11 +34,9 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] - spec.add_runtime_dependency "semantic", "~> 1.6.0" - spec.add_runtime_dependency "thread_safe", "~> 0.3" + spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" - spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" - spec.add_runtime_dependency "hashdiff", "~> 0.2" + spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" end From a4ced95117f3b47b14d2048fa5e7deb1088becbd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:32:25 -0800 Subject: [PATCH 071/292] not using thread_safe --- lib/ldclient-rb/cache_store.rb | 12 ++++++------ lib/ldclient-rb/redis_store.rb | 1 - 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..164534fb 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -1,12 +1,12 @@ -require "thread_safe" +require "concurrent/map" module LaunchDarkly - # A thread-safe in-memory store suitable for use - # with the Faraday caching HTTP client. Uses the - # Threadsafe gem as the underlying cache. + # + # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the + # concurrent-ruby gem's Map as the underlying cache. 
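# Illustrative sketch (not part of the patch): Concurrent::Map is a near drop-in replacement for
# ThreadSafe::Cache here because it offers the same hash-style reads, writes and deletes, which
# is all the SDK's caching code relies on (see the rewritten Requestor later in this series).
require "concurrent/map"

cache = Concurrent::Map.new
cache["some-cache-key"] = "cached body"   # write an entry
cache["some-cache-key"]                   # read it back => "cached body"
cache.delete("some-cache-key")            # remove it; a missing key simply reads back as nil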
# # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency/thread_safe + # @see https://github.com/ruby-concurrency # class ThreadSafeMemoryStore # @@ -14,7 +14,7 @@ class ThreadSafeMemoryStore # # @return [ThreadSafeMemoryStore] a new store def initialize - @cache = ThreadSafe::Cache.new + @cache = Concurrent::Map.new end # diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..c9b1bc64 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # From 806bb8e8fb7b665eb2ac68df583fe186d9cf9ca7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:34:01 -0800 Subject: [PATCH 072/292] add bundler version (still not sure why we need to) --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..544bd9ae 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler + - run: gem install bundler -v 1.17.3 - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 9d446c85cd15f7375886f922d455de6cef8c8062 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:02:03 -0800 Subject: [PATCH 073/292] don't need bundler version for all rubies --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 544bd9ae..d742e552 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler -v 1.17.3; + gem install bundler; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 5516745a0c16d84d2b2420b3e7b84f37f1353f5e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:13:59 -0800 Subject: [PATCH 074/292] fix bundler version again --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d742e552..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler -v 1.17.3 + - run: gem install bundler -v "~> 1.7" - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 54add1dcc64525b22a0e558eb3024e7b60adcf41 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 15:52:35 -0800 Subject: [PATCH 075/292] try to fix bundler version again --- 
.circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..5a66f0ec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3d4b08067de23b9fa77d061f419b788eb7bd1405 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:01:27 -0800 Subject: [PATCH 076/292] yet another build fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5a66f0ec..d08d8c0c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled + yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 635adf44c4bc9635111535f49ce16a1dd079d059 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:35:25 -0800 Subject: [PATCH 077/292] commit lock file to get correct bundler --- .circleci/config.yml | 1 - .gitignore | 1 - Gemfile.lock | 111 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 Gemfile.lock diff --git a/.circleci/config.yml b/.circleci/config.yml index d08d8c0c..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,6 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" diff --git a/.gitignore b/.gitignore index bb576123..3f9d02f2 100644 --- a/.gitignore +++ b/.gitignore @@ -12,5 +12,4 @@ *.a mkmf.log *.gem -Gemfile.lock .DS_Store diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 00000000..17c5725e --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,111 @@ +PATH + remote: . 
+ specs: + ldclient-rb (5.4.1) + concurrent-ruby (~> 1.0.4) + faraday (>= 0.9, < 2) + faraday-http-cache (>= 1.3.0, < 3) + hashdiff (~> 0.2) + http_tools (~> 0.4.5) + json (>= 1.8, < 3) + net-http-persistent (~> 2.9) + semantic (~> 1.6.0) + socketry (~> 0.5.1) + thread_safe (~> 0.3) + +GEM + remote: https://rubygems.org/ + specs: + aws-eventstream (1.0.1) + aws-partitions (1.125.0) + aws-sdk-core (3.44.0) + aws-eventstream (~> 1.0) + aws-partitions (~> 1.0) + aws-sigv4 (~> 1.0) + jmespath (~> 1.0) + aws-sdk-dynamodb (1.18.0) + aws-sdk-core (~> 3, >= 3.39.0) + aws-sigv4 (~> 1.0) + aws-sigv4 (1.0.3) + codeclimate-test-reporter (0.6.0) + simplecov (>= 0.7.1, < 1.0.0) + concurrent-ruby (1.0.5) + concurrent-ruby (1.0.5-java) + connection_pool (2.2.1) + diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json + docile (1.1.5) + faraday (0.15.4) + multipart-post (>= 1.2, < 3) + faraday-http-cache (2.0.0) + faraday (~> 0.8) + ffi (1.9.25) + ffi (1.9.25-java) + hashdiff (0.3.7) + hitimes (1.3.0) + hitimes (1.3.0-java) + http_tools (0.4.5) + jmespath (1.4.0) + json (1.8.6) + json (1.8.6-java) + listen (3.1.5) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + ruby_dep (~> 1.2) + multipart-post (2.0.0) + net-http-persistent (2.9.4) + rake (10.5.0) + rb-fsevent (0.10.3) + rb-inotify (0.9.10) + ffi (>= 0.5.0, < 2) + redis (3.3.5) + rspec (3.7.0) + rspec-core (~> 3.7.0) + rspec-expectations (~> 3.7.0) + rspec-mocks (~> 3.7.0) + rspec-core (3.7.1) + rspec-support (~> 3.7.0) + rspec-expectations (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-mocks (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-support (3.7.0) + rspec_junit_formatter (0.3.0) + rspec-core (>= 2, < 4, != 2.12.0) + ruby_dep (1.5.0) + semantic (1.6.1) + simplecov (0.15.1) + docile (~> 1.1.0) + json (>= 1.8, < 3) + simplecov-html (~> 0.10.0) + simplecov-html (0.10.2) + socketry (0.5.1) + hitimes (~> 1.2) + thread_safe (0.3.6) + thread_safe (0.3.6-java) + timecop (0.9.1) + +PLATFORMS + java + ruby + +DEPENDENCIES + aws-sdk-dynamodb (~> 1.18) + bundler (~> 1.7) + codeclimate-test-reporter (~> 0) + connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) + ldclient-rb! + listen (~> 3.0) + rake (~> 10.0) + redis (~> 3.3.5) + rspec (~> 3.2) + rspec_junit_formatter (~> 0.3.0) + timecop (~> 0.9.1) + +BUNDLED WITH + 1.17.1 From 3b5b08e2f61243f28748c59f6722ac1a914481c8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:42:24 -0800 Subject: [PATCH 078/292] update lockfile --- Gemfile.lock | 31 ++++--------------------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 17c5725e..6c4673e4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,40 +2,23 @@ PATH remote: . 
specs: ldclient-rb (5.4.1) - concurrent-ruby (~> 1.0.4) + concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - hashdiff (~> 0.2) http_tools (~> 0.4.5) json (>= 1.8, < 3) net-http-persistent (~> 2.9) - semantic (~> 1.6.0) + semantic (~> 1.6) socketry (~> 0.5.1) - thread_safe (~> 0.3) GEM remote: https://rubygems.org/ specs: - aws-eventstream (1.0.1) - aws-partitions (1.125.0) - aws-sdk-core (3.44.0) - aws-eventstream (~> 1.0) - aws-partitions (~> 1.0) - aws-sigv4 (~> 1.0) - jmespath (~> 1.0) - aws-sdk-dynamodb (1.18.0) - aws-sdk-core (~> 3, >= 3.39.0) - aws-sigv4 (~> 1.0) - aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.0.5) - concurrent-ruby (1.0.5-java) + concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) - diplomat (2.0.2) - faraday (~> 0.9) - json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -43,11 +26,9 @@ GEM faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hashdiff (0.3.7) hitimes (1.3.0) hitimes (1.3.0-java) http_tools (0.4.5) - jmespath (1.4.0) json (1.8.6) json (1.8.6-java) listen (3.1.5) @@ -85,8 +66,6 @@ GEM simplecov-html (0.10.2) socketry (0.5.1) hitimes (~> 1.2) - thread_safe (0.3.6) - thread_safe (0.3.6-java) timecop (0.9.1) PLATFORMS @@ -94,11 +73,9 @@ PLATFORMS ruby DEPENDENCIES - aws-sdk-dynamodb (~> 1.18) bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) - diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) @@ -108,4 +85,4 @@ DEPENDENCIES timecop (~> 0.9.1) BUNDLED WITH - 1.17.1 + 1.17.3 From 8656f258d42eb16c612181eb01a6b5e6ebebf225 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 12:52:42 -0800 Subject: [PATCH 079/292] use ruby-eventsource --- Gemfile.lock | 7 +++++-- lib/ldclient-rb/stream.rb | 4 ++-- spec/stream_spec.rb | 16 ++++++++-------- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 695aaadc..f376fb32 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -5,11 +5,10 @@ PATH concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - http_tools (~> 0.4.5) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (~> 2.9) semantic (~> 1.6) - socketry (~> 0.5.1) GEM remote: https://rubygems.org/ @@ -43,6 +42,10 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) + ld-eventsource (1.0.0) + concurrent-ruby (~> 1.0) + http_tools (~> 0.4.5) + socketry (~> 0.5.1) listen (3.1.5) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index adc4bf59..e4f1b3bd 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -54,11 +54,11 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when LaunchDarklySSE::HTTPError + when SSE::Errors::HTTPError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 0ab9d3ec..648833ff 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -8,13 +8,13 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { 
LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } - let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } - let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } - let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } - let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } - let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } - let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } + let(:put_message) { SSE::StreamEvent.new(:put, '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { SSE::StreamEvent.new(:patch, '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { SSE::StreamEvent.new(:patch, '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { SSE::StreamEvent.new(:delete, '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { SSE::StreamEvent.new(:delete, '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { SSE::StreamEvent.new(:'indirect/patch', "/flags/key") } + let(:indirect_patch_segment_message) { SSE::StreamEvent.new(:'indirect/patch', "/segments/key") } it "will accept PUT methods" do processor.send(:process_message, put_message) @@ -53,7 +53,7 @@ end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) + processor.send(:process_message, SSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 1ced67ef78b84e0ff74bf0b8f791de45782f1d6e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 080/292] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0e285fcc..26dd2cb7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -88,6 +88,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From e11bf4b05cced0e29dbf24daabd08922f8b9ba84 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:30:19 -0800 Subject: [PATCH 081/292] allow net-http-persistent 3.x --- Gemfile.lock | 7 ++++--- ldclient-rb.gemspec | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 6c4673e4..28f15ccf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,13 @@ PATH remote: . 
specs: - ldclient-rb (5.4.1) + ldclient-rb (5.4.2) concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) http_tools (~> 0.4.5) json (>= 1.8, < 3) - net-http-persistent (~> 2.9) + net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) socketry (~> 0.5.1) @@ -36,7 +36,8 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (2.9.4) + net-http-persistent (3.0.0) + connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 46dac190..4e96b6b4 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", "~> 2.9" + spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" From cb2193e5c25a1c1c52fd426413c323914f873f15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 082/292] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..6fb11b32 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3f4e432c3f892e980d300e1ea4fbedcc32ebcc80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:19:39 -0800 Subject: [PATCH 083/292] rewrite requestor without Faraday - don't have proxy yet --- lib/ldclient-rb/polling.rb | 3 +- lib/ldclient-rb/requestor.rb | 53 +++++---- spec/http_util.rb | 103 +++++++++++++++++ spec/requestor_spec.rb | 208 +++++++++++++++++++++++++++-------- 4 files changed, 302 insertions(+), 65 deletions(-) create mode 100644 spec/http_util.rb diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4c6769f3..17ff7c12 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -63,8 +63,7 @@ def create_worker stop end rescue StandardError => exn - @config.logger.error { "[LDClient] Exception while polling: #{exn.inspect}" } - # TODO: log_exception(__method__.to_s, exn) + Util.log_exception(@config.logger, "Exception while polling", exn) end delta = @config.poll_interval - (Time.now - started_at) if delta > 0 diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..739ea277 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ +require "concurrent/atomics" require "json" require "net/http/persistent" -require "faraday/http_cache" module LaunchDarkly # @private @@ -16,14 +16,15 @@ def status # @private class Requestor + CacheEntry = Struct.new(:etag, :body) + def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store - - builder.adapter :net_http_persistent - end + @client = 
Net::HTTP::Persistent.new + @client.open_timeout = @config.connect_timeout + @client.read_timeout = @config.read_timeout + @cache = @config.cache_store end def request_flag(key) @@ -39,24 +40,38 @@ def request_all_data() end def make_request(path) - uri = @config.base_uri + path - res = @client.get (uri) do |req| - req.headers["Authorization"] = @sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.options.timeout = @config.read_timeout - req.options.open_timeout = @config.connect_timeout - if @config.proxy - req.options.proxy = Faraday::ProxyOptions.from @config.proxy - end + uri = URI(@config.base_uri + path) + req = Net::HTTP::Get.new(uri) + req["Authorization"] = @sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + cached = @cache.read(uri) + if !cached.nil? + req["If-None-Match"] = cached.etag end + # if @config.proxy + # req.options.proxy = Faraday::ProxyOptions.from @config.proxy + # end - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{res.status}\n\theaders: #{res.headers}\n\tbody: #{res.body}" } + res = @client.request(uri, req) + status = res.code.to_i + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } - if res.status < 200 || res.status >= 300 - raise UnexpectedResponseError.new(res.status) + if status == 304 && !cached.nil? + body = cached.body + else + @cache.delete(uri) + if status < 200 || status >= 300 + raise UnexpectedResponseError.new(status) + end + body = res.body + etag = res["etag"] + @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end + JSON.parse(body, symbolize_names: true) + end - JSON.parse(res.body, symbolize_names: true) + def stop + @client.shutdown end private :make_request diff --git a/spec/http_util.rb b/spec/http_util.rb new file mode 100644 index 00000000..764f8e48 --- /dev/null +++ b/spec/http_util.rb @@ -0,0 +1,103 @@ +require "webrick" +require "webrick/httpproxy" +require "webrick/https" + +class StubHTTPServer + attr_reader :requests + + @@next_port = 50000 + + def initialize + @port = StubHTTPServer.next_port + begin + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) + rescue Errno::EADDRINUSE + @port = StubHTTPServer.next_port + retry + end + @requests = [] + end + + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) + end + + def start + Thread.new { @server.start } + end + + def stop + @server.shutdown + end + + def base_uri + URI("http://127.0.0.1:#{@port}") + end + + def setup_response(uri_path, &action) + @server.mount_proc(uri_path, action) + end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? + res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end +end + +class StubProxyServer < StubHTTPServer + attr_reader :request_count + attr_accessor :connect_status + + def initialize + super + @request_count = 0 + end + + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ + ProxyContentHandler: proc do |req,res| + if !@connect_status.nil? 
+ res.status = @connect_status + end + @request_count += 1 + end + })) + end +end + +class NullLogger + def method_missing(*) + self + end +end + +def with_server(server = nil) + server = StubHTTPServer.new if server.nil? + begin + server.start + yield server + ensure + server.stop + end +end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..3d4a666f 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,172 @@ require "spec_helper" -require "faraday" +require "http_util" + +$sdk_key = "secret" describe LaunchDarkly::Requestor do - describe ".request_all_flags" do - describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" + def with_requestor(base_uri) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new(base_uri: base_uri)) + yield r + r.stop + end + + describe "request_all_flags" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_all_data() + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-all" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] + }) + end + end + end + + it "parses response" do + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", expected_data.to_json) + data = requestor.request_all_data() + expect(data).to eq expected_data + end + end + end + + it "sends etag from previous response" do + etag = "xyz" + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = "{}" + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + end + end + end + + it "can reuse cached data" do + etag = "xyz" + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data.to_json + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + expect(data).to eq expected_data + end + end + end + + it "replaces cached data with new data" do + etag1 = "abc" + etag2 = "xyz" + expected_data1 = { flags: { x: { key: "x" } } } + expected_data2 = { flags: { y: { key: "y" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data1.to_json + res["ETag"] = etag1 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + 
expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data2.to_json + res["ETag"] = etag2 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 3 + expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 4 + expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) + end + end + end + + it "throws exception for error status" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 400 + end + expect { requestor.request_all_data() }.to raise_error(LaunchDarkly::UnexpectedResponseError) + end + end + end + end + + describe "request_flag" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_flag("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-flags/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - - requestor.request_all_data() - end - end - describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" + end + end + end + end + + describe "request_segment" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_segment("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-segments/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - requestor.request_all_data() + end end end end From b250437df78c99c6c0774e72a5f75ca06c5adf4e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:21:32 -0800 Subject: [PATCH 084/292] reduce intermittent HTTP errors on stub server by not reusing ports --- spec/http_util.rb | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/spec/http_util.rb b/spec/http_util.rb index 
434cafc8..764f8e48 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -1,4 +1,3 @@ -require "spec_helper" require "webrick" require "webrick/httpproxy" require "webrick/https" @@ -6,8 +5,10 @@ class StubHTTPServer attr_reader :requests + @@next_port = 50000 + def initialize - @port = 50000 + @port = StubHTTPServer.next_port begin base_opts = { BindAddress: '127.0.0.1', @@ -18,12 +19,18 @@ def initialize } @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE - @port += 1 + @port = StubHTTPServer.next_port retry end @requests = [] end + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + def create_server(port, base_opts) WEBrick::HTTPServer.new(base_opts) end From 5b4b8555073bcc8c684e0317c830aee9c8fd543f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:43:46 -0800 Subject: [PATCH 085/292] fix charset handling --- lib/ldclient-rb/requestor.rb | 30 ++++++++++++++++++++++++++---- spec/requestor_spec.rb | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 739ea277..94683bcb 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -39,6 +39,12 @@ def request_all_data() make_request("/sdk/latest-all") end + def stop + @client.shutdown + end + + private + def make_request(path) uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) @@ -63,17 +69,33 @@ def make_request(path) if status < 200 || status >= 300 raise UnexpectedResponseError.new(status) end - body = res.body + body = fix_encoding(res.body, res["content-type"]) etag = res["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end JSON.parse(body, symbolize_names: true) end - def stop - @client.shutdown + def fix_encoding(body, content_type) + return body if content_type.nil? + media_type, charset = parse_content_type(content_type) + return body if charset.nil? + body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8) end - private :make_request + def parse_content_type(value) + return [nil, nil] if value.nil? 
|| value == '' + parts = value.split(/; */) + return [value, nil] if parts.count < 2 + charset = nil + parts.each do |part| + fields = part.split('=') + if fields.count >= 2 && fields[0] == 'charset' + charset = fields[1] + break + end + end + return [parts[0], charset] + end end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3d4a666f..3cc20991 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -125,6 +125,29 @@ def with_requestor(base_uri) end end + it "uses UTF-8 encoding by default" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + + it "detects other encodings from Content-Type" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["proszę", "dziękuję"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content.encode(Encoding::ISO_8859_2), + "text/plain; charset=ISO-8859-2") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + it "throws exception for error status" do with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| From 7a7c273764921f9f33ed3fd3953b1d4a2aacb8cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:59:32 -0800 Subject: [PATCH 086/292] add test for proxy support, remove obsolete property --- lib/ldclient-rb/config.rb | 15 --------------- lib/ldclient-rb/requestor.rb | 5 +---- spec/requestor_spec.rb | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..c14f59c8 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -53,7 +53,6 @@ def initialize(opts = {}) @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @poll_interval = opts.has_key?(:poll_interval) && opts[:poll_interval] > Config.default_poll_interval ? opts[:poll_interval] : Config.default_poll_interval - @proxy = opts[:proxy] || Config.default_proxy @all_attributes_private = opts[:all_attributes_private] || false @private_attribute_names = opts[:private_attribute_names] || [] @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events @@ -184,12 +183,6 @@ def offline? # attr_reader :feature_store - # - # The proxy configuration string. - # @return [String] - # - attr_reader :proxy - # # True if all user attributes (other than the key) should be considered private. This means # that the attribute values will not be sent to LaunchDarkly in analytics events and will not @@ -336,14 +329,6 @@ def self.default_connect_timeout 2 end - # - # The default value for {#proxy}. - # @return [String] nil - # - def self.default_proxy - nil - end - # # The default value for {#logger}. 
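# Illustrative note (not from the patch itself): with the :proxy option removed, the new specs in
# this series point the polling and event components at a proxy through the standard environment
# variable instead, roughly like this (the proxy address is hypothetical):
begin
  ENV["http_proxy"] = "http://localhost:8080"
  # ... construct the Requestor or event processor here and make requests through the proxy ...
ensure
  ENV["http_proxy"] = nil
end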
# @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 94683bcb..5f48d7ff 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -7,6 +7,7 @@ module LaunchDarkly class UnexpectedResponseError < StandardError def initialize(status) @status = status + super("HTTP error #{status}") end def status @@ -54,10 +55,6 @@ def make_request(path) if !cached.nil? req["If-None-Match"] = cached.etag end - # if @config.proxy - # req.options.proxy = Faraday::ProxyOptions.from @config.proxy - # end - res = @client.request(uri, req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3cc20991..eb6c2b62 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -158,6 +158,24 @@ def with_requestor(base_uri) end end end + + it "can use a proxy server" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + ensure + ENV["http_proxy"] = nil + end + end + end + end end describe "request_flag" do From 1502e61d05406ee51e2007e9a429bdbd67126f64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:16:49 -0800 Subject: [PATCH 087/292] rm duplicate test code --- spec/sse_client/sse_client_spec.rb | 2 +- spec/sse_client/sse_shared.rb | 82 -------------------------- spec/sse_client/streaming_http_spec.rb | 2 +- 3 files changed, 2 insertions(+), 84 deletions(-) delete mode 100644 spec/sse_client/sse_shared.rb diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..71e96112 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null @@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - 
ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..136a727a 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server From 485784240ffe4c747b1e60eb93ee70dbaa8b0055 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:58:28 -0800 Subject: [PATCH 088/292] change event sending to use Net::HTTP; completely remove Faraday --- ldclient-rb.gemspec | 2 - lib/ldclient-rb/cache_store.rb | 6 +-- lib/ldclient-rb/config.rb | 7 ++-- lib/ldclient-rb/events.rb | 44 +++++++++++--------- spec/events_spec.rb | 73 +++++++++++++++++++++++++--------- spec/http_util.rb | 6 +++ 6 files changed, 92 insertions(+), 46 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9d541c18..15c20739 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -33,8 +33,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] - spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] - spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 8451bb5f..b91b363d 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -2,11 +2,9 @@ module LaunchDarkly # - # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the - # concurrent-ruby gem's Map as the underlying cache. + # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we + # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. # - # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency # @private # class ThreadSafeMemoryStore diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c14f59c8..e5217f45 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -152,9 +152,10 @@ def offline? attr_reader :capacity # - # A store for HTTP caching. This must support the semantics used by the - # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults - # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. + # A store for HTTP caching (used only in polling mode). This must support the semantics used by + # the [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem, although + # the SDK no longer uses Faraday. Defaults to the Rails cache in a Rails environment, or a + # thread-safe in-memory store otherwise. 
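# Illustrative sketch of those semantics (SimpleCacheStore is a hypothetical class; within this
# patch series the SDK only ever calls read, write and delete on the configured store, and a real
# implementation should also be thread-safe):
class SimpleCacheStore
  def initialize
    @data = {}
  end

  def read(key)
    @data[key]
  end

  def write(key, value)
    @data[key] = value
  end

  def delete(key)
    @data.delete(key)
  end
end

# Assuming the option is passed in the Config options hash like the other settings shown here:
#   LaunchDarkly::Config.new(cache_store: SimpleCacheStore.new)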
# @return [Object] # attr_reader :cache_store diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index cbae5ac5..02885904 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,9 +1,9 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "net/http/persistent" require "thread" require "time" -require "faraday" module LaunchDarkly MAX_FLUSH_WORKERS = 5 @@ -115,7 +115,12 @@ class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Faraday.new + + @client = client ? client : Net::HTTP::Persistent.new do |c| + c.open_timeout = @config.connect_timeout + c.read_timeout = @config.read_timeout + end + @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @@ -162,7 +167,7 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - # There seems to be no such thing as "close" in Faraday: https://github.com/lostisland/faraday/issues/241 + @client.shutdown end def synchronize_for_testing(flush_workers) @@ -246,16 +251,17 @@ def trigger_flush(buffer, flush_workers) end def handle_response(res) - if res.status >= 400 - message = Util.http_error_message(res.status, "event delivery", "some events were dropped") + status = res.code.to_i + if status >= 400 + message = Util.http_error_message(status, "event delivery", "some events were dropped") @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(res.status) + if !Util.http_error_recoverable?(status) @disabled.value = true end else - if !res.headers.nil? && res.headers.has_key?("Date") + if !res["date"].nil? begin - res_time = (Time.httpdate(res.headers["Date"]).to_f * 1000).to_i + res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i @last_known_past_time.value = res_time rescue ArgumentError end @@ -317,21 +323,21 @@ def run(sdk_key, config, client, payload, formatter) end begin config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } - res = client.post (config.events_uri + "/bulk") do |req| - req.headers["Authorization"] = sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.headers["Content-Type"] = "application/json" - req.headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req.body = body - req.options.timeout = config.read_timeout - req.options.open_timeout = config.connect_timeout - end + uri = URI(config.events_uri + "/bulk") + req = Net::HTTP::Post.new(uri) + req.content_type = "application/json" + req.body = body + req["Authorization"] = sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + res = client.request(uri, req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." 
} next end - if res.status < 200 || res.status >= 300 - if Util.http_error_recoverable?(res.status) + status = res.code.to_i + if status < 200 || status >= 300 + if Util.http_error_recoverable?(status) next end end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 56bd14a2..86cc67b6 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -1,5 +1,5 @@ +require "http_util" require "spec_helper" -require "faraday" require "time" describe LaunchDarkly::EventProcessor do @@ -348,7 +348,7 @@ @ep.flush @ep.wait_until_inactive - expect(hc.get_request.headers["Authorization"]).to eq "sdk_key" + expect(hc.get_request["authorization"]).to eq "sdk_key" end def verify_unrecoverable_http_error(status) @@ -414,7 +414,7 @@ def verify_recoverable_http_error(status) e = { kind: "identify", user: user } @ep.add_event(e) - hc.set_exception(Faraday::Error::ConnectionFailed.new("fail")) + hc.set_exception(IOError.new("deliberate error")) @ep.flush @ep.wait_until_inactive @@ -423,6 +423,46 @@ def verify_recoverable_http_error(status) expect(hc.get_request).to be_nil # no 3rd request end + it "makes actual HTTP request with correct headers" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req.header).to include({ + "authorization" => [ "sdk_key" ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-event-schema" => [ "3" ] + }) + end + end + + it "can use a proxy server" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req["content-type"]).to eq("application/json") + ensure + ENV["http_proxy"] = nil + end + end + end + end + def index_event(e, user) { kind: "index", @@ -496,38 +536,35 @@ def reset @status = 200 end - def post(uri) - req = Faraday::Request.create("POST") - req.headers = {} - req.options = Faraday::RequestOptions.new - yield req + def request(uri, req) @requests.push(req) if @exception raise @exception else - resp = Faraday::Response.new headers = {} if @server_time headers["Date"] = @server_time.httpdate end - resp.finish({ - status: @status ? @status : 200, - response_headers: headers - }) - resp + FakeResponse.new(@status ? 
@status : 200, headers) end end def get_request @requests.shift end + + def shutdown + end end class FakeResponse - def initialize(status) - @status = status - end + include Net::HTTPHeader - attr_reader :status + attr_reader :code + + def initialize(status, headers) + @code = status.to_s + initialize_http_header(headers) + end end end diff --git a/spec/http_util.rb b/spec/http_util.rb index 764f8e48..e43e2ded 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -23,6 +23,7 @@ def initialize retry end @requests = [] + @requests_queue = Queue.new end def self.next_port @@ -62,6 +63,11 @@ def setup_ok_response(uri_path, body, content_type=nil, headers={}) def record_request(req, res) @requests.push(req) + @requests_queue << req + end + + def await_request + @requests_queue.pop end end From d658715b420ee029d85b442f643785a759aa4d5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 20:42:57 -0800 Subject: [PATCH 089/292] implement dependency ordering for feature store data --- lib/ldclient-rb/impl/store_client_wrapper.rb | 47 ++++++++++++++ lib/ldclient-rb/impl/store_data_set_sorter.rb | 57 +++++++++++++++++ lib/ldclient-rb/in_memory_store.rb | 13 +++- .../integrations/util/store_wrapper.rb | 5 ++ lib/ldclient-rb/interfaces.rb | 5 ++ lib/ldclient-rb/ldclient.rb | 14 +++- spec/ldclient_spec.rb | 64 +++++++++++++++++++ 7 files changed, 200 insertions(+), 5 deletions(-) create mode 100644 lib/ldclient-rb/impl/store_client_wrapper.rb create mode 100644 lib/ldclient-rb/impl/store_data_set_sorter.rb diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb new file mode 100644 index 00000000..f0948251 --- /dev/null +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -0,0 +1,47 @@ +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/store_data_set_sorter" + +module LaunchDarkly + module Impl + # + # Provides additional behavior that the client requires before or after feature store operations. + # Currently this just means sorting the data set for init(). In the future we may also use this + # to provide an update listener capability. + # + class FeatureStoreClientWrapper + include Interfaces::FeatureStore + + def initialize(store) + @store = store + end + + def init(all_data) + @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) + end + + def get(kind, key) + @store.get(kind, key) + end + + def all(kind) + @store.all(kind) + end + + def upsert(kind, item) + @store.upsert(kind, item) + end + + def delete(kind, key, version) + @store.delete(kind, key, version) + end + + def initialized? + @store.initialized? + end + + def stop + @store.stop + end + end + end +end diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb new file mode 100644 index 00000000..4f3635cd --- /dev/null +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -0,0 +1,57 @@ + +module LaunchDarkly + module Impl + # + # Implements a dependency graph ordering for data to be stored in a feature store. We must use this + # on every data set that will be passed to the feature store's init() method. + # + class FeatureStoreDataSetSorter + # + # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer + # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each + # data kind that has a :get_dependency_keys function, the inner hash will have an iteration order + # where B is before A if A has a dependency on B. 
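# Illustrative example of the guarantee described above, using the same shape of data as the
# spec added in this patch (FEATURES and SEGMENTS are the kind constants from in_memory_store.rb):
all_data = {
  LaunchDarkly::SEGMENTS => { o: { key: "o" } },
  LaunchDarkly::FEATURES => {
    a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] },
    b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] },
    c: { key: "c" },
    e: { key: "e" }
  }
}
sorted = LaunchDarkly::Impl::FeatureStoreDataSetSorter.sort_all_collections(all_data)
sorted.keys.first                    # => LaunchDarkly::SEGMENTS (priority 0 sorts before 1)
sorted[LaunchDarkly::FEATURES].keys  # => [:c, :e, :b, :a] – each flag follows its prerequisites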
+ # + # This implementation relies on the fact that hashes in Ruby have an iteration order that is the same + # as the insertion order. Also, due to the way we deserialize JSON received from LaunchDarkly, the + # keys in the inner hash will always be symbols. + # + def self.sort_all_collections(all_data) + outer_hash = {} + kinds = all_data.keys.sort_by { |k| + k[:priority].nil? ? k[:namespace].length : k[:priority] # arbitrary order if priority is unknown + } + kinds.each do |kind| + items = all_data[kind] + outer_hash[kind] = self.sort_collection(kind, items) + end + outer_hash + end + + def self.sort_collection(kind, input) + dependency_fn = kind[:get_dependency_keys] + return input if dependency_fn.nil? || input.empty? + remaining_items = input.clone + items_out = {} + while !remaining_items.empty? + # pick a random item that hasn't been updated yet + remaining_items.each do |key, item| + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + break + end + end + items_out + end + + def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + item_key = item[:key].to_sym + remaining_items.delete(item_key) # we won't need to visit this item again + dependency_fn.call(item).each do |dep_key| + dep_item = remaining_items[dep_key.to_sym] + self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) if !dep_item.nil? + end + items_out[item_key] = item + end + end + end +end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..c959f399 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,12 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + # + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # to ensure data consistency during non-atomic updates. + + # @private FEATURES = { - namespace: "features" + namespace: "features", + priority: 1, # that is, features should be stored after segments + get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } } }.freeze + # @private SEGMENTS = { - namespace: "segments" + namespace: "segments", + priority: 0 }.freeze # diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 46a648c1..eef22d5e 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -151,6 +151,11 @@ module FeatureStoreCore # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. 
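# A minimal sketch of a non-atomic implementation that honors this contract (hypothetical store;
# read_existing_keys, write_item and delete_item are invented helper names):
def init_internal(all_data)
  old_keys = read_existing_keys               # e.g. a Set of [namespace, key] pairs already stored
  all_data.each do |kind, items|              # outer hash, in the order the SDK provides
    items.each do |key, item|                 # inner hash likewise
      write_item(kind, item)
      old_keys.delete([kind[:namespace], key.to_s])
    end
  end
  old_keys.each { |namespace, key| delete_item(namespace, key) }  # leftovers removed only at the end
end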
+ # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..b6920fb5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -33,6 +33,11 @@ module FeatureStore # date-- there is no need to perform individual version comparisons between the existing # objects and the supplied features. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. + # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..d9a09c65 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" require "logger" @@ -23,8 +24,15 @@ class LDClient # @return [LDClient] The LaunchDarkly client instance def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key - @config = config - @store = config.feature_store + + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add + # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses + # the feature store through the Config object, so we need to make a new Config that uses + # the wrapped store. + @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) + updated_config = config.clone + updated_config.instance_variable_set(:@feature_store, @store) + @config = updated_config if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new @@ -39,7 +47,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) data_source_or_factory = @config.data_source || self.method(:create_default_data_source) if data_source_or_factory.respond_to? :call - @data_source = data_source_or_factory.call(sdk_key, config) + @data_source = data_source_or_factory.call(sdk_key, @config) else @data_source = data_source_or_factory end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b3a9592c..453f4b53 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -375,4 +375,68 @@ def event_processor expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor) end end + + describe "feature store data ordering" do + let(:dependency_ordering_test_data) { + { + LaunchDarkly::FEATURES => { + a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, + b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, + c: { key: "c" }, + d: { key: "d" }, + e: { key: "e" }, + f: { key: "f" } + }, + LaunchDarkly::SEGMENTS => { + o: { key: "o" } + } + } + } + + class FakeFeatureStore + attr_reader :received_data + + def init(all_data) + @received_data = all_data + end + end + + class FakeUpdateProcessor + def initialize(store, data) + @store = store + @data = data + end + + def start + @store.init(@data) + ev = Concurrent::Event.new + ev.set + ev + end + + def stop + end + + def initialized? 
+ true + end + end + + it "passes data set to feature store in correct order on init" do + store = FakeFeatureStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, + dependency_ordering_test_data) } + config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) + client = subject.new("secret", config) + + data = store.received_data + expect(data).not_to be_nil + expect(data.count).to eq(2) + + puts(data) + + # Segments should always come first + expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + end + end end \ No newline at end of file From ed302ad79bd63c2197eadff8c2e9ce410eed54c1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 22:17:51 -0800 Subject: [PATCH 090/292] fix incomplete test --- spec/ldclient_spec.rb | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 453f4b53..fca81ab0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -432,11 +432,26 @@ def initialized? data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) - - puts(data) # Segments should always come first expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count) + + # Features should be ordered so that a flag always appears after its prerequisites, if any + expect(data.keys[1]).to be(LaunchDarkly::FEATURES) + flags_map = data.values[1] + flags_list = flags_map.values + expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count) + flags_list.each_with_index do |item, item_index| + (item[:prerequisites] || []).each do |prereq| + prereq = flags_map[prereq[:key].to_sym] + prereq_index = flags_list.index(prereq) + if prereq_index > item_index + all_keys = (flags_list.map { |f| f[:key] }).join(", ") + raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + end + end + end end end end \ No newline at end of file From 8436be4f1e6ad7f1ff48208969fb4d6e8e73ff61 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:00:51 -0800 Subject: [PATCH 091/292] use Hash.first --- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4f3635cd..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -35,10 +35,8 @@ def self.sort_collection(kind, input) items_out = {} while !remaining_items.empty? 
# pick a random item that hasn't been updated yet - remaining_items.each do |key, item| - self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) - break - end + key, item = remaining_items.first + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) end items_out end From e84fbe7ab74894cc4ba600b05d043cc949eae21b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:58:47 -0800 Subject: [PATCH 092/292] add test for Unicode in feature store serialization --- spec/feature_store_spec_base.rb | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..3580a67f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -109,4 +109,14 @@ def new_version_plus(f, deltaVersion, attrs = {}) store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 end + + it "stores Unicode data correctly" do + flag = { + key: "tęst-feåtūre-flæg😺", + version: 1, + deleted: false + } + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end From 89209237faa8f29b2b063839a38c491a7bafda40 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:38:43 -0800 Subject: [PATCH 093/292] fill in the rest of the Consul implementation --- Gemfile.lock | 6 +- .../impl/integrations/consul_impl.rb | 84 ++++++++++++------- 2 files changed, 57 insertions(+), 33 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 72158223..2e96a86a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -6,6 +6,7 @@ PATH faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) @@ -28,6 +29,9 @@ GEM concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -36,7 +40,6 @@ GEM ffi (1.9.25) ffi (1.9.25-java) hitimes (1.3.0) - hitimes (1.3.0-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -92,6 +95,7 @@ DEPENDENCIES bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 48d308c2..5044f33c 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -22,15 +22,15 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger - @client = Diplomat::Kv.new(configuration: opts[:consul_config]) - + Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
- unused_old_keys = set() - unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + unused_old_keys = Set.new + keys = Diplomat::Kv.get(@prefix, { keys: true, recurse: true }, :return) + unused_old_keys.merge(keys) if keys != "" ops = [] num_items = 0 @@ -47,12 +47,12 @@ def init_internal(all_data) end # Now delete any previously existing items whose keys were not in the current data - unused_old_keys.each do |tuple| + unused_old_keys.each do |key| ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) end # Now set the special key that we check in initialized_internal? - ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => inited_key, 'Value' => '' } }) ConsulUtil.batch_operations(ops) @@ -60,56 +60,76 @@ def init_internal(all_data) end def get_internal(kind, key) - - resp = get_item_by_keys(namespace_for_kind(kind), key) - unmarshal_item(resp.item) + value = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found" + (value.nil? || value == "") ? nil : JSON.parse(value, symbolize_names: true) end def get_all_internal(kind) items_out = {} - + results = Diplomat::Kv.get(kind_key(kind), { recurse: true }, :return) + (results == "" ? [] : results).each do |result| + value = result[:value] + if !value.nil? + item = JSON.parse(value, symbolize_names: true) + items_out[item[:key].to_sym] = item + end + end items_out end def upsert_internal(kind, new_item) - + key = item_key(kind, new_item[:key]) + json = new_item.to_json + + # We will potentially keep retrying indefinitely until someone's write succeeds + while true + old_value = Diplomat::Kv.get(key, { decode_values: true }, :return) + if old_value.nil? || old_value == "" + mod_index = 0 + else + puts("old_value = #{old_value}") + old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) + # Check whether the item is stale. If so, don't do the update (and return the existing item to + # FeatureStoreWrapper so it can be cached) + if old_item[:version] >= new_item[:version] + return old_item + end + mod_index = old_value[0]["ModifyIndex"] + end + + # Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + # the key's ModifyIndex is still equal to the previous value. If the previous ModifyIndex was zero, + # it means the key did not previously exist and the write will only succeed if it still doesn't exist. + success = Diplomat::Kv.put(key, json, cas: mod_index) + return new_item if success + + # If we failed, retry the whole shebang + @logger.debug { "Concurrent modification detected, retrying" } + end end def initialized_internal? - + value = Diplomat::Kv.get(inited_key, {}, :return) + !value.nil? && value != "" end def stop - # There's no way to close the Consul client + # There's no Consul client instance to dispose of end private def item_key(kind, key) - kind_key(kind) + '/' + key + kind_key(kind) + key.to_s end def kind_key(kind) - @prefix + kind[:namespace] + @prefix + kind[:namespace] + '/' end def inited_key @prefix + '$inited' end - - def marshal_item(kind, item) - make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ - VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => item.to_json - }) - end - - def unmarshal_item(item) - return nil if item.nil? || item.length == 0 - json_attr = item[ITEM_JSON_ATTRIBUTE] - raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? 
- JSON.parse(json_attr, symbolize_names: true) - end end class ConsulUtil @@ -117,10 +137,10 @@ class ConsulUtil # Submits as many transactions as necessary to submit all of the given operations. # The ops array is consumed. # - def self.batch_write_requests(ops) - batch_size = 64 # Consul can only do this many at a time + def self.batch_operations(ops) + batch_size = 64 # Consul can only do this many at a time while true - chunk = requests.shift(batch_size) + chunk = ops.shift(batch_size) break if chunk.empty? Diplomat::Kv.txn(chunk) end From 21c79fe007ca47b2ff6b5fcc1bf1068a4baa1517 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:47:20 -0800 Subject: [PATCH 094/292] minor doc fixes --- README.md | 8 +++++--- lib/ldclient-rb/integrations/consul.rb | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 43819554..a194443f 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,8 @@ Note that this gem will automatically switch to using the Rails logger it is det HTTPS proxy ------------- +----------- + The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. How to set the HTTPS_PROXY environment variable on Mac/Linux systems: @@ -124,10 +125,11 @@ end Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- + For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. Learn more @@ -146,7 +148,7 @@ Contributing See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 7450d3b9..8f5d1f09 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -21,7 +21,7 @@ def self.default_prefix # # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default - # Consul client configuration + # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching From 7ea110fd0662cc835fb3fd007591806765763740 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:53:03 -0800 Subject: [PATCH 095/292] rm debugging --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 5044f33c..82a4fec9 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -87,7 +87,6 @@ def upsert_internal(kind, new_item) if old_value.nil? || old_value == "" mod_index = 0 else - puts("old_value = #{old_value}") old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) # Check whether the item is stale. If so, don't do the update (and return the existing item to # FeatureStoreWrapper so it can be cached) From 513618735575da8e177ccb871d835f9adaadefce Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 14:47:12 -0800 Subject: [PATCH 096/292] fix initialized check --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 82a4fec9..4082378f 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -108,8 +108,14 @@ def upsert_internal(kind, new_item) end def initialized_internal? - value = Diplomat::Kv.get(inited_key, {}, :return) - !value.nil? && value != "" + # Unfortunately we need to use exceptions here, instead of the :return parameter, because with + # :return there's no way to distinguish between a missing value and an empty string. 
+ begin + Diplomat::Kv.get(inited_key, {}) + true + rescue Diplomat::KeyNotFound + false + end end def stop From 46ebc1f4a826a1dc13a8140c8b6cd3eceb09db6f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 15:38:19 -0800 Subject: [PATCH 097/292] improve feature store tests + minor Redis fixes --- .../impl/integrations/redis_impl.rb | 17 +- spec/feature_store_spec_base.rb | 232 ++++++++++++------ spec/in_memory_feature_store_spec.rb | 2 +- .../integrations/consul_feature_store_spec.rb | 16 +- .../dynamodb_feature_store_spec.rb | 33 ++- spec/redis_feature_store_spec.rb | 17 +- 6 files changed, 223 insertions(+), 94 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 497b01c5..107340f8 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -48,14 +48,15 @@ def initialize(opts) def init_internal(all_data) count = 0 with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| + redis.multi do |multi| + all_data.each do |kind, items| multi.del(items_key(kind)) count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } + items.each do |key, item| + multi.hset(items_key(kind), key, item.to_json) + end end + multi.set(inited_key, inited_key) end end @logger.info { "RedisFeatureStore: initialized with #{count} items" } @@ -112,7 +113,7 @@ def upsert_internal(kind, new_item) end def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } + with_connection { |redis| redis.exists(inited_key) } end def stop @@ -135,6 +136,10 @@ def cache_key(kind, key) kind[:namespace] + ":" + key.to_s end + def inited_key + @prefix + ":$inited" + end + def with_connection @pool.with { |redis| yield(redis) } end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..8689577f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,112 +1,200 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method, clear_data_method| - let(:feature0) { + # Rather than testing with feature flag or segment data, we'll use this fake data kind + # to make it clear that feature stores need to be able to handle arbitrary data. 
+ let(:things_kind) { { namespace: "things" } } + + let(:key1) { "thing1" } + let(:thing1) { { - key: "test-feature-flag", + key: key1, + name: "Thing 1", version: 11, - on: true, - prerequisites: [], - salt: "718ea30a918a4eba8734b57ab1a93227", - sel: "fe1244e5378c4f99976c9634e33667c6", - targets: [ - { - values: [ "alice" ], - variation: 0 - }, - { - values: [ "bob" ], - variation: 1 - } - ], - rules: [], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: [ true, false ], deleted: false } } - let(:key0) { feature0[:key].to_sym } + let(:unused_key) { "no" } + + let(:create_store) { create_store_method } # just to avoid a scope issue + let(:clear_data) { clear_data_method } + + def with_store(opts = {}) + s = create_store.call(opts) + begin + yield s + ensure + s.stop + end + end - let!(:store) do - s = create_store_method.call() - s.init(LaunchDarkly::FEATURES => { key0 => feature0 }) - s + def with_inited_store(things) + things_hash = {} + things.each { |thing| things_hash[thing[:key].to_sym] = thing } + + with_store do |s| + s.init({ things_kind => things_hash }) + yield s + end end def new_version_plus(f, deltaVersion, attrs = {}) - f1 = f.clone - f1[:version] = f[:version] + deltaVersion - f1.update(attrs) - f1 + f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs) end + before(:each) do + clear_data.call if !clear_data.nil? + end - it "is initialized" do - expect(store.initialized?).to eq true + # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store + # that operates on a database that can be shared with other store instances (as opposed to the in-memory store, + # which has its own private storage). + if !clear_data_method.nil? + it "is not initialized by default" do + with_store do |store| + expect(store.initialized?).to eq false + end + end + + it "can detect if another instance has initialized the store" do + with_store do |store1| + store1.init({}) + with_store do |store2| + expect(store2.initialized?).to eq true + end + end + end + + it "can read data written by another instance" do + with_store do |store1| + store1.init({ things_kind => { key1.to_sym => thing1 } }) + with_store do |store2| + expect(store2.get(things_kind, key1)).to eq thing1 + end + end + end + + it "is independent from other stores with different prefixes" do + with_store({ prefix: "a" }) do |store_a| + store_a.init({ things_kind => { key1.to_sym => thing1 } }) + with_store({ prefix: "b" }) do |store_b| + store_b.init({ things_kind => {} }) + end + with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data + expect(store_b1.get(things_kind, key1)).to be_nil + expect(store_a.get(things_kind, key1)).to eq thing1 + end + end + end end - it "can get existing feature with symbol key" do - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "is initialized after calling init" do + with_inited_store([]) do |store| + expect(store.initialized?).to eq true + end end - it "can get existing feature with string key" do - expect(store.get(LaunchDarkly::FEATURES, key0.to_s)).to eq feature0 + it "can get existing item with symbol key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_sym)).to eq thing1 + end end - it "gets nil for nonexisting feature" do - expect(store.get(LaunchDarkly::FEATURES, 'nope')).to be_nil + it "can get existing item with string key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_s)).to eq 
thing1 + end end - it "can get all features" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.all(LaunchDarkly::FEATURES)).to eq ({ key0 => feature0, :"test-feature-flag1" => feature1 }) + it "gets nil for nonexisting item" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, unused_key)).to be_nil + end + end + + it "returns nil for deleted item" do + deleted_thing = thing1.clone.merge({ deleted: true }) + with_inited_store([ deleted_thing ]) do |store| + expect(store.get(things_kind, key1)).to be_nil + end + end + + it "can get all items" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: false + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 }) + end + end + + it "filters out deleted items when getting all" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: true + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 }) + end end - it "can add new feature" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.get(LaunchDarkly::FEATURES, :"test-feature-flag1")).to eq feature1 + it "can add new item" do + with_inited_store([]) do |store| + store.upsert(things_kind, thing1) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can update feature with newer version" do - f1 = new_version_plus(feature0, 1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq f1 + it "can update item with newer version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1_mod + end end - it "cannot update feature with same version" do - f1 = new_version_plus(feature0, 0, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot update item with same version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end it "cannot update feature with older version" do - f1 = new_version_plus(feature0, -1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can delete feature with newer version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] + 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to be_nil + it "can delete item with newer version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] + 1) + expect(store.get(things_kind, key1)).to be_nil + end end - it "cannot delete feature with same version" do - 
store.delete(LaunchDarkly::FEATURES, key0, feature0[:version]) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with same version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version]) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "cannot delete feature with older version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with older version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] - 1) + expect(store.get(things_kind, key1)).to eq thing1 + end end end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb index a1673bbc..c403fc69 100644 --- a/spec/in_memory_feature_store_spec.rb +++ b/spec/in_memory_feature_store_spec.rb @@ -1,7 +1,7 @@ require "feature_store_spec_base" require "spec_helper" -def create_in_memory_store() +def create_in_memory_store(opts = {}) LaunchDarkly::InMemoryFeatureStore.new end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 1aa6f919..13767686 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -1,5 +1,5 @@ require "feature_store_spec_base" -#require "diplomat" +require "diplomat" require "spec_helper" @@ -7,19 +7,23 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL -$base_opts = { +$consul_base_opts = { prefix: $my_prefix, logger: $null_log } def create_consul_store(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 60 })) + $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 0 })) + $consul_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + Diplomat::Kv.delete($my_prefix + '/', recurse: true) end @@ -28,10 +32,10 @@ def create_consul_store_uncached(opts = {}) # These tests will all fail if there isn't a local Consul instance running. 
context "with local cache" do - include_examples "feature_store", method(:create_consul_store) + include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached) + include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 38104fb3..4add3d53 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -15,7 +15,7 @@ endpoint: $endpoint } -$base_opts = { +$ddb_base_opts = { dynamodb_opts: $dynamodb_opts, prefix: $my_prefix, logger: $null_log @@ -23,12 +23,35 @@ def create_dynamodb_store(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 60 })) + $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 0 })) + $ddb_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = create_test_client + items_to_delete = [] + req = { + table_name: $table_name, + projection_expression: '#namespace, #key', + expression_attribute_names: { + '#namespace' => 'namespace', + '#key' => 'key' + } + } + while true + resp = client.scan(req) + items_to_delete = items_to_delete + resp.items + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + requests = items_to_delete.map do |item| + { delete_request: { key: item } } + end + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests) end def create_table_if_necessary @@ -72,10 +95,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store) + include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached) + include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d5ccfb65..3da25f4f 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -9,13 +9,22 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = Redis.new + client.flushdb end @@ -25,11 +34,11 @@ def create_redis_store_uncached(opts = {}) # These tests will all fail if there isn't a Redis instance running on the default port. 
context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store) + include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached) + include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) From 97ee2daaa6e6cedb1e74adeab7785dd2759eb68e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 12:21:59 -0800 Subject: [PATCH 098/292] test fix: we can't use Unicode in flag keys anyway --- spec/feature_store_spec_base.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 3580a67f..d004cb54 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -112,7 +112,8 @@ def new_version_plus(f, deltaVersion, attrs = {}) it "stores Unicode data correctly" do flag = { - key: "tęst-feåtūre-flæg😺", + key: "my-fancy-flag", + name: "Tęst Feåtūre Flæg😺", version: 1, deleted: false } From 21a505e366505365458b98c1bd8fd8875183bf80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 13:56:58 -0800 Subject: [PATCH 099/292] test fix --- spec/feature_store_spec_base.rb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index b7d15cc5..2d06f0ff 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -205,7 +205,9 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 1, deleted: false } - store.upsert(LaunchDarkly::FEATURES, flag) - expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + with_inited_store([]) do |store| + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end end From 784eb07fcea16cf79e36def97ebf4967926b0f05 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 18:37:31 -0800 Subject: [PATCH 100/292] misc prerelease fixes --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 + lib/ldclient-rb/integrations/consul.rb | 1 + lib/ldclient-rb/stream.rb | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 4082378f..10c16dbc 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -23,6 +23,7 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? + Diplomat.configuration.url = opts[:url] if !opts[:url].nil? 
@logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 8f5d1f09..2d46d813 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -22,6 +22,7 @@ def self.default_prefix # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) + # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e4f1b3bd..094a37b2 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -58,7 +58,7 @@ def start conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when SSE::Errors::HTTPError + when SSE::Errors::HTTPStatusError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } From e9b06c60c7e46d45487d823325e86804eb4c32fe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 17 Jan 2019 17:15:54 -0800 Subject: [PATCH 101/292] fix doc comments --- lib/ldclient-rb/integrations/consul.rb | 2 +- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- lib/ldclient-rb/integrations/util/store_wrapper.rb | 2 +- lib/ldclient-rb/redis_store.rb | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 2d46d813..4f32d5fd 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -25,7 +25,7 @@ def self.default_prefix # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index ecd87fce..189e118f 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -34,7 +34,7 @@ module DynamoDB # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no 
local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 34509181..7e447657 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -42,7 +42,7 @@ def self.default_prefix # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index eef22d5e..26318d67 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -23,7 +23,7 @@ class CachingStoreWrapper # # @param core [Object] an object that implements the {FeatureStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # def initialize(core, opts) diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 392f5d2e..48632411 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -32,7 +32,7 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally # @option opts [Object] :pool custom connection pool, if desired # From 6389a2663bf7221ca0948261dadd2c00a72fc8df Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:38:23 -0800 Subject: [PATCH 102/292] add YARD config so our docs show up correctly everywhere --- .yardopts | 10 ++++++++++ scripts/gendocs.sh | 5 ++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 .yardopts diff --git a/.yardopts b/.yardopts new file mode 100644 index 00000000..559b7ab5 --- /dev/null +++ b/.yardopts @@ -0,0 +1,10 @@ +--no-private +--markup markdown +--markup-provider redcarpet +--embed-mixins +lib/*.rb +lib/**/*.rb +lib/**/**/*.rb +lib/**/**/**/*.rb +- +README.md 
diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 1e545955..96df177f 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -6,7 +6,6 @@ gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting -# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" -PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" +rm -rf doc/* -yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md +yard doc From 3406a03430efbd839659aee23d334d48b126da03 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:53:38 -0800 Subject: [PATCH 103/292] don't need markup-provider option --- .yardopts | 1 - 1 file changed, 1 deletion(-) diff --git a/.yardopts b/.yardopts index 559b7ab5..5388ac50 100644 --- a/.yardopts +++ b/.yardopts @@ -1,6 +1,5 @@ --no-private --markup markdown ---markup-provider redcarpet --embed-mixins lib/*.rb lib/**/*.rb From d38973acf1dbdda8da4ac529e472ec434a14742f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 10:57:43 -0800 Subject: [PATCH 104/292] rm obsolete proxy param --- lib/ldclient-rb/stream.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 094a37b2..ddb7f669 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -50,7 +50,6 @@ def start } opts = { headers: headers, - proxy: @config.proxy, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } From def22fc67e8b918cd7cef3006f0d896a7ed4bc68 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 21:38:49 -0800 Subject: [PATCH 105/292] remove net-http-persistent --- Gemfile.lock | 12 +++--------- ldclient-rb.gemspec | 1 - lib/ldclient-rb/events.rb | 17 +++++++++++------ lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/requestor.rb | 15 +++++++++------ lib/ldclient-rb/util.rb | 10 ++++++++++ spec/events_spec.rb | 15 +++++++++++---- 7 files changed, 45 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 2e96a86a..21a65cc1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,10 @@ PATH remote: . 
specs: - ldclient-rb (5.4.3) + ldclient-rb (5.5.2) concurrent-ruby (~> 1.0) - faraday (>= 0.9, < 2) - faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) ld-eventsource (~> 1.0) - net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) GEM @@ -35,11 +32,10 @@ GEM docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) - faraday-http-cache (2.0.0) - faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hitimes (1.3.0) + hitimes (1.3.1) + hitimes (1.3.1-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -53,8 +49,6 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (3.0.0) - connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 810987a4..9fb4daa0 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,6 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 02885904..72c82a90 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,7 +1,6 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" -require "net/http/persistent" require "thread" require "time" @@ -116,9 +115,10 @@ def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Net::HTTP::Persistent.new do |c| - c.open_timeout = @config.connect_timeout - c.read_timeout = @config.read_timeout + if client + @client = client + else + @client = Util.new_http_client(@config.events_uri, @config) end @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @@ -167,7 +167,10 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - @client.shutdown + begin + @client.finish + rescue + end end def synchronize_for_testing(flush_workers) @@ -322,6 +325,7 @@ def run(sdk_key, config, client, payload, formatter) sleep(1) end begin + client.start if !client.started? config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } uri = URI(config.events_uri + "/bulk") req = Net::HTTP::Post.new(uri) @@ -330,7 +334,8 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - res = client.request(uri, req) + req["Connection"] = "keep-alive" + res = client.request(req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." } next diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 17ff7c12..da0427dc 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,7 @@ def start def stop if @stopped.make_true - if @worker && @worker.alive? + if @worker && @worker.alive? 
&& @worker != Thread.current @worker.run # causes the thread to wake up if it's currently in a sleep @worker.join end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 5f48d7ff..f7174787 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "net/http/persistent" +require "uri" module LaunchDarkly # @private @@ -22,9 +22,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Net::HTTP::Persistent.new - @client.open_timeout = @config.connect_timeout - @client.read_timeout = @config.read_timeout + @client = Util.new_http_client(@config.base_uri, @config) @cache = @config.cache_store end @@ -41,21 +39,26 @@ def request_all_data() end def stop - @client.shutdown + begin + @client.finish + rescue + end end private def make_request(path) + @client.start if !@client.started? uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) req["Authorization"] = @sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? req["If-None-Match"] = cached.etag end - res = @client.request(uri, req) + res = @client.request(req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index e303e18a..03849957 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,7 +1,17 @@ +require "uri" module LaunchDarkly # @private module Util + def self.new_http_client(uri_s, config) + uri = URI(uri_s) + client = Net::HTTP.new(uri.hostname, uri.port) + client.use_ssl = true if uri.scheme == "https" + client.open_timeout = config.connect_timeout + client.read_timeout = config.read_timeout + client + end + def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 86cc67b6..90b91ec9 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -536,7 +536,7 @@ def reset @status = 200 end - def request(uri, req) + def request(req) @requests.push(req) if @exception raise @exception @@ -549,11 +549,18 @@ def request(uri, req) end end - def get_request - @requests.shift + def start + end + + def started? 
+ false end - def shutdown + def finish + end + + def get_request + @requests.shift end end From f5ef9a4630df1444c6744bc33f0f07dbbdddb7cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 22:14:48 -0800 Subject: [PATCH 106/292] fix concurrent-ruby usage that breaks on Windows --- lib/ldclient-rb/events.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 72c82a90..c45a9da2 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -124,7 +124,7 @@ def initialize(queue, sdk_key, config, client) @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) - @last_known_past_time = Concurrent::AtomicFixnum.new(0) + @last_known_past_time = Concurrent::AtomicReference.new(0) buffer = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) From 9b4d75b99549393f8e12d3f0a498870b1bf52b28 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 12:39:30 -0800 Subject: [PATCH 107/292] add pipeline and clean up with with rm_rf instead of rm --- azure-pipelines.yml | 14 ++++++++++++++ spec/file_data_source_spec.rb | 10 +++++----- 2 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 azure-pipelines.yml diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..1b9cae48 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,14 @@ +jobs: + - job: build + pool: + vmImage: 'vs2017-win2016' + steps: + - task: PowerShell@2 + inputs: + targetType: inline + script: | + ruby -v + gem install bundler -v 1.17.3 + bundle install + mkdir rspec + bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 28a0c06f..c827222d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -74,7 +74,7 @@ segments: seg1: key: seg1 - include: ["user1"] + include: ["user1"] EOF } @@ -87,7 +87,7 @@ end after do - FileUtils.remove_dir(@tmp_dir) + FileUtils.rm_rf(@tmp_dir) end def make_temp_file(content) @@ -198,10 +198,10 @@ def test_auto_reload(options) event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) - + sleep(1) IO.write(file, all_properties_json) - + max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" @@ -243,7 +243,7 @@ def test_auto_reload(options) client.close end end - + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From eb4ad9703f5c230375b75dd01c3756ce0b8afcb3 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 14:52:52 -0800 Subject: [PATCH 108/292] fix highlight blocks --- README.md | 58 +++++++++++++++++++++++++++---------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index f4dc72b7..df406928 100644 --- a/README.md +++ b/README.md @@ -17,19 +17,19 @@ Quick setup 1. Install the Ruby SDK with `gem` - ```shell +```shell gem install ldclient-rb ``` 2. Require the LaunchDarkly client: - ```ruby +```ruby require 'ldclient-rb' ``` 3. 
Create a new LDClient with your SDK key: - ```ruby +```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` @@ -39,42 +39,42 @@ client = LaunchDarkly::LDClient.new("your_sdk_key") 2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - ```ruby +```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` 3. You may want to include a function in your ApplicationController - ```ruby - def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end +```ruby +def launchdarkly_settings + if current_user.present? + { + key: current_user.id, + anonymous: false, + email: current_user.email, + custom: { groups: current_user.groups.pluck(:name) }, + # Any other fields you may have + # e.g. lastName: current_user.last_name, + } + else + if Rails::VERSION::MAJOR <= 3 + hash_key = request.session_options[:id] + else + hash_key = session.id end + # session ids should be private to prevent session hijacking + hash_key = Digest::SHA256.base64digest hash_key + { + key: hash_key, + anonymous: true, + } + end +end ``` 4. In your controllers, access the client using - ```ruby +```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` From 4aa6272748587ac362bbe098cb1233acce43148b Mon Sep 17 00:00:00 2001 From: hroederld <46500128+hroederld@users.noreply.github.com> Date: Tue, 5 Feb 2019 00:12:38 +0000 Subject: [PATCH 109/292] Hr/azure3 (#103) * Add Consul and Redis services to Windows. 
* Enable Consul and Redis testing --- azure-pipelines.yml | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 1b9cae48..40d39abe 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -4,11 +4,37 @@ jobs: vmImage: 'vs2017-win2016' steps: - task: PowerShell@2 + displayName: 'Setup Consul' inputs: targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + mkdir consul + Expand-Archive -Path consul.zip -DestinationPath consul + cd consul + sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" + sc.exe start "Consul" + - task: PowerShell@2 + displayName: 'Setup Redis' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip + mkdir redis + Expand-Archive -Path redis.zip -DestinationPath redis + cd redis + ./redis-server --service-install + ./redis-server --service-start + - task: PowerShell@2 + displayName: 'Setup SDK and Test' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v gem install bundler -v 1.17.3 bundle install mkdir rspec - bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From fd143d7150d908e1734faa3eccbb3390075a2d38 Mon Sep 17 00:00:00 2001 From: hroederld <46500128+hroederld@users.noreply.github.com> Date: Tue, 5 Feb 2019 10:40:07 -0800 Subject: [PATCH 110/292] add dynamo (#104) --- azure-pipelines.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 40d39abe..3d3fd98a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -3,6 +3,17 @@ jobs: pool: vmImage: 'vs2017-win2016' steps: + - task: PowerShell@2 + displayName: 'Setup Dynamo' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip + mkdir dynamo + Expand-Archive -Path dynamo.zip -DestinationPath dynamo + cd dynamo + javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar - task: PowerShell@2 displayName: 'Setup Consul' inputs: @@ -37,4 +48,4 @@ jobs: gem install bundler -v 1.17.3 bundle install mkdir rspec - bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From 6a20ff1c1946992210fd33e1a1f7e997e29e43f5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 Feb 2019 11:10:22 -0800 Subject: [PATCH 111/292] add experimentation event overrides for rules and fallthrough --- lib/ldclient-rb/evaluation.rb | 25 +++------ lib/ldclient-rb/impl/event_factory.rb | 77 +++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 44 ++++++--------- spec/evaluation_spec.rb | 74 +++++++++++++------------ spec/ldclient_spec.rb | 57 +++++++++++++++++++- 5 files changed, 193 insertions(+), 
84 deletions(-) create mode 100644 lib/ldclient-rb/impl/event_factory.rb diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f873a6e3..14a7ea55 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -195,22 +195,22 @@ def error_result(errorKind, value = nil) # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns # the default value. Error conditions produce a result with an error reason, not an exception. - def evaluate(flag, user, store, logger) + def evaluate(flag, user, store, logger, event_factory) if user.nil? || user[:key].nil? return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end events = [] - detail = eval_internal(flag, user, store, events, logger) + detail = eval_internal(flag, user, store, events, logger, event_factory) return EvalResult.new(detail, events) end - def eval_internal(flag, user, store, events, logger) + def eval_internal(flag, user, store, events, logger, event_factory) if !flag[:on] return get_off_value(flag, { kind: 'OFF' }, logger) end - prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger, event_factory) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason, logger) end @@ -243,7 +243,7 @@ def eval_internal(flag, user, store, events, logger) return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) end - def check_prerequisites(flag, user, store, events, logger) + def check_prerequisites(flag, user, store, events, logger, event_factory) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -254,25 +254,16 @@ def check_prerequisites(flag, user, store, events, logger) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, store, events, logger) + prereq_res = eval_internal(prereq_flag, user, store, events, logger, event_factory) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end - event = { - kind: "feature", - key: prereq_key, - variation: prereq_res.variation_index, - value: prereq_res.value, - version: prereq_flag[:version], - prereqOf: flag[:key], - trackEvents: prereq_flag[:trackEvents], - debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] - } + event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) events.push(event) rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false end end diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb new file mode 100644 index 00000000..6af4c5f8 --- /dev/null +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -0,0 +1,77 @@ + +module LaunchDarkly + module Impl + # Event constructors are centralized here to avoid mistakes and repetitive logic. + # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons + # in the events (for when variation_detail is called) and one that doesn't. 
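As an illustration of the two-instance design described in the comment above, here is a minimal sketch; it assumes the internal `LaunchDarkly::Impl::EventFactory` API added in this patch, and the flag, user, and detail values are made up:

```ruby
require "ldclient-rb"

detail = LaunchDarkly::EvaluationDetail.new("on-value", 0, { kind: "FALLTHROUGH" })
flag = { key: "my-flag", version: 42 }   # no trackEvents / experiment settings
user = { key: "user-key" }

plain   = LaunchDarkly::Impl::EventFactory.new(false)
verbose = LaunchDarkly::Impl::EventFactory.new(true)

plain.new_eval_event(flag, user, detail, "default-value")
# => feature event hash without a :reason key

verbose.new_eval_event(flag, user, detail, "default-value")
# => the same event hash plus reason: { kind: "FALLTHROUGH" }
```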
+ class EventFactory + def initialize(with_reasons) + @with_reasons = with_reasons + end + + def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) + add_experiment_data = is_experiment(flag, detail.reason) + e = { + kind: 'feature', + key: flag[:key], + user: user, + variation: detail.variation_index, + value: detail.value, + default: default_value, + version: flag[:version] + } + # the following properties are handled separately so we don't waste bandwidth on unused keys + e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil? + e[:reason] = detail.reason if add_experiment_data || @with_reasons + e + end + + def new_default_event(flag, user, default_value, reason) + add_experiment_data = is_experiment(flag, reason) + e = { + kind: 'feature', + key: flag[:key], + user: user, + value: default_value, + default: default_value, + version: flag[:version] + } + e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + e[:reason] = reason if add_experiment_data || @with_reasons + e + end + + def new_unknown_flag_event(key, user, default_value, reason) + e = { + kind: 'feature', + key: key, + user: user, + value: default_value, + default: default_value + } + e[:reason] = reason if @with_reasons + e + end + + private + + def is_experiment(flag, reason) + return false if !reason + case reason[:kind] + when 'RULE_MATCH' + index = reason[:ruleIndex] + if !index.nil? + rules = flag[:rules] || [] + return index >= 0 && index < rules.length && rules[index][:trackEvents] + end + when 'FALLTHROUGH' + return !!flag[:trackEventsFallthrough] + end + false + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a5799700..0c113d0d 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" @@ -13,6 +14,7 @@ module LaunchDarkly # class LDClient include Evaluation + include Impl # # Creates a new client instance that connects to LaunchDarkly. A custom # configuration parameter can also supplied to specify advanced options, @@ -32,6 +34,9 @@ class LDClient def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key + @event_factory_default = EventFactory.new(false) + @event_factory_with_reasons = EventFactory.new(true) + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses # the feature store through the Config object, so we need to make a new Config that uses @@ -165,7 +170,7 @@ def initialized? 
# @return the variation to show the user, or the default value if there's an an error # def variation(key, user, default) - evaluate_internal(key, user, default, false).value + evaluate_internal(key, user, default, @event_factory_default).value end # @@ -192,7 +197,7 @@ def variation(key, user, default) # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) - evaluate_internal(key, user, default, true) + evaluate_internal(key, user, default, @event_factory_with_reasons) end # @@ -290,7 +295,7 @@ def all_flags_state(user, options={}) next end begin - result = evaluate(f, user, @store, @config.logger) + result = evaluate(f, user, @store, @config.logger, @event_factory_default) state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, details_only_if_tracked) rescue => exn @@ -330,7 +335,7 @@ def create_default_data_source(sdk_key, config) end # @return [EvaluationDetail] - def evaluate_internal(key, user, default, include_reasons_in_events) + def evaluate_internal(key, user, default, event_factory) if @config.offline? return error_result('CLIENT_NOT_READY', default) end @@ -340,8 +345,9 @@ def evaluate_internal(key, user, default, include_reasons_in_events) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return error_result('CLIENT_NOT_READY', default) + detail = error_result('CLIENT_NOT_READY', default) + @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) + return detail end end @@ -351,20 +357,19 @@ def evaluate_internal(key, user, default, include_reasons_in_events) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } detail = error_result('FLAG_NOT_FOUND', default) - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user, - reason: include_reasons_in_events ? detail.reason : nil) + @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } detail = error_result('USER_NOT_SPECIFIED', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end begin - res = evaluate(feature, user, @store, @config.logger) + res = evaluate(feature, user, @store, @config.logger, event_factory) if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -374,12 +379,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events) if detail.default_value? 
detail = EvaluationDetail.new(default, nil, detail.reason) end - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_eval_event(feature, user, detail, default)) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = error_result('EXCEPTION', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end end @@ -389,21 +394,6 @@ def sanitize_user(user) user[:key] = user[:key].to_s end end - - def make_feature_event(flag, user, detail, default, with_reasons) - { - kind: "feature", - key: flag[:key], - user: user, - variation: detail.variation_index, - value: detail.value, - default: default, - version: flag[:version], - trackEvents: flag[:trackEvents], - debugEventsUntilDate: flag[:debugEventsUntilDate], - reason: with_reasons ? detail.reason : nil - } - end end # diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 3af960c6..c8949b3a 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -7,6 +7,8 @@ let(:features) { LaunchDarkly::InMemoryFeatureStore.new } + let(:factory) { LaunchDarkly::Impl::EventFactory.new(false) } + let(:user) { { key: "userkey", @@ -36,7 +38,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -50,7 +52,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -66,7 +68,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -82,7 +84,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -99,7 +101,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -127,10 +129,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', 
key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -159,10 +160,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -189,10 +189,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -218,10 +217,9 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -236,7 +234,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -251,7 +249,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -266,7 +264,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -281,7 +279,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) 
- result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -299,7 +297,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -310,7 +308,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(true, 1, { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -321,7 +319,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -332,7 +330,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -343,7 +341,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -355,7 +353,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -366,28 +364,28 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "returns false for unknown operator" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'unknown', values: [4] } flag = 
boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "does not stop evaluating rules after clause with unknown operator" do @@ -397,14 +395,14 @@ def boolean_flag_with_clauses(clauses) clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } rule1 = { clauses: [ clause1 ], variation: 1 } flag = boolean_flag_with_rules([rule0, rule1]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "retrieves segment from segment store for segmentMatch operator" do @@ -419,14 +417,14 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "can be negated" do @@ -435,7 +433,7 @@ def boolean_flag_with_clauses(clauses) flag = boolean_flag_with_clauses([clause]) expect { clause[:negate] = true - }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false) + }.to change {evaluate(flag, user, features, logger, factory).detail.value}.from(true).to(false) end end @@ -538,7 +536,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x', custom: { foo: value1 } } clause = { attribute: 'foo', op: op, values: [value2] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe + expect(evaluate(flag, user, features, logger, factory).detail.value).to be shouldBe end end end @@ -629,7 +627,7 @@ def test_segment_match(segment) features.upsert(LaunchDarkly::SEGMENTS, segment) clause = make_segment_match_clause(segment) flag = boolean_flag_with_clauses([clause]) - evaluate(flag, user, features, logger).detail.value + evaluate(flag, user, features, logger, factory).detail.value end it 'explicitly includes user' do diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index fca81ab0..2916861e 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -91,7 +91,6 @@ def event_processor key: "key", version: 100, user: nil, - variation: nil, value: "default", default: "default", trackEvents: true, @@ -109,7 +108,6 @@ def event_processor key: "key", version: 100, user: bad_user, - variation: nil, value: "default", default: "default", trackEvents: true, @@ -117,6 +115,61 @@ def event_processor )) client.variation("key", bad_user, "default") end + + it "sets trackEvents and reason if trackEvents is set for matched rule" do + flag = { + key: 'flag', + on: true, + variations: [ 'value' ], + version: 100, + 
rules: [ + clauses: [ + { attribute: 'key', op: 'in', values: [ user[:key] ] } + ], + variation: 0, + id: 'id', + trackEvents: true + ] + } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: 'feature', + key: 'flag', + version: 100, + user: user, + value: 'value', + default: 'default', + trackEvents: true, + reason: { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'id' } + )) + client.variation('flag', user, 'default') + end + + it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + flag = { + key: 'flag', + on: true, + variations: [ 'value' ], + fallthrough: { variation: 0 }, + version: 100, + rules: [], + trackEventsFallthrough: true + } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: 'feature', + key: 'flag', + version: 100, + user: user, + value: 'value', + default: 'default', + trackEvents: true, + reason: { kind: 'FALLTHROUGH' } + )) + client.variation('flag', user, 'default') + end end describe '#variation_detail' do From 12f541a4a3be42d7ea70d3cb2e6f2571958636f6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 Feb 2019 12:04:13 -0800 Subject: [PATCH 112/292] warn & don't send event if identify or track has no valid user --- lib/ldclient-rb/ldclient.rb | 8 +++ spec/fixtures/numeric_key_user.json | 9 ---- spec/fixtures/sanitized_numeric_key_user.json | 9 ---- spec/ldclient_spec.rb | 54 ++++++++++++++++--- 4 files changed, 55 insertions(+), 25 deletions(-) delete mode 100644 spec/fixtures/numeric_key_user.json delete mode 100644 spec/fixtures/sanitized_numeric_key_user.json diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a5799700..28c21869 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -211,6 +211,10 @@ def variation_detail(key, user, default) # @return [void] # def identify(user) + if !user || user[:key].nil? + @config.logger.warn("Identify called with nil user or nil user key!") + return + end sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end @@ -229,6 +233,10 @@ def identify(user) # @return [void] # def track(event_name, user, data) + if !user || user[:key].nil? 
+ @config.logger.warn("Track called with nil user or nil user key!") + return + end sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end diff --git a/spec/fixtures/numeric_key_user.json b/spec/fixtures/numeric_key_user.json deleted file mode 100644 index 2a7ec475..00000000 --- a/spec/fixtures/numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": 33, - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/fixtures/sanitized_numeric_key_user.json b/spec/fixtures/sanitized_numeric_key_user.json deleted file mode 100644 index 874e0067..00000000 --- a/spec/fixtures/sanitized_numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": "33", - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index fca81ab0..6f530610 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -8,7 +8,8 @@ subject.new("secret", offline_config) end let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } + let(:logger) { double().as_null_object } + let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do subject.new("secret", config) end @@ -17,16 +18,31 @@ JSON.parse(data, symbolize_names: true) end let(:user) do - data = File.read(File.join("spec", "fixtures", "user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "user@test.com", + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: 33, + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:sanitized_numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "sanitized_numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "33", + custom: { + groups: [ "microsoft", "google" ] + } + } + end + let(:user_without_key) do + { name: "Keyless Joe" } end def event_processor @@ -342,6 +358,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", nil, nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", user_without_key, nil) + end end describe '#identify' do @@ -354,6 +382,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.identify(numeric_key_user) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(user_without_key) + end end describe 'with send_events: false' do From 2800db88876e85dc9bd918b01978f69f135d2207 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 
Feb 2019 12:18:05 -0800 Subject: [PATCH 113/292] include user in prereq flag events --- lib/ldclient-rb/evaluation.rb | 3 ++- spec/evaluation_spec.rb | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f873a6e3..1b5bbdca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -263,6 +263,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, + user: user, variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], @@ -272,7 +273,7 @@ def check_prerequisites(flag, user, store, events, logger) } events.push(event) rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false end end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 3af960c6..68824ebd 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -127,7 +127,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -159,7 +159,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -189,7 +189,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -218,7 +218,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) From 47106d9da24380ec3b7ee630a674a15dfef21dac Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 16:12:29 -0800 Subject: [PATCH 114/292] rm unnecessary logic --- lib/ldclient-rb/impl/event_factory.rb | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 6af4c5f8..83dc76d9 100644 --- a/lib/ldclient-rb/impl/event_factory.rb 
+++ b/lib/ldclient-rb/impl/event_factory.rb @@ -29,7 +29,6 @@ def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) end def new_default_event(flag, user, default_value, reason) - add_experiment_data = is_experiment(flag, reason) e = { kind: 'feature', key: flag[:key], @@ -38,9 +37,9 @@ def new_default_event(flag, user, default_value, reason) default: default_value, version: flag[:version] } - e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:trackEvents] = true if flag[:trackEvents] e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - e[:reason] = reason if add_experiment_data || @with_reasons + e[:reason] = reason if @with_reasons e end From 763a222eece4e9eec4d8b7e441af62f8c2f4f607 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Feb 2019 15:25:43 -0800 Subject: [PATCH 115/292] more factory methods --- lib/ldclient-rb/impl/event_factory.rb | 20 ++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 4 ++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 83dc76d9..a43f6a33 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -4,6 +4,9 @@ module Impl # Event constructors are centralized here to avoid mistakes and repetitive logic. # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons # in the events (for when variation_detail is called) and one that doesn't. + # + # Note that these methods do not set the "creationDate" property, because in the Ruby client, + # that is done by EventProcessor.add_event(). class EventFactory def initialize(with_reasons) @with_reasons = with_reasons @@ -55,6 +58,23 @@ def new_unknown_flag_event(key, user, default_value, reason) e end + def new_identify_event(user) + { + kind: 'identify', + key: user[:key], + user: user + } + end + + def new_custom_event(event_name, user, data) + { + kind: 'custom', + key: event_name, + user: user, + data: data + } + end + private def is_experiment(flag, reason) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 0c113d0d..bf396827 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -217,7 +217,7 @@ def variation_detail(key, user, default) # def identify(user) sanitize_user(user) - @event_processor.add_event(kind: "identify", key: user[:key], user: user) + @event_processor.add_event(@event_factory_default.new_identify_event(user)) end # @@ -235,7 +235,7 @@ def identify(user) # def track(event_name, user, data) sanitize_user(user) - @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) + @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data)) end # From 4c234619ce652efcb8658ba5f9c85c728db138b7 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Wed, 6 Mar 2019 12:48:21 -0800 Subject: [PATCH 116/292] update readme to refer to docs --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index df406928..1c3eaa8a 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ require 'ldclient-rb' client = LaunchDarkly::LDClient.new("your_sdk_key") ``` +*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), 
[Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).* + ### Ruby on Rails 1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` From 232f419e2bd69d5a6e46ca2e32b58aed42a4ceb2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Mar 2019 12:03:59 -0700 Subject: [PATCH 117/292] add Ruby 2.6.2 to CI --- .circleci/config.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8201b95d..c6ff6938 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,6 +9,7 @@ workflows: - test-2.3 - test-2.4 - test-2.5 + - test-2.6 - test-jruby-9.2 ruby-docker-template: &ruby-docker-template @@ -57,6 +58,13 @@ jobs: - image: consul - image: redis - image: amazon/dynamodb-local + test-2.6: + <<: *ruby-docker-template + docker: + - image: circleci/ruby:2.6.2-stretch + - image: consul + - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: From 20d109b8a1561ed5a57b4c3fa1836cbbb30852d7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Mar 2019 16:08:50 -0700 Subject: [PATCH 118/292] fix missing require for net/http --- lib/ldclient-rb/util.rb | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 03849957..396a5171 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,3 +1,4 @@ +require "net/http" require "uri" module LaunchDarkly From 651dc37b8d13d75b8cba51d5069fe6af944d776d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 16:12:52 -0700 Subject: [PATCH 119/292] stringify built-in user attributes in events, and secondary key for evals --- lib/ldclient-rb/evaluation.rb | 8 ++++- lib/ldclient-rb/events.rb | 20 +++++++---- lib/ldclient-rb/ldclient.rb | 15 ++------ lib/ldclient-rb/util.rb | 15 ++++++++ spec/evaluation_spec.rb | 19 ++++++++++ spec/events_spec.rb | 65 +++++++++++++++++++++++++++++++++++ spec/ldclient_spec.rb | 26 -------------- 7 files changed, 122 insertions(+), 46 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 1b5bbdca..112aa975 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -189,6 +189,10 @@ def self.comparator(converter) # Used internally to hold an evaluation result and the events that were generated from prerequisites. EvalResult = Struct.new(:detail, :events) + ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ] + # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. + # This is because it could affect evaluation results for existing users (ch35206). 
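The conversion itself is done by the `Util.stringify_attrs` helper added to `util.rb` later in this patch; a rough sketch of its effect (the user hash below is hypothetical):

```ruby
require "ldclient-rb"

user = { key: 999, secondary: 42, custom: { age: 99 } }

LaunchDarkly::Util.stringify_attrs(user, [ :key, :secondary ])
# => { key: "999", secondary: "42", custom: { age: 99 } }
# Only the listed attributes are converted; the original hash is left
# unmodified, and a copy is made only if a value actually had to change.
```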
+ def error_result(errorKind, value = nil) EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) end @@ -200,8 +204,10 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end + sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + events = [] - detail = eval_internal(flag, user, store, events, logger) + detail = eval_internal(flag, sanitized_user, store, events, logger) return EvalResult.new(detail, events) end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..69563572 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -7,9 +7,12 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, + :avatar, :name ] private_constant :MAX_FLUSH_WORKERS private_constant :CURRENT_SCHEMA_VERSION + private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private class NullEventProcessor @@ -219,7 +222,7 @@ def notice_user(user) if user.nil? || !user.has_key?(:key) true else - @user_keys.add(user[:key]) + @user_keys.add(user[:key].to_s) end end @@ -371,6 +374,11 @@ def make_output_events(events, summary) private + def process_user(event) + filtered = @user_filter.transform_user_props(event[:user]) + Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) + end + def make_output_event(event) case event[:kind] when "feature" @@ -386,7 +394,7 @@ def make_output_event(event) out[:version] = event[:version] if event.has_key?(:version) out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) if @inline_users || is_debug - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end @@ -396,8 +404,8 @@ def make_output_event(event) { kind: "identify", creationDate: event[:creationDate], - key: event[:user].nil? ? nil : event[:user][:key], - user: @user_filter.transform_user_props(event[:user]) + key: event[:user].nil? ? nil : event[:user][:key].to_s, + user: process_user(event) } when "custom" out = { @@ -407,7 +415,7 @@ def make_output_event(event) } out[:data] = event[:data] if event.has_key?(:data) if @inline_users - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? 
nil : event[:user][:key] end @@ -416,7 +424,7 @@ def make_output_event(event) { kind: "index", creationDate: event[:creationDate], - user: @user_filter.transform_user_props(event[:user]) + user: process_user(event) } else event diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 28c21869..3680619a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -215,7 +215,6 @@ def identify(user) @config.logger.warn("Identify called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end @@ -237,7 +236,6 @@ def track(event_name, user, data) @config.logger.warn("Track called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end @@ -280,8 +278,6 @@ def all_flags_state(user, options={}) return FeatureFlagsState.new(false) end - sanitize_user(user) - begin features = @store.all(FEATURES) rescue => exn @@ -353,7 +349,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - sanitize_user(user) if !user.nil? feature = @store.get(FEATURES, key) if feature.nil? @@ -367,12 +362,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events) unless user @config.logger.error { "[LDClient] Must specify user" } detail = error_result('USER_NOT_SPECIFIED', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(make_feature_event(feature, nil, detail, default, include_reasons_in_events)) return detail end begin - res = evaluate(feature, user, @store, @config.logger) + res = evaluate(feature, user, @store, @config.logger) # note, evaluate will do its own sanitization if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -392,12 +387,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - def sanitize_user(user) - if user[:key] - user[:key] = user[:key].to_s - end - end - def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 396a5171..e129c279 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,6 +4,21 @@ module LaunchDarkly # @private module Util + def self.stringify_attrs(hash, attrs) + return hash if hash.nil? + ret = hash + changed = false + attrs.each do |attr| + value = hash[attr] + if !value.nil? 
&& !value.is_a?(String) + ret = hash.clone if !changed + ret[attr] = value.to_s + changed = true + end + end + ret + end + def self.new_http_client(uri_s, config) uri = URI(uri_s) client = Net::HTTP.new(uri.hostname, uri.port) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 68824ebd..52a617b6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -359,6 +359,25 @@ def boolean_flag_with_clauses(clauses) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end end describe "clause" do diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..557c3594 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -9,6 +9,10 @@ let(:hc) { FakeHttpClient.new } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } + let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, + avatar: 8, name: 9, anonymous: false, custom: { age: 99 } } } + let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', + lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } after(:each) do if !@ep.nil? 
@@ -40,6 +44,21 @@ }) end + it "stringifies built-in user attributes in identify event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + e = { kind: "identify", key: numeric_user[:key], user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + kind: "identify", + key: numeric_user[:key].to_s, + creationDate: e[:creationDate], + user: stringified_numeric_user + ) + end + it "queues individual feature event with index event" do @ep = subject.new("sdk_key", default_config, hc) flag = { key: "flagkey", version: 11 } @@ -75,6 +94,23 @@ ) end + it "stringifies built-in user attributes in index event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(index_event(fe, stringified_numeric_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end + it "can include inline user in feature event" do config = LaunchDarkly::Config.new(inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -92,6 +128,23 @@ ) end + it "stringifies built-in user attributes in feature event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, stringified_numeric_user)), + include(:kind => "summary") + ) + end + it "filters user in feature event" do config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -323,6 +376,18 @@ ) end + it "stringifies built-in user attributes in custom event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + e = { kind: "custom", key: "eventkey", user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(custom_event(e, stringified_numeric_user)) + ) + end + it "does a final flush when shutting down" do @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", key: user[:key], user: user } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6f530610..86cb5be5 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -25,22 +25,6 @@ } } end - let(:numeric_key_user) do - { - key: 33, - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:sanitized_numeric_key_user) do - { - key: "33", - custom: { - groups: [ "microsoft", "google" ] - } - } - end let(:user_without_key) do { name: "Keyless Joe" } end @@ -354,11 +338,6 @@ def event_processor client.track("custom_event_name", user, 42) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.track("custom_event_name", numeric_key_user, nil) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) @@ -378,11 +357,6 @@ def event_processor 
client.identify(user) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.identify(numeric_key_user) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) From da15bdd62515fb5ae47c23fd67c7956073465e23 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 16:15:56 -0700 Subject: [PATCH 120/292] make const names consistent --- lib/ldclient-rb/evaluation.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 112aa975..7edef6b2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -189,7 +189,7 @@ def self.comparator(converter) # Used internally to hold an evaluation result and the events that were generated from prerequisites. EvalResult = Struct.new(:detail, :events) - ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ] + USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION = [ :key, :secondary ] # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. # This is because it could affect evaluation results for existing users (ch35206). @@ -204,7 +204,7 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end - sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + sanitized_user = Util.stringify_attrs(user, USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION) events = [] detail = eval_internal(flag, sanitized_user, store, events, logger) From 260bd1b8cd75c110dd3f719fb907d255d90a0747 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 13 Apr 2019 16:16:43 -0700 Subject: [PATCH 121/292] support metric value with track() --- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/impl/event_factory.rb | 10 ++++++---- lib/ldclient-rb/ldclient.rb | 7 ++++--- spec/events_spec.rb | 3 ++- spec/ldclient_spec.rb | 6 ++++++ 5 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..22fdd38c 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -411,6 +411,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) out when "index" { diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index a43f6a33..2e7d2697 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -66,13 +66,15 @@ def new_identify_event(user) } end - def new_custom_event(event_name, user, data) - { + def new_custom_event(event_name, user, data, metric_value) + e = { kind: 'custom', key: event_name, - user: user, - data: data + user: user } + e[:data] = data if !data.nil? + e[:metricValue] = metric_value if !metric_value.nil? 
+ e end private diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index bf396827..dc40602c 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -230,12 +230,13 @@ def identify(user) # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} - # @param data [Hash] A hash containing any additional data associated with the event + # @param data [Hash] An optional hash containing any additional data associated with the event + # @param metric_value [Number] An optional numeric value that can be used for analytics purposes # @return [void] # - def track(event_name, user, data) + def track(event_name, user, data = nil, metric_value = nil) sanitize_user(user) - @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data)) + @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) end # diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..31b74b08 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -289,7 +289,7 @@ it "queues custom event with user" do @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } @ep.add_event(e) output = flush_and_get_events @@ -500,6 +500,7 @@ def custom_event(e, inline_user) else out[:user] = inline_user end + out[:metricValue] = e[:metricValue] if e.has_key?(:metricValue) out end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 2916861e..c07d4023 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -391,6 +391,12 @@ def event_processor client.track("custom_event_name", user, 42) end + it "can include a metric value" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: user, metricValue: 1.5)) + client.track("custom_event_name", user, nil, 1.5) + end + it "sanitizes the user in the event" do expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) From 703ffe5ca65e3299a2c7ffbef4baac42ddd08beb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 16 Apr 2019 18:41:36 -0700 Subject: [PATCH 122/292] update method description --- lib/ldclient-rb/ldclient.rb | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index dc40602c..bd2e1225 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,7 +231,10 @@ def identify(user) # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} # @param data [Hash] An optional hash containing any additional data associated with the event - # @param metric_value [Number] An optional numeric value that can be used for analytics purposes + # @param metric_value [Number] A numeric value used by the LaunchDarkly experimentation + # feature in numeric custom metrics. Can be omitted if this event is used by only + # non-numeric metrics. This field will also be returned as part of the custom event + # for Data Export. 
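A hedged usage sketch of the expanded `track` signature (the SDK key, event names, and values here are hypothetical; `data` and `metric_value` remain optional):

```ruby
require "ldclient-rb"

client = LaunchDarkly::LDClient.new("your_sdk_key")
user = { key: "user-key" }

# data and metric_value are both optional positional arguments:
client.track("checkout-completed", user)
client.track("checkout-completed", user, { items: 3 })
client.track("order-value", user, nil, 59.95)  # numeric metric for experimentation
```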
# @return [void] # def track(event_name, user, data = nil, metric_value = nil) From 4bc671bd7dbaf400441b6a8a7852f8e45f437c42 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Mon, 6 May 2019 16:11:43 -0700 Subject: [PATCH 123/292] applying markdown templates and updating repository url references --- CHANGELOG.md | 16 ++--- CONTRIBUTING.md | 39 +++++++++- Gemfile.lock | 4 +- README.md | 169 +++++++------------------------------------- ldclient-rb.gemspec | 2 +- scripts/release.sh | 4 +- 6 files changed, 74 insertions(+), 160 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af4ffb62..2a4c2269 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,12 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.5.5] - 2019-03-28 ### Fixed: -- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-client/issues/131)) +- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-server-sdk/issues/131)) ## [5.5.4] - 2019-03-29 ### Fixed: -- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-client/issues/129)) -- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-client/issues/128)) +- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-server-sdk/issues/129)) +- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-server-sdk/issues/128)) - If `track` or `identify` is called without a user, the SDK now logs a warning, and does not send an analytics event to LaunchDarkly (since it would not be processed without a user). - Added a link from the SDK readme to the guide regarding the client initialization. @@ -44,7 +44,7 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ### Fixed: - Added or corrected a large number of documentation comments. All API classes and methods are now documented, and internal implementation details have been hidden from the documentation. You can view the latest documentation on [RubyDoc](https://www.rubydoc.info/gems/ldclient-rb). 
- Fixed a problem in the Redis feature store that would only happen under unlikely circumstances: trying to evaluate a flag when the LaunchDarkly client had not yet been fully initialized and the store did not yet have data in it, and then trying again when the client was still not ready but the store _did_ have data (presumably put there by another process). Previously, the second attempt would fail. -- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-client/issues/90)) +- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-server-sdk/issues/90)) ### Deprecated: - `RedisFeatureStore.new`. This implementation class may be changed or moved in the future; use `LaunchDarkly::Integrations::Redis::new_feature_store`. @@ -52,16 +52,16 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.4.3] - 2019-01-11 ### Changed: -- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-client/pull/113)!) +- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-server-sdk/pull/113)!) ## [5.4.2] - 2019-01-04 ### Fixed: -- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-client/issues/115)) +- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-server-sdk/issues/115)) - Removed obsolete dependencies on `hashdiff` and `thread_safe`. ## [5.4.1] - 2018-11-05 ### Fixed: -- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-client/pull/110)!) +- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-server-sdk/pull/110)!) ## [5.4.0] - 2018-11-02 @@ -128,7 +128,7 @@ Fixed a regression in version 5.0.0 that could prevent the client from reconnect ## [3.0.2] - 2018-03-06 ## Fixed -- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-client/pull/98)!) +- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-server-sdk/pull/98)!) ## [3.0.1] - 2018-02-26 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c6b8dd20..618877f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,37 @@ -Contributing to LaunchDarkly SDK for Ruby -========================================= +Contributing to the LaunchDarkly Server-side SDK for Ruby +================================================ -We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. 
+ +Submitting bug reports and feature requests +------------------ + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/ruby-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. + +Submitting pull requests +------------------ + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +Build instructions +------------------ + +### Prerequisites + +This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. + +### Building + +To build the SDK without running any tests: + +``` +bundle install +``` + +### Testing + +To run all unit tests: + +``` +bundle exec rspec spec +``` \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 21a65cc1..aa131e55 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.2) + ldclient-rb (5.5.5) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -23,7 +23,7 @@ GEM aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.4) + concurrent-ruby (1.1.5) connection_pool (2.2.1) diff-lcs (1.3) diplomat (2.0.2) diff --git a/README.md b/README.md index 1c3eaa8a..0fc5a9d2 100644 --- a/README.md +++ b/README.md @@ -1,135 +1,27 @@ -LaunchDarkly SDK for Ruby +LaunchDarkly Server-side SDK for Ruby =========================== [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) -[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-client/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-client/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-client/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-client/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-client/master.svg)](https://hakiri.io/github/launchdarkly/ruby-client/master) +[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) +[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) +[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) + +LaunchDarkly overview +------------------------- +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/docs/getting-started) using LaunchDarkly today! 
+ +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) Supported Ruby versions ----------------------- This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby. -Quick setup +Getting started ----------- -1. Install the Ruby SDK with `gem` - -```shell -gem install ldclient-rb -``` - -2. Require the LaunchDarkly client: - -```ruby -require 'ldclient-rb' -``` - -3. Create a new LDClient with your SDK key: - -```ruby -client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), [Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).* - -### Ruby on Rails - -1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` - -2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - -```ruby -Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -3. You may want to include a function in your ApplicationController - -```ruby -def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end -end -``` - -4. In your controllers, access the client using - -```ruby -Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) -``` - -Note that this gem will automatically switch to using the Rails logger it is detected. - - -Your first feature flag ------------------------ - -1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - -HTTPS proxy ------------ - -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) 
- -How to set the HTTPS_PROXY environment variable on Mac/Linux systems: -``` -export HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -How to set the HTTPS_PROXY environment variable on Windows systems: -``` -set HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -If your proxy requires authentication then you can prefix the URN with your login information: -``` -export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` -or -``` -set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` - -Database integrations ---------------------- - -Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. - -Using flag data from a file ---------------------------- - -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. +Refer to the [SDK documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-getting-started) for instructions on getting started with using the SDK. Learn more ----------- @@ -140,37 +32,26 @@ Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/l Testing ------- - + We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. - + Contributing ------------ - -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). - + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. + About LaunchDarkly ------------------- - +----------- + * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. 
* Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. -* LaunchDarkly provides feature flag SDKs for - * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "Java SDK") - * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") - * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") - * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") - * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") - * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") - * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") - * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") - * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") - * [Android](http://docs.launchdarkly.com/docs/android-sdk-reference "LaunchDarkly Android SDK") +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/docs) for a complete list. * Explore LaunchDarkly - * [launchdarkly.com](http://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information - * [docs.launchdarkly.com](http://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDKs - * [apidocs.launchdarkly.com](http://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation - * [blog.launchdarkly.com](http://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates + * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies \ No newline at end of file diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9fb4daa0..d1a19483 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -12,7 +12,7 @@ Gem::Specification.new do |spec| spec.email = ["team@launchdarkly.com"] spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" - spec.homepage = "https://github.com/launchdarkly/ruby-client" + spec.homepage = "https://github.com/launchdarkly/ruby-server-sdk" spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") diff --git a/scripts/release.sh b/scripts/release.sh index 18537846..314fe8b9 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -9,7 +9,7 @@ # When done you should commit and push the changes made. 
set -uxe -echo "Starting ruby-client release." +echo "Starting ruby-server-sdk release." VERSION=$1 @@ -24,4 +24,4 @@ gem build ldclient-rb.gemspec # Publish Ruby Gem gem push ldclient-rb-${VERSION}.gem -echo "Done with ruby-client release" \ No newline at end of file +echo "Done with ruby-server-sdk release" \ No newline at end of file From a56ee201360abd3a9c3ad9cb55cc6a7c9d493fef Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Tue, 7 May 2019 12:55:19 -0700 Subject: [PATCH 124/292] Cleaning up markdown files --- CONTRIBUTING.md | 4 +--- README.md | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 618877f8..6ed90ddb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -20,9 +20,7 @@ Build instructions This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. -### Building - -To build the SDK without running any tests: +To install the runtime dependencies: ``` bundle install diff --git a/README.md b/README.md index 0fc5a9d2..7795ddb9 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,7 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) +[![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) LaunchDarkly overview ------------------------- From 962e729b88f81efdf61d3905aa2a888bfed6e8e7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 May 2019 18:01:29 -0700 Subject: [PATCH 125/292] allow skipping database tests --- CONTRIBUTING.md | 4 +++- spec/integrations/consul_feature_store_spec.rb | 1 + spec/integrations/dynamodb_feature_store_spec.rb | 3 ++- spec/redis_feature_store_spec.rb | 2 ++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6ed90ddb..ac126eec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -32,4 +32,6 @@ To run all unit tests: ``` bundle exec rspec spec -``` \ No newline at end of file +``` + +By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 13767686..45f87097 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -28,6 +28,7 @@ def clear_all_data describe "Consul feature store" do + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4add3d53..d924b30a 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -89,7 +89,8 @@ def create_test_client describe "DynamoDB feature store" do - + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a local DynamoDB instance running. create_table_if_necessary diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 3da25f4f..0f372184 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -31,6 +31,8 @@ def clear_all_data describe LaunchDarkly::RedisFeatureStore do subject { LaunchDarkly::RedisFeatureStore } + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a Redis instance running on the default port. context "real Redis with local cache" do From f32b9c694b8ceb6f761fd068d085b2cce05200fc Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Mon, 13 May 2019 13:28:36 -0700 Subject: [PATCH 126/292] Updating the package name (#115) * update package name * missed one * revert module entry point name change --- Gemfile.lock | 4 ++-- README.md | 4 ++-- ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec | 2 +- scripts/gendocs.sh | 2 +- scripts/release.sh | 8 ++++---- 5 files changed, 10 insertions(+), 10 deletions(-) rename ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec (97%) diff --git a/Gemfile.lock b/Gemfile.lock index aa131e55..a076f848 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.5) + launchdarkly-server-sdk (5.5.6) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -90,7 +90,7 @@ DEPENDENCIES codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) diplomat (>= 2.0.2) - ldclient-rb! + launchdarkly-server-sdk! listen (~> 3.0) rake (~> 10.0) redis (~> 3.3.5) diff --git a/README.md b/README.md index 7795ddb9..d3f99b69 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ LaunchDarkly Server-side SDK for Ruby =========================== -[![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) +[![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) @@ -27,7 +27,7 @@ Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). -Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). 
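The rename in this patch only affects the published gem name; the commit notes above say the module entry point name change was reverted, so application code should keep requiring `ldclient-rb`. A small sketch of a consuming project after the rename, with a placeholder SDK key:

```ruby
# Gemfile entry (the gem is now published under the new name):
#   gem "launchdarkly-server-sdk"

# Application code is unchanged by the rename; the require path stays the same:
require "ldclient-rb"

client = LaunchDarkly::LDClient.new("your-sdk-key")
```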
Testing ------- diff --git a/ldclient-rb.gemspec b/launchdarkly-server-sdk.gemspec similarity index 97% rename from ldclient-rb.gemspec rename to launchdarkly-server-sdk.gemspec index d1a19483..911e438b 100644 --- a/ldclient-rb.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -6,7 +6,7 @@ require "ldclient-rb/version" # rubocop:disable Metrics/BlockLength Gem::Specification.new do |spec| - spec.name = "ldclient-rb" + spec.name = "launchdarkly-server-sdk" spec.version = LaunchDarkly::VERSION spec.authors = ["LaunchDarkly"] spec.email = ["team@launchdarkly.com"] diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 96df177f..c5ec7dcf 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,7 +1,7 @@ #!/bin/bash # Use this script to generate documentation locally in ./doc so it can be proofed before release. -# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb +# After release, documentation will be visible at https://www.rubydoc.info/gems/launchdarkly-server-sdk gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting diff --git a/scripts/release.sh b/scripts/release.sh index 314fe8b9..9813240c 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# This script updates the version for the ldclient library and releases it to RubyGems +# This script updates the version for the launchdarkly-server-sdk library and releases it to RubyGems # It will only work if you have the proper credentials set up in ~/.gem/credentials # It takes exactly one argument: the new version. @@ -13,15 +13,15 @@ echo "Starting ruby-server-sdk release." VERSION=$1 -#Update version in ldclient/version.py +#Update version in lib/ldclient-rb/version.rb VERSION_RB_TEMP=./version.rb.tmp sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/ldclient-rb/version.rb > ${VERSION_RB_TEMP} mv ${VERSION_RB_TEMP} lib/ldclient-rb/version.rb # Build Ruby Gem -gem build ldclient-rb.gemspec +gem build launchdarkly-server-sdk.gemspec # Publish Ruby Gem -gem push ldclient-rb-${VERSION}.gem +gem push launchdarkly-server-sdk-${VERSION}.gem echo "Done with ruby-server-sdk release" \ No newline at end of file From 8defb308a44a25f056fcd7260393e25e8a277dbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 10 Jul 2019 17:49:38 -0700 Subject: [PATCH 127/292] bump ld-eventsource version for stream logging fix --- Gemfile.lock | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index a076f848..155eccf2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,10 +1,10 @@ PATH remote: . 
specs: - launchdarkly-server-sdk (5.5.6) + launchdarkly-server-sdk (5.5.7) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (~> 1.0) + ld-eventsource (= 1.0.1) semantic (~> 1.6) GEM @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.0) + ld-eventsource (1.0.1) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 911e438b..2e95cd41 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,5 +36,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", '~> 1.0' + spec.add_runtime_dependency "ld-eventsource", "1.0.1" end From c1aeaa3a8f35c7fb375507c4886b6e874e36ef69 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 11:49:41 -0700 Subject: [PATCH 128/292] use YAML.safe_load --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From f8aac44e2b4b0b2022ec845cd8eef68b57e3b8cd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:25:34 -0700 Subject: [PATCH 129/292] add unit test and temporarily revert fix to demonstrate failure --- lib/ldclient-rb/file_data_source.rb | 2 +- spec/file_data_source_spec.rb | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). 
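This fix, and the temporary revert in the next hunk, comes down to a single call: parsing flag data with `YAML.safe_load` rather than `YAML.load`. A standalone sketch of the difference, using a made-up tag name (the exact exception class may vary with the Psych version in use):

```ruby
require "yaml"

content = "--- !ruby/hash:SomeArbitraryClass\nfoo: bar\n"

# YAML.load honors !ruby/... tags, so a crafted flag file could instantiate
# arbitrary Ruby classes (and call methods such as []= on them) during parsing.
begin
  YAML.load(content)
rescue StandardError
  # raises only when the named class is not defined; otherwise it builds the object
end

# YAML.safe_load rejects custom Ruby types, so the file is treated as plain data.
begin
  YAML.safe_load(content)
rescue Psych::DisallowedClass => e
  puts "refused: #{e.message}"
end
```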
- symbolize_all_keys(YAML.safe_load(content)) + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c827222d..837b775d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -1,6 +1,14 @@ require "spec_helper" require "tempfile" +# see does not allow Ruby objects in YAML" for the purpose of the following two things +$created_bad_class = false +class BadClassWeShouldNotInstantiate < Hash + def []=(key, value) + $created_bad_class = true + end +end + describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } let(:full_flag_1_value) { "on" } @@ -78,6 +86,12 @@ EOF } + let(:unsafe_yaml) { <<-EOF +--- !ruby/hash:BadClassWeShouldNotInstantiate +foo: bar +EOF + } + let(:bad_file_path) { "no-such-file" } before do @@ -138,6 +152,20 @@ def with_data_source(options) end end + it "does not allow Ruby objects in YAML" do + # This tests for the vulnerability described here: https://trailofbits.github.io/rubysec/yaml/index.html + # The file we're loading contains a hash with a custom Ruby class, BadClassWeShouldNotInstantiate (see top + # of file). If we're not loading in safe mode, it will create an instance of that class and call its []= + # method, which we've defined to set $created_bad_class to true. In safe mode, it refuses to parse this file. + file = make_temp_file(unsafe_yaml) + with_data_source({ paths: [file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + expect($created_bad_class).to eq(false) + end + end + it "sets start event and initialized on successful load" do file = make_temp_file(all_properties_json) with_data_source({ paths: [ file.path ] }) do |ds| From 8f480604b3b96f7bc3070bf49996f23078a871f2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:27:52 -0700 Subject: [PATCH 130/292] restore fix --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From 8fa005fee2d0800c5da76e745d82e0476cadd6af Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 13:08:50 -0700 Subject: [PATCH 131/292] add comment about not using FileDataSource in production --- lib/ldclient-rb/file_data_source.rb | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..cfea75f7 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -21,9 +21,11 @@ def self.have_listen? end # - # Provides a way to use local files as a source of feature flag state. This would typically be - # used in a test environment, to operate using a predetermined feature flag state without an - # actual LaunchDarkly connection. + # Provides a way to use local files as a source of feature flag state. 
This allows using a + # predetermined feature flag state without an actual LaunchDarkly connection. + # + # Reading flags from a file is only intended for pre-production environments. Production + # environments should always be configured to receive flag updates from LaunchDarkly. # # To use this component, call {FileDataSource#factory}, and store its return value in the # {Config#data_source} property of your LaunchDarkly client configuration. In the options From 963e4ebadf716b5014dc9f60e8fa0c445bd07a9b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 20:31:58 -0700 Subject: [PATCH 132/292] drop events if inbox is full --- lib/ldclient-rb/events.rb | 87 ++++++++++++++++++++++++++++----------- 1 file changed, 62 insertions(+), 25 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 69563572..f57287a4 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -4,6 +4,23 @@ require "thread" require "time" +# +# Analytics event processing in the SDK involves several components. The purpose of this design is to +# minimize overhead on the application threads that are generating analytics events. +# +# EventProcessor receives an analytics event from the SDK client, on an application thread. It places +# the event in a bounded queue, the "inbox", and immediately returns. +# +# On a separate worker thread, EventDispatcher consumes events from the inbox. These are considered +# "input events" because they may or may not actually be sent to LaunchDarkly; most flag evaluation +# events are not sent, but are counted and the counters become part of a single summary event. +# EventDispatcher updates those counters, creates "index" events for any users that have not been seen +# recently, and places any events that will be sent to LaunchDarkly into the "outbox" queue. +# +# When it is time to flush events to LaunchDarkly, the contents of the outbox are handed off to +# another worker thread which sends the HTTP request. +# + module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 @@ -68,28 +85,30 @@ class StopMessage < SynchronousMessage # @private class EventProcessor def initialize(sdk_key, config, client = nil) - @queue = Queue.new + @logger = config.logger + @inbox = SizedQueue.new(config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end @flush_task.execute @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do - @queue << FlushUsersMessage.new + post_to_inbox(FlushUsersMessage.new) end @users_flush_task.execute @stopped = Concurrent::AtomicBoolean.new(false) - - EventDispatcher.new(@queue, sdk_key, config, client) + @inbox_full = Concurrent::AtomicBoolean.new(false) + + EventDispatcher.new(@inbox, sdk_key, config, client) end def add_event(event) event[:creationDate] = (Time.now.to_f * 1000).to_i - @queue << EventMessage.new(event) + post_to_inbox(EventMessage.new(event)) end def flush # flush is done asynchronously - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end def stop @@ -97,9 +116,11 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown - @queue << FlushMessage.new + # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox + # is full; an orderly shutdown can't happen unless these messages are received. 
+ @inbox << FlushMessage.new stop_msg = StopMessage.new - @queue << stop_msg + @inbox << stop_msg stop_msg.wait_for_completion end end @@ -107,14 +128,30 @@ def stop # exposed only for testing def wait_until_inactive sync_msg = TestSyncMessage.new - @queue << sync_msg + @inbox << sync_msg sync_msg.wait_for_completion end + + private + + def post_to_inbox(message) + begin + @inbox.push(message, non_block=true) + rescue ThreadError + # If the inbox is full, it means the EventDispatcher thread is seriously backed up with not-yet-processed + # events. This is unlikely, but if it happens, it means the application is probably doing a ton of flag + # evaluations across many threads-- so if we wait for a space in the inbox, we risk a very serious slowdown + # of the app. To avoid that, we'll just drop the event. The log warning about this will only be shown once. + if @inbox_full.make_true + @logger.warn { "[LDClient] Events are being produced faster than they can be processed; some events will be dropped" } + end + end + end end # @private class EventDispatcher - def initialize(queue, sdk_key, config, client) + def initialize(inbox, sdk_key, config, client) @sdk_key = sdk_key @config = config @@ -129,10 +166,10 @@ def initialize(queue, sdk_key, config, client) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - buffer = EventBuffer.new(config.capacity, config.logger) + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) - Thread.new { main_loop(queue, buffer, flush_workers) } + Thread.new { main_loop(inbox, outbox, flush_workers) } end private @@ -141,16 +178,16 @@ def now_millis() (Time.now.to_f * 1000).to_i end - def main_loop(queue, buffer, flush_workers) + def main_loop(inbox, outbox, flush_workers) running = true while running do begin - message = queue.pop + message = inbox.pop case message when EventMessage - dispatch_event(message.event, buffer) + dispatch_event(message.event, outbox) when FlushMessage - trigger_flush(buffer, flush_workers) + trigger_flush(outbox, flush_workers) when FlushUsersMessage @user_keys.clear when TestSyncMessage @@ -181,11 +218,11 @@ def synchronize_for_testing(flush_workers) flush_workers.wait_all end - def dispatch_event(event, buffer) + def dispatch_event(event, outbox) return if @disabled.value # Always record the event in the summary. - buffer.add_to_summary(event) + outbox.add_to_summary(event) # Decide whether to add the event to the payload. Feature events may be added twice, once for # the event (if tracked) and once for debugging. @@ -205,7 +242,7 @@ def dispatch_event(event, buffer) # an identify event for that user. if !(will_add_full_event && @config.inline_users_in_events) if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify" - buffer.add_event({ + outbox.add_event({ kind: "index", creationDate: event[:creationDate], user: event[:user] @@ -213,8 +250,8 @@ def dispatch_event(event, buffer) end end - buffer.add_event(event) if will_add_full_event - buffer.add_event(debug_event) if !debug_event.nil? + outbox.add_event(event) if will_add_full_event + outbox.add_event(debug_event) if !debug_event.nil? end # Add to the set of users we've noticed, and return true if the user was already known to us. 
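The `post_to_inbox` method added above relies on `SizedQueue#push` with its `non_block` argument, which raises `ThreadError` instead of waiting when the queue is at capacity. A self-contained sketch of that pattern, with an arbitrary queue size:

```ruby
require "thread"

inbox = SizedQueue.new(2)   # bounded queue, analogous to the event inbox
dropped = 0

3.times do |i|
  begin
    inbox.push(i, true)     # non_block = true: raise instead of blocking for space
  rescue ThreadError
    dropped += 1            # queue was full; drop the message rather than stall the caller
  end
end

puts "queued #{inbox.size}, dropped #{dropped}"   # => queued 2, dropped 1
```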
@@ -236,12 +273,12 @@ def should_debug_event(event) end end - def trigger_flush(buffer, flush_workers) + def trigger_flush(outbox, flush_workers) if @disabled.value return end - payload = buffer.get_payload + payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? # If all available worker threads are busy, success will be false and no job will be queued. success = flush_workers.post do @@ -252,7 +289,7 @@ def trigger_flush(buffer, flush_workers) Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end - buffer.clear if success # Reset our internal state, these events now belong to the flush worker + outbox.clear if success # Reset our internal state, these events now belong to the flush worker end end From f0581a0120c987f9af5b1e42c09cffe2fb486ac8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 15:39:48 -0700 Subject: [PATCH 133/292] update doc comment for track with metric_value --- lib/ldclient-rb/ldclient.rb | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f75c8930..b7c2ee85 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,6 +231,11 @@ def identify(user) # Note that event delivery is asynchronous, so the event may not actually be sent # until later; see {#flush}. # + # As of this version’s release date, the LaunchDarkly service does not support the `metricValue` + # parameter. As a result, specifying `metricValue` will not yet produce any different behavior + # from omitting it. Refer to the [SDK reference guide](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-track) + # for the latest status. + # # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} From 7620721cdee390659cd86bd679c47b3d9781f9e9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 13:59:17 -0800 Subject: [PATCH 134/292] don't let user fall outside of last bucket in rollout --- lib/ldclient-rb/evaluation.rb | 18 ++++++++---- spec/evaluation_spec.rb | 52 +++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 43a03c23..d0d2aa38 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -323,20 +323,28 @@ def clause_match_user_no_segments(clause, user) end def variation_index_for_user(flag, rule, user) - if !rule[:variation].nil? # fixed variation - return rule[:variation] - elsif !rule[:rollout].nil? # percentage rollout + variation = rule[:variation] + return variation if !variation.nil? # fixed variation + rollout = rule[:rollout] + return nil if rollout.nil? + variations = rollout[:variations] + if !variations.nil? && variations.length > 0 # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) sum = 0; - rollout[:variations].each do |variate| + variations.each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum return variate[:variation] end end - nil + # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + # to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag + # data could contain buckets that don't actually add up to 100000. 
Rather than returning an error in + # this case (or changing the scaling, which would potentially change the results for *all* users), we + # will simply put the user in the last bucket. + variations[-1][:variation] else # the rule isn't well-formed nil end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index ff4b63f6..2efbd745 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -560,6 +560,58 @@ def boolean_flag_with_clauses(clauses) end end + describe "variation_index_for_user" do + it "matches bucket" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + expect(bucket_value).to be > 0 + expect(bucket_value).to be < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + rollout: { + variations: [ + { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be matched_variation + end + + it "uses last bucket if bucket value is equal to total weight" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + + # We'll construct a list of variations that stops right at the target bucket value + rule = { + rollout: { + variations: [ + { variation: 0, weight: bucket_value } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be 0 + end + end + describe "bucket_user" do it "gets expected bucket values for specific keys" do user = { key: "userKeyA" } From fbf8eb9d58aa34bf4cd21c8c9d24e1e62615c922 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:08:44 -0800 Subject: [PATCH 135/292] refactor evaluation logic and move it out of the main namespace --- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/evaluation.rb | 445 ------------ lib/ldclient-rb/evaluation_detail.rb | 80 +++ lib/ldclient-rb/impl/evaluator.rb | 223 ++++++ lib/ldclient-rb/impl/evaluator_bucketing.rb | 32 + lib/ldclient-rb/impl/evaluator_operators.rb | 128 ++++ lib/ldclient-rb/ldclient.rb | 20 +- spec/evaluation_spec.rb | 737 -------------------- spec/impl/evaluator_bucketing_spec.rb | 59 ++ spec/impl/evaluator_operators_spec.rb | 106 +++ spec/impl/evaluator_spec.rb | 600 ++++++++++++++++ 11 files changed, 1240 insertions(+), 1191 deletions(-) delete mode 100644 lib/ldclient-rb/evaluation.rb create mode 100644 lib/ldclient-rb/evaluation_detail.rb create mode 100644 lib/ldclient-rb/impl/evaluator.rb create mode 100644 lib/ldclient-rb/impl/evaluator_bucketing.rb create mode 100644 lib/ldclient-rb/impl/evaluator_operators.rb delete mode 100644 spec/evaluation_spec.rb create mode 100644 spec/impl/evaluator_bucketing_spec.rb create mode 100644 spec/impl/evaluator_operators_spec.rb create mode 100644 spec/impl/evaluator_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 
e5477ecb..9a215686 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -8,7 +8,6 @@ module LaunchDarkly require "ldclient-rb/version" require "ldclient-rb/interfaces" require "ldclient-rb/util" -require "ldclient-rb/evaluation" require "ldclient-rb/flags_state" require "ldclient-rb/ldclient" require "ldclient-rb/cache_store" diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb deleted file mode 100644 index 43a03c23..00000000 --- a/lib/ldclient-rb/evaluation.rb +++ /dev/null @@ -1,445 +0,0 @@ -require "date" -require "semantic" - -module LaunchDarkly - # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with - # an explanation of how it was calculated. - class EvaluationDetail - def initialize(value, variation_index, reason) - @value = value - @variation_index = variation_index - @reason = reason - end - - # - # The result of the flag evaluation. This will be either one of the flag's variations, or the - # default value that was passed to {LDClient#variation_detail}. It is the same as the return - # value of {LDClient#variation}. - # - # @return [Object] - # - attr_reader :value - - # - # The index of the returned value within the flag's list of variations. The first variation is - # 0, the second is 1, etc. This is `nil` if the default value was returned. - # - # @return [int|nil] - # - attr_reader :variation_index - - # - # An object describing the main factor that influenced the flag evaluation value. - # - # This object is currently represented as a Hash, which may have the following keys: - # - # `:kind`: The general category of reason. Possible values: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation - # * `'ERROR'`: the flag could not be evaluated, so the default value was returned - # - # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the - # matched rule (0 for the first rule). - # - # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. - # - # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of - # the prerequisite flag that failed. - # - # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: - # - # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had - # successfully initialized - # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag - # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a - # rule specified a nonexistent variation - # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied - # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation - # - # @return [Hash] - # - attr_reader :reason - - # - # Tests whether the flag evaluation returned a default value. This is the same as checking - # whether {#variation_index} is nil. - # - # @return [Boolean] - # - def default_value? - variation_index.nil? 
- end - - def ==(other) - @value == other.value && @variation_index == other.variation_index && @reason == other.reason - end - end - - # @private - module Evaluation - BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] - - NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") - - DATE_OPERAND = lambda do |v| - if v.is_a? String - begin - DateTime.rfc3339(v).strftime("%Q").to_i - rescue => e - nil - end - elsif v.is_a? Numeric - v - else - nil - end - end - - SEMVER_OPERAND = lambda do |v| - semver = nil - if v.is_a? String - for _ in 0..2 do - begin - semver = Semantic::Version.new(v) - break # Some versions of jruby cannot properly handle a return here and return from the method that calls this lambda - rescue ArgumentError - v = addZeroVersionComponent(v) - end - end - end - semver - end - - def self.addZeroVersionComponent(v) - NUMERIC_VERSION_COMPONENTS_REGEX.match(v) { |m| - m[0] + ".0" + v[m[0].length..-1] - } - end - - def self.comparator(converter) - lambda do |a, b| - av = converter.call(a) - bv = converter.call(b) - if !av.nil? && !bv.nil? - yield av <=> bv - else - return false - end - end - end - - OPERATORS = { - in: - lambda do |a, b| - a == b - end, - endsWith: - lambda do |a, b| - (a.is_a? String) && (a.end_with? b) - end, - startsWith: - lambda do |a, b| - (a.is_a? String) && (a.start_with? b) - end, - matches: - lambda do |a, b| - (b.is_a? String) && !(Regexp.new b).match(a).nil? - end, - contains: - lambda do |a, b| - (a.is_a? String) && (a.include? b) - end, - lessThan: - lambda do |a, b| - (a.is_a? Numeric) && (a < b) - end, - lessThanOrEqual: - lambda do |a, b| - (a.is_a? Numeric) && (a <= b) - end, - greaterThan: - lambda do |a, b| - (a.is_a? Numeric) && (a > b) - end, - greaterThanOrEqual: - lambda do |a, b| - (a.is_a? Numeric) && (a >= b) - end, - before: - comparator(DATE_OPERAND) { |n| n < 0 }, - after: - comparator(DATE_OPERAND) { |n| n > 0 }, - semVerEqual: - comparator(SEMVER_OPERAND) { |n| n == 0 }, - semVerLessThan: - comparator(SEMVER_OPERAND) { |n| n < 0 }, - semVerGreaterThan: - comparator(SEMVER_OPERAND) { |n| n > 0 }, - segmentMatch: - lambda do |a, b| - false # we should never reach this - instead we special-case this operator in clause_match_user - end - } - - # Used internally to hold an evaluation result and the events that were generated from prerequisites. - EvalResult = Struct.new(:detail, :events) - - USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION = [ :key, :secondary ] - # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. - # This is because it could affect evaluation results for existing users (ch35206). - - def error_result(errorKind, value = nil) - EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) - end - - # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns - # the default value. Error conditions produce a result with an error reason, not an exception. - def evaluate(flag, user, store, logger, event_factory) - if user.nil? || user[:key].nil? 
- return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) - end - - sanitized_user = Util.stringify_attrs(user, USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION) - - events = [] - detail = eval_internal(flag, sanitized_user, store, events, logger, event_factory) - return EvalResult.new(detail, events) - end - - def eval_internal(flag, user, store, events, logger, event_factory) - if !flag[:on] - return get_off_value(flag, { kind: 'OFF' }, logger) - end - - prereq_failure_reason = check_prerequisites(flag, user, store, events, logger, event_factory) - if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason, logger) - end - - # Check user target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| - if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }, logger) - end - end - end - - # Check custom rules - rules = flag[:rules] || [] - rules.each_index do |i| - rule = rules[i] - if rule_match_user(rule, user, store) - return get_value_for_variation_or_rollout(flag, rule, user, - { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }, logger) - end - end - - # Check the fallthrough rule - if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, - { kind: 'FALLTHROUGH' }, logger) - end - - return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) - end - - def check_prerequisites(flag, user, store, events, logger, event_factory) - (flag[:prerequisites] || []).each do |prerequisite| - prereq_ok = true - prereq_key = prerequisite[:key] - prereq_flag = store.get(FEATURES, prereq_key) - - if prereq_flag.nil? - logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } - prereq_ok = false - else - begin - prereq_res = eval_internal(prereq_flag, user, store, events, logger, event_factory) - # Note that if the prerequisite flag is off, we don't consider it a match no matter what its - # off variation was. But we still need to evaluate it in order to generate an event. - if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] - prereq_ok = false - end - event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) - events.push(event) - rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) - prereq_ok = false - end - end - if !prereq_ok - return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } - end - end - nil - end - - def rule_match_user(rule, user, store) - return false if !rule[:clauses] - - (rule[:clauses] || []).each do |clause| - return false if !clause_match_user(clause, user, store) - end - - return true - end - - def clause_match_user(clause, user, store) - # In the case of a segment match operator, we check if the user is in any of the segments, - # and possibly negate - if clause[:op].to_sym == :segmentMatch - (clause[:values] || []).each do |v| - segment = store.get(SEGMENTS, v) - return maybe_negate(clause, true) if !segment.nil? && segment_match_user(segment, user) - end - return maybe_negate(clause, false) - end - clause_match_user_no_segments(clause, user) - end - - def clause_match_user_no_segments(clause, user) - val = user_value(user, clause[:attribute]) - return false if val.nil? - - op = OPERATORS[clause[:op].to_sym] - if op.nil? - return false - end - - if val.is_a? 
Enumerable - val.each do |v| - return maybe_negate(clause, true) if match_any(op, v, clause[:values]) - end - return maybe_negate(clause, false) - end - - maybe_negate(clause, match_any(op, val, clause[:values])) - end - - def variation_index_for_user(flag, rule, user) - if !rule[:variation].nil? # fixed variation - return rule[:variation] - elsif !rule[:rollout].nil? # percentage rollout - rollout = rule[:rollout] - bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] - bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) - sum = 0; - rollout[:variations].each do |variate| - sum += variate[:weight].to_f / 100000.0 - if bucket < sum - return variate[:variation] - end - end - nil - else # the rule isn't well-formed - nil - end - end - - def segment_match_user(segment, user) - return false unless user[:key] - - return true if segment[:included].include?(user[:key]) - return false if segment[:excluded].include?(user[:key]) - - (segment[:rules] || []).each do |r| - return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) - end - - return false - end - - def segment_rule_match_user(rule, user, segment_key, salt) - (rule[:clauses] || []).each do |c| - return false unless clause_match_user_no_segments(c, user) - end - - # If the weight is absent, this rule matches - return true if !rule[:weight] - - # All of the clauses are met. See if the user buckets in - bucket = bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) - weight = rule[:weight].to_f / 100000.0 - return bucket < weight - end - - def bucket_user(user, key, bucket_by, salt) - return nil unless user[:key] - - id_hash = bucketable_string_value(user_value(user, bucket_by)) - if id_hash.nil? - return 0.0 - end - - if user[:secondary] - id_hash += "." + user[:secondary] - end - - hash_key = "%s.%s.%s" % [key, salt, id_hash] - - hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] - hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) - end - - def bucketable_string_value(value) - return value if value.is_a? String - return value.to_s if value.is_a? Integer - nil - end - - def user_value(user, attribute) - attribute = attribute.to_sym - - if BUILTINS.include? attribute - user[attribute] - elsif !user[:custom].nil? - user[:custom][attribute] - else - nil - end - end - - def maybe_negate(clause, b) - clause[:negate] ? !b : b - end - - def match_any(op, value, values) - values.each do |v| - return true if op.call(value, v) - end - return false - end - - private - - def get_variation(flag, index, reason, logger) - if index < 0 || index >= flag[:variations].length - logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") - return error_result('MALFORMED_FLAG') - end - EvaluationDetail.new(flag[:variations][index], index, reason) - end - - def get_off_value(flag, reason, logger) - if flag[:offVariation].nil? # off variation unspecified - return default value - return EvaluationDetail.new(nil, nil, reason) - end - get_variation(flag, flag[:offVariation], reason, logger) - end - - def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) - index = variation_index_for_user(flag, vr, user) - if index.nil? 
- logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") - return error_result('MALFORMED_FLAG') - end - return get_variation(flag, index, reason, logger) - end - end -end diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb new file mode 100644 index 00000000..9db9f0fe --- /dev/null +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -0,0 +1,80 @@ + +module LaunchDarkly +# An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with + # an explanation of how it was calculated. + class EvaluationDetail + def initialize(value, variation_index, reason) + @value = value + @variation_index = variation_index + @reason = reason + end + + # + # The result of the flag evaluation. This will be either one of the flag's variations, or the + # default value that was passed to {LDClient#variation_detail}. It is the same as the return + # value of {LDClient#variation}. + # + # @return [Object] + # + attr_reader :value + + # + # The index of the returned value within the flag's list of variations. The first variation is + # 0, the second is 1, etc. This is `nil` if the default value was returned. + # + # @return [int|nil] + # + attr_reader :variation_index + + # + # An object describing the main factor that influenced the flag evaluation value. + # + # This object is currently represented as a Hash, which may have the following keys: + # + # `:kind`: The general category of reason. Possible values: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation + # * `'ERROR'`: the flag could not be evaluated, so the default value was returned + # + # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the + # matched rule (0 for the first rule). + # + # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. + # + # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of + # the prerequisite flag that failed. + # + # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: + # + # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had + # successfully initialized + # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag + # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a + # rule specified a nonexistent variation + # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied + # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation + # + # @return [Hash] + # + attr_reader :reason + + # + # Tests whether the flag evaluation returned a default value. This is the same as checking + # whether {#variation_index} is nil. + # + # @return [Boolean] + # + def default_value? + variation_index.nil? 
+ end + + def ==(other) + @value == other.value && @variation_index == other.variation_index && @reason == other.reason + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb new file mode 100644 index 00000000..e84e369b --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -0,0 +1,223 @@ +require "ldclient-rb/evaluation_detail" +require "ldclient-rb/impl/evaluator_bucketing" +require "ldclient-rb/impl/evaluator_operators" + +module LaunchDarkly + module Impl + class Evaluator + def initialize(get_flag, get_segment, logger) + @get_flag = get_flag + @get_segment = get_segment + @logger = logger + end + + # Used internally to hold an evaluation result and the events that were generated from prerequisites. + EvalResult = Struct.new(:detail, :events) + + def self.error_result(errorKind, value = nil) + EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + end + + # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns + # the default value. Error conditions produce a result with an error reason, not an exception. + def evaluate(flag, user, event_factory) + if user.nil? || user[:key].nil? + return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) + end + + # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature + # request events for prerequisites and we can skip allocating an array. + if flag[:prerequisites] && !flag[:prerequisites].empty? + events = [] + else + events = nil + end + + detail = eval_internal(flag, user, events, event_factory) + return EvalResult.new(detail, events.nil? || events.empty? ? nil : events) + end + + private + + def eval_internal(flag, user, events, event_factory) + if !flag[:on] + return get_off_value(flag, { kind: 'OFF' }) + end + + prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) + if !prereq_failure_reason.nil? + return get_off_value(flag, prereq_failure_reason) + end + + # Check user target matches + (flag[:targets] || []).each do |target| + (target[:values] || []).each do |value| + if value == user[:key] + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + end + end + end + + # Check custom rules + rules = flag[:rules] || [] + rules.each_index do |i| + rule = rules[i] + if rule_match_user(rule, user) + return get_value_for_variation_or_rollout(flag, rule, user, + { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }) + end + end + + # Check the fallthrough rule + if !flag[:fallthrough].nil? + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, + { kind: 'FALLTHROUGH' }) + end + + return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) + end + + def check_prerequisites(flag, user, events, event_factory) + (flag[:prerequisites] || []).each do |prerequisite| + prereq_ok = true + prereq_key = prerequisite[:key] + prereq_flag = @get_flag.call(prereq_key) + + if prereq_flag.nil? + @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + prereq_ok = false + else + begin + prereq_res = eval_internal(prereq_flag, user, events, event_factory) + # Note that if the prerequisite flag is off, we don't consider it a match no matter what its + # off variation was. But we still need to evaluate it in order to generate an event. 
+ if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + prereq_ok = false + end + event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) + events.push(event) + rescue => exn + Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) + prereq_ok = false + end + end + if !prereq_ok + return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } + end + end + nil + end + + def rule_match_user(rule, user) + return false if !rule[:clauses] + + (rule[:clauses] || []).each do |clause| + return false if !clause_match_user(clause, user) + end + + return true + end + + def clause_match_user(clause, user) + # In the case of a segment match operator, we check if the user is in any of the segments, + # and possibly negate + if clause[:op].to_sym == :segmentMatch + result = (clause[:values] || []).any? { |v| + segment = @get_segment.call(v) + !segment.nil? && segment_match_user(segment, user) + } + clause[:negate] ? !result : result + else + clause_match_user_no_segments(clause, user) + end + end + + def clause_match_user_no_segments(clause, user) + user_val = EvaluatorOperators.user_value(user, clause[:attribute]) + return false if user_val.nil? + + op = clause[:op].to_sym + clause_vals = clause[:values] + result = if user_val.is_a? Enumerable + user_val.any? { |uv| clause_vals.any? { |cv| EvaluatorOperators.apply(op, uv, cv) } } + else + clause_vals.any? { |cv| EvaluatorOperators.apply(op, user_val, cv) } + end + clause[:negate] ? !result : result + end + + def variation_index_for_user(flag, rule, user) + if !rule[:variation].nil? # fixed variation + return rule[:variation] + elsif !rule[:rollout].nil? # percentage rollout + rollout = rule[:rollout] + bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] + bucket = EvaluatorBucketing.bucket_user(user, flag[:key], bucket_by, flag[:salt]) + sum = 0; + rollout[:variations].each do |variate| + sum += variate[:weight].to_f / 100000.0 + if bucket < sum + return variate[:variation] + end + end + nil + else # the rule isn't well-formed + nil + end + end + + def segment_match_user(segment, user) + return false unless user[:key] + + return true if segment[:included].include?(user[:key]) + return false if segment[:excluded].include?(user[:key]) + + (segment[:rules] || []).each do |r| + return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) + end + + return false + end + + def segment_rule_match_user(rule, user, segment_key, salt) + (rule[:clauses] || []).each do |c| + return false unless clause_match_user_no_segments(c, user) + end + + # If the weight is absent, this rule matches + return true if !rule[:weight] + + # All of the clauses are met. See if the user buckets in + bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) + weight = rule[:weight].to_f / 100000.0 + return bucket < weight + end + + private + + def get_variation(flag, index, reason) + if index < 0 || index >= flag[:variations].length + @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") + return Evaluator.error_result('MALFORMED_FLAG') + end + EvaluationDetail.new(flag[:variations][index], index, reason) + end + + def get_off_value(flag, reason) + if flag[:offVariation].nil? 
# off variation unspecified - return default value + return EvaluationDetail.new(nil, nil, reason) + end + get_variation(flag, flag[:offVariation], reason) + end + + def get_value_for_variation_or_rollout(flag, vr, user, reason) + index = variation_index_for_user(flag, vr, user) + if index.nil? + @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + return Evaluator.error_result('MALFORMED_FLAG') + end + return get_variation(flag, index, reason) + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb new file mode 100644 index 00000000..273ec1e6 --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -0,0 +1,32 @@ + +module LaunchDarkly + module Impl + module EvaluatorBucketing + def self.bucket_user(user, key, bucket_by, salt) + return nil unless user[:key] + + id_hash = bucketable_string_value(EvaluatorOperators.user_value(user, bucket_by)) + if id_hash.nil? + return 0.0 + end + + if user[:secondary] + id_hash += "." + user[:secondary].to_s + end + + hash_key = "%s.%s.%s" % [key, salt, id_hash] + + hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] + hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) + end + + private + + def self.bucketable_string_value(value) + return value if value.is_a? String + return value.to_s if value.is_a? Integer + nil + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb new file mode 100644 index 00000000..2bc8643b --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -0,0 +1,128 @@ +require "date" +require "semantic" + +module LaunchDarkly + module Impl + module EvaluatorOperators + def self.apply(op, user_value, clause_value) + case op + when :in + user_value == clause_value + when :startsWith + string_op(user_value, clause_value, lambda { |a, b| a.start_with? b }) + when :endsWith + string_op(user_value, clause_value, lambda { |a, b| a.end_with? b }) + when :contains + string_op(user_value, clause_value, lambda { |a, b| a.include? b }) + when :matches + string_op(user_value, clause_value, lambda { |a, b| !(Regexp.new b).match(a).nil? }) + when :lessThan + numeric_op(user_value, clause_value, lambda { |a, b| a < b }) + when :lessThanOrEqual + numeric_op(user_value, clause_value, lambda { |a, b| a <= b }) + when :greaterThan + numeric_op(user_value, clause_value, lambda { |a, b| a > b }) + when :greaterThanOrEqual + numeric_op(user_value, clause_value, lambda { |a, b| a >= b }) + when :before + date_op(user_value, clause_value, lambda { |a, b| a < b }) + when :after + date_op(user_value, clause_value, lambda { |a, b| a > b }) + when :semVerEqual + semver_op(user_value, clause_value, lambda { |a, b| a == b }) + when :semVerLessThan + semver_op(user_value, clause_value, lambda { |a, b| a < b }) + when :semVerGreaterThan + semver_op(user_value, clause_value, lambda { |a, b| a > b }) + when :segmentMatch + false # we should never reach this - instead we special-case this operator in clause_match_user + else + false + end + end + + def self.user_value(user, attribute) + attribute = attribute.to_sym + if BUILTINS.include? attribute + value = user[attribute] + return value.to_s if !value.nil? && !(value.is_a? String) + value + elsif !user[:custom].nil? 
+ user[:custom][attribute] + else + nil + end + end + + private + + BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") + + private_constant :BUILTINS + private_constant :NUMERIC_VERSION_COMPONENTS_REGEX + + def self.string_op(user_value, clause_value, fn) + (user_value.is_a? String) && (clause_value.is_a? String) && fn.call(user_value, clause_value) + end + + def self.numeric_op(user_value, clause_value, fn) + (user_value.is_a? Numeric) && (clause_value.is_a? Numeric) && fn.call(user_value, clause_value) + end + + def self.date_op(user_value, clause_value, fn) + ud = to_date(user_value) + if !ud.nil? + cd = to_date(clause_value) + !cd.nil? && fn.call(ud, cd) + else + false + end + end + + def self.semver_op(user_value, clause_value, fn) + uv = to_semver(user_value) + if !uv.nil? + cv = to_semver(clause_value) + !cv.nil? && fn.call(uv, cv) + else + false + end + end + + def self.to_date(value) + if value.is_a? String + begin + DateTime.rfc3339(value).strftime("%Q").to_i + rescue => e + nil + end + elsif value.is_a? Numeric + value + else + nil + end + end + + def self.to_semver(value) + if value.is_a? String + for _ in 0..2 do + begin + return Semantic::Version.new(value) + rescue ArgumentError + value = add_zero_version_component(value) + end + end + end + nil + end + + def self.add_zero_version_component(v) + NUMERIC_VERSION_COMPONENTS_REGEX.match(v) { |m| + m[0] + ".0" + v[m[0].length..-1] + } + end + + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index b7c2ee85..8b22feca 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/evaluator" require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" @@ -13,7 +14,6 @@ module LaunchDarkly # should create a single client instance for the lifetime of the application. # class LDClient - include Evaluation include Impl # # Creates a new client instance that connects to LaunchDarkly. A custom @@ -46,6 +46,10 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + get_flag = lambda { |key| @store.get(FEATURES, key) } + get_segment = lambda { |key| @store.get(SEGMENTS, key) } + @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, @config.logger) + if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else @@ -310,7 +314,7 @@ def all_flags_state(user, options={}) next end begin - result = evaluate(f, user, @store, @config.logger, @event_factory_default) + result = @evaluator.evaluate(f, user, @event_factory_default) state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, details_only_if_tracked) rescue => exn @@ -352,7 +356,7 @@ def create_default_data_source(sdk_key, config) # @return [EvaluationDetail] def evaluate_internal(key, user, default, event_factory) if @config.offline? - return error_result('CLIENT_NOT_READY', default) + return Evaluator.error_result('CLIENT_NOT_READY', default) end if !initialized? 
@@ -360,7 +364,7 @@ def evaluate_internal(key, user, default, event_factory) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - detail = error_result('CLIENT_NOT_READY', default) + detail = Evaluator.error_result('CLIENT_NOT_READY', default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end @@ -370,20 +374,20 @@ def evaluate_internal(key, user, default, event_factory) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } - detail = error_result('FLAG_NOT_FOUND', default) + detail = Evaluator.error_result('FLAG_NOT_FOUND', default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } - detail = error_result('USER_NOT_SPECIFIED', default) + detail = Evaluator.error_result('USER_NOT_SPECIFIED', default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end begin - res = evaluate(feature, user, @store, @config.logger, event_factory) + res = @evaluator.evaluate(feature, user, event_factory) if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -397,7 +401,7 @@ def evaluate_internal(key, user, default, event_factory) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) - detail = error_result('EXCEPTION', default) + detail = Evaluator.error_result('EXCEPTION', default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb deleted file mode 100644 index ff4b63f6..00000000 --- a/spec/evaluation_spec.rb +++ /dev/null @@ -1,737 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Evaluation do - subject { LaunchDarkly::Evaluation } - - include LaunchDarkly::Evaluation - - let(:features) { LaunchDarkly::InMemoryFeatureStore.new } - - let(:factory) { LaunchDarkly::Impl::EventFactory.new(false) } - - let(:user) { - { - key: "userkey", - email: "test@example.com", - name: "Bob" - } - } - - let(:logger) { LaunchDarkly::Config.default_logger } - - def boolean_flag_with_rules(rules) - { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - - def boolean_flag_with_clauses(clauses) - boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) - end - - describe "evaluate" do - it "returns off variation if flag is off" do - flag = { - key: 'feature', - on: false, - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns nil if flag is off and off variation is unspecified" do - flag = { - key: 'feature', - on: false, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger, factory) - 
expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if off variation is too high" do - flag = { - key: 'feature', - on: false, - offVariation: 999, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if off variation is negative" do - flag = { - key: 'feature', - on: false, - offVariation: -1, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns off variation if prerequisite is not found" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns off variation and event if prerequisite of a prerequisite is not found" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns off variation and event if prerequisite is off" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: false, - # note that even though it returns the desired variation, it is still off and therefore not a match - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns off variation and event if prerequisite is not met" do - flag = { - 
key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns fallthrough variation and event if prerequisite is met and there are no rules" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - fallthrough: { variation: 1 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns an error if fallthrough variation is too high" do - flag = { - key: 'feature', - on: true, - fallthrough: { variation: 999 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough variation is negative" do - flag = { - key: 'feature', - on: true, - fallthrough: { variation: -1 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough has no variation or rollout" do - flag = { - key: 'feature', - on: true, - fallthrough: { }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough has a rollout with no variations" do - flag = { - key: 'feature', - on: true, - fallthrough: { rollout: { variations: [] } }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "matches user from 
targets" do - flag = { - key: 'feature', - on: true, - targets: [ - { values: [ 'whoever', 'userkey' ], variation: 2 } - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "matches user from rules" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule variation is too high" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule variation is negative" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule has neither variation nor rollout" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule has a rollout with no variations" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [] } } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "coerces user key to a string for evaluation" do - clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = evaluate(flag, user, features, logger, factory) - expect(result.detail.value).to eq(true) - end - - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = 
boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = evaluate(flag, user, features, logger, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) - end - end - - describe "clause" do - it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'unknown', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } - clause0 = { attribute: 'name', op: 'unknown', values: [4] } - rule0 = { clauses: [ clause0 ], variation: 1 } - clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } - rule1 = { clauses: [ clause1 ], variation: 1 } - flag = boolean_flag_with_rules([rule0, rule1]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "retrieves segment from segment store for segmentMatch operator" do - segment = { - key: 'segkey', - included: [ 'userkey' ], - version: 1, - deleted: false - } - features.upsert(LaunchDarkly::SEGMENTS, segment) - - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "falls through with no errors if referenced segment is not found" do - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect { - clause[:negate] = true - }.to change {evaluate(flag, user, features, logger, factory).detail.value}.from(true).to(false) - end - end - - describe "operators" do - dateStr1 = "2017-12-06T00:00:00.000-07:00" - dateStr2 = "2017-12-06T00:01:01.000-07:00" - dateMs1 = 10000000 - dateMs2 = 10000001 - invalidDate = "hey what's this?" 
- - operatorTests = [ - # numeric comparisons - [ :in, 99, 99, true ], - [ :in, 99.0001, 99.0001, true ], - [ :in, 99, 99.0001, false ], - [ :in, 99.0001, 99, false ], - [ :lessThan, 99, 99.0001, true ], - [ :lessThan, 99.0001, 99, false ], - [ :lessThan, 99, 99, false ], - [ :lessThanOrEqual, 99, 99.0001, true ], - [ :lessThanOrEqual, 99.0001, 99, false ], - [ :lessThanOrEqual, 99, 99, true ], - [ :greaterThan, 99.0001, 99, true ], - [ :greaterThan, 99, 99.0001, false ], - [ :greaterThan, 99, 99, false ], - [ :greaterThanOrEqual, 99.0001, 99, true ], - [ :greaterThanOrEqual, 99, 99.0001, false ], - [ :greaterThanOrEqual, 99, 99, true ], - - # string comparisons - [ :in, "x", "x", true ], - [ :in, "x", "xyz", false ], - [ :startsWith, "xyz", "x", true ], - [ :startsWith, "x", "xyz", false ], - [ :endsWith, "xyz", "z", true ], - [ :endsWith, "z", "xyz", false ], - [ :contains, "xyz", "y", true ], - [ :contains, "y", "xyz", false ], - - # mixed strings and numbers - [ :in, "99", 99, false ], - [ :in, 99, "99", false ], - #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK - [ :lessThanOrEqual, "99", 99, false ], - #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK - [ :greaterThanOrEqual, "99", 99, false ], - #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK - - # regex - [ :matches, "hello world", "hello.*rld", true ], - [ :matches, "hello world", "hello.*orl", true ], - [ :matches, "hello world", "l+", true ], - [ :matches, "hello world", "(world|planet)", true ], - [ :matches, "hello world", "aloha", false ], - #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK - - # dates - [ :before, dateStr1, dateStr2, true ], - [ :before, dateMs1, dateMs2, true ], - [ :before, dateStr2, dateStr1, false ], - [ :before, dateMs2, dateMs1, false ], - [ :before, dateStr1, dateStr1, false ], - [ :before, dateMs1, dateMs1, false ], - [ :before, dateStr1, invalidDate, false ], - [ :after, dateStr1, dateStr2, false ], - [ :after, dateMs1, dateMs2, false ], - [ :after, dateStr2, dateStr1, true ], - [ :after, dateMs2, dateMs1, true ], - [ :after, dateStr1, dateStr1, false ], - [ :after, dateMs1, dateMs1, false ], - [ :after, dateStr1, invalidDate, false ], - - # semver - [ :semVerEqual, "2.0.1", "2.0.1", true ], - [ :semVerEqual, "2.0", "2.0.0", true ], - [ :semVerEqual, "2-rc1", "2.0.0-rc1", true ], - [ :semVerEqual, "2+build2", "2.0.0+build2", true ], - [ :semVerLessThan, "2.0.0", "2.0.1", true ], - [ :semVerLessThan, "2.0", "2.0.1", true ], - [ :semVerLessThan, "2.0.1", "2.0.0", false ], - [ :semVerLessThan, "2.0.1", "2.0", false ], - [ :semVerLessThan, "2.0.0-rc", "2.0.0-rc.beta", true ], - [ :semVerGreaterThan, "2.0.1", "2.0.0", true ], - [ :semVerGreaterThan, "2.0.1", "2.0", true ], - [ :semVerGreaterThan, "2.0.0", "2.0.1", false ], - [ :semVerGreaterThan, "2.0", "2.0.1", false ], - [ :semVerGreaterThan, "2.0.0-rc.1", "2.0.0-rc.0", true ], - [ :semVerLessThan, "2.0.1", "xbad%ver", false ], - [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ] - ] - - operatorTests.each do |params| - op = params[0] - value1 = params[1] - value2 = params[2] - shouldBe = params[3] - it "should return #{shouldBe} for #{value1} #{op} 
#{value2}" do - user = { key: 'x', custom: { foo: value1 } } - clause = { attribute: 'foo', op: op, values: [value2] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be shouldBe - end - end - end - - describe "bucket_user" do - it "gets expected bucket values for specific keys" do - user = { key: "userKeyA" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.42157587); - - user = { key: "userKeyB" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.6708485); - - user = { key: "userKeyC" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.10343106); - end - - it "can bucket by int value (equivalent to string)" do - user = { - key: "userkey", - custom: { - stringAttr: "33333", - intAttr: 33333 - } - } - stringResult = bucket_user(user, "hashKey", "stringAttr", "saltyA") - intResult = bucket_user(user, "hashKey", "intAttr", "saltyA") - - expect(intResult).to be_within(0.0000001).of(0.54771423) - expect(intResult).to eq(stringResult) - end - - it "cannot bucket by float value" do - user = { - key: "userkey", - custom: { - floatAttr: 33.5 - } - } - result = bucket_user(user, "hashKey", "floatAttr", "saltyA") - expect(result).to eq(0.0) - end - - - it "cannot bucket by bool value" do - user = { - key: "userkey", - custom: { - boolAttr: true - } - } - result = bucket_user(user, "hashKey", "boolAttr", "saltyA") - expect(result).to eq(0.0) - end - end - - def make_segment(key) - { - key: key, - included: [], - excluded: [], - salt: 'abcdef', - version: 1 - } - end - - def make_segment_match_clause(segment) - { - op: :segmentMatch, - values: [ segment[:key] ], - negate: false - } - end - - def make_user_matching_clause(user, attr) - { - attribute: attr.to_s, - op: :in, - values: [ user[attr.to_sym] ], - negate: false - } - end - - describe 'segment matching' do - def test_segment_match(segment) - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - flag = boolean_flag_with_clauses([clause]) - evaluate(flag, user, features, logger, factory).detail.value - end - - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be false - end - - it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: nil - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 100000 - } - segment = 
make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 0 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - - it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segClause2[:values] = [ 'wrong' ] - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - end -end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb new file mode 100644 index 00000000..a840a7c7 --- /dev/null +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -0,0 +1,59 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EvaluatorBucketing do + subject { LaunchDarkly::Impl::EvaluatorBucketing } + + describe "bucket_user" do + it "gets expected bucket values for specific keys" do + user = { key: "userKeyA" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.42157587); + + user = { key: "userKeyB" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.6708485); + + user = { key: "userKeyC" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.10343106); + end + + it "can bucket by int value (equivalent to string)" do + user = { + key: "userkey", + custom: { + stringAttr: "33333", + intAttr: 33333 + } + } + stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA") + intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA") + + expect(intResult).to be_within(0.0000001).of(0.54771423) + expect(intResult).to eq(stringResult) + end + + it "cannot bucket by float value" do + user = { + key: "userkey", + custom: { + floatAttr: 33.5 + } + } + result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA") + expect(result).to eq(0.0) + end + + + it "cannot bucket by bool value" do + user = { + key: "userkey", + custom: { + boolAttr: true + } + } + result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA") + expect(result).to eq(0.0) + end + end +end diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb new file mode 100644 index 00000000..d24087f2 --- /dev/null +++ b/spec/impl/evaluator_operators_spec.rb @@ -0,0 +1,106 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EvaluatorOperators do + subject { LaunchDarkly::Impl::EvaluatorOperators } + + describe "operators" do + dateStr1 = "2017-12-06T00:00:00.000-07:00" + dateStr2 = "2017-12-06T00:01:01.000-07:00" + dateMs1 = 10000000 + dateMs2 = 10000001 + invalidDate = "hey what's this?" 
+ + operatorTests = [ + # numeric comparisons + [ :in, 99, 99, true ], + [ :in, 99.0001, 99.0001, true ], + [ :in, 99, 99.0001, false ], + [ :in, 99.0001, 99, false ], + [ :lessThan, 99, 99.0001, true ], + [ :lessThan, 99.0001, 99, false ], + [ :lessThan, 99, 99, false ], + [ :lessThanOrEqual, 99, 99.0001, true ], + [ :lessThanOrEqual, 99.0001, 99, false ], + [ :lessThanOrEqual, 99, 99, true ], + [ :greaterThan, 99.0001, 99, true ], + [ :greaterThan, 99, 99.0001, false ], + [ :greaterThan, 99, 99, false ], + [ :greaterThanOrEqual, 99.0001, 99, true ], + [ :greaterThanOrEqual, 99, 99.0001, false ], + [ :greaterThanOrEqual, 99, 99, true ], + + # string comparisons + [ :in, "x", "x", true ], + [ :in, "x", "xyz", false ], + [ :startsWith, "xyz", "x", true ], + [ :startsWith, "x", "xyz", false ], + [ :endsWith, "xyz", "z", true ], + [ :endsWith, "z", "xyz", false ], + [ :contains, "xyz", "y", true ], + [ :contains, "y", "xyz", false ], + + # mixed strings and numbers + [ :in, "99", 99, false ], + [ :in, 99, "99", false ], + #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK + #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK + #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK + [ :lessThanOrEqual, "99", 99, false ], + #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :greaterThanOrEqual, "99", 99, false ], + #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + + # regex + [ :matches, "hello world", "hello.*rld", true ], + [ :matches, "hello world", "hello.*orl", true ], + [ :matches, "hello world", "l+", true ], + [ :matches, "hello world", "(world|planet)", true ], + [ :matches, "hello world", "aloha", false ], + #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK + + # dates + [ :before, dateStr1, dateStr2, true ], + [ :before, dateMs1, dateMs2, true ], + [ :before, dateStr2, dateStr1, false ], + [ :before, dateMs2, dateMs1, false ], + [ :before, dateStr1, dateStr1, false ], + [ :before, dateMs1, dateMs1, false ], + [ :before, dateStr1, invalidDate, false ], + [ :after, dateStr1, dateStr2, false ], + [ :after, dateMs1, dateMs2, false ], + [ :after, dateStr2, dateStr1, true ], + [ :after, dateMs2, dateMs1, true ], + [ :after, dateStr1, dateStr1, false ], + [ :after, dateMs1, dateMs1, false ], + [ :after, dateStr1, invalidDate, false ], + + # semver + [ :semVerEqual, "2.0.1", "2.0.1", true ], + [ :semVerEqual, "2.0", "2.0.0", true ], + [ :semVerEqual, "2-rc1", "2.0.0-rc1", true ], + [ :semVerEqual, "2+build2", "2.0.0+build2", true ], + [ :semVerLessThan, "2.0.0", "2.0.1", true ], + [ :semVerLessThan, "2.0", "2.0.1", true ], + [ :semVerLessThan, "2.0.1", "2.0.0", false ], + [ :semVerLessThan, "2.0.1", "2.0", false ], + [ :semVerLessThan, "2.0.0-rc", "2.0.0-rc.beta", true ], + [ :semVerGreaterThan, "2.0.1", "2.0.0", true ], + [ :semVerGreaterThan, "2.0.1", "2.0", true ], + [ :semVerGreaterThan, "2.0.0", "2.0.1", false ], + [ :semVerGreaterThan, "2.0", "2.0.1", false ], + [ :semVerGreaterThan, "2.0.0-rc.1", "2.0.0-rc.0", true ], + [ :semVerLessThan, "2.0.1", "xbad%ver", false ], + [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ] + ] + + operatorTests.each do |params| + op = params[0] + value1 = params[1] + value2 = params[2] + shouldBe = params[3] + it "should return #{shouldBe} for #{value1} #{op} 
#{value2}" do + expect(subject::apply(op, value1, value2)).to be shouldBe + end + end + end +end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb new file mode 100644 index 00000000..04f99a22 --- /dev/null +++ b/spec/impl/evaluator_spec.rb @@ -0,0 +1,600 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + describe "Evaluator" do + subject { Evaluator } + + let(:factory) { EventFactory.new(false) } + + let(:user) { + { + key: "userkey", + email: "test@example.com", + name: "Bob" + } + } + + let(:logger) { ::Logger.new($stdout, level: ::Logger::FATAL) } + + def get_nothing + lambda { |key| raise "should not have requested #{key}" } + end + + def get_things(map) + lambda { |key| + raise "should not have requested #{key}" if !map.has_key?(key) + map[key] + } + end + + def basic_evaluator + subject.new(get_nothing, get_nothing, logger) + end + + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end + + describe "evaluate" do + it "returns off variation if flag is off" do + flag = { + key: 'feature', + on: false, + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, { kind: 'OFF' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns nil if flag is off and off variation is unspecified" do + flag = { + key: 'feature', + on: false, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if off variation is too high" do + flag = { + key: 'feature', + on: false, + offVariation: 999, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if off variation is negative" do + flag = { + key: 'feature', + on: false, + offVariation: -1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns off variation if prerequisite is not found" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) + e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns off variation and event if prerequisite of a prerequisite is not found" do + flag = { + key: 'feature0', + on: true, + 
prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things('feature1' => flag1, 'feature2' => nil) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns off variation and event if prerequisite is off" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: false, + # note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns off variation and event if prerequisite is not met" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns fallthrough variation and event if prerequisite is met and there are no rules" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + fallthrough: { variation: 1 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, 
user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns an error if fallthrough variation is too high" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: 999 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough variation is negative" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: -1 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough has no variation or rollout" do + flag = { + key: 'feature', + on: true, + fallthrough: { }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough has a rollout with no variations" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { variations: [] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "matches user from targets" do + flag = { + key: 'feature', + on: true, + targets: [ + { values: [ 'whoever', 'userkey' ], variation: 2 } + ], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "matches user from rules" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = 
EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end + end + + describe "clause" do + it "can match built-in attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can match custom attribute" do + user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "returns false for missing attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + 
flag = boolean_flag_with_rules([rule0, rule1]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "retrieves segment from segment store for segmentMatch operator" do + segment = { + key: 'segkey', + included: [ 'userkey' ], + version: 1, + deleted: false + } + get_segment = get_things({ 'segkey' => segment }) + e = subject.new(get_nothing, get_segment, logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be true + end + + it "falls through with no errors if referenced segment is not found" do + e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be false + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) + expect { + clause[:negate] = true + }.to change {basic_evaluator.evaluate(flag, user, factory).detail.value}.from(true).to(false) + end + end + + def make_segment(key) + { + key: key, + included: [], + excluded: [], + salt: 'abcdef', + version: 1 + } + end + + def make_segment_match_clause(segment) + { + op: :segmentMatch, + values: [ segment[:key] ], + negate: false + } + end + + def make_user_matching_clause(user, attr) + { + attribute: attr.to_s, + op: :in, + values: [ user[attr.to_sym] ], + negate: false + } + end + + describe 'segment matching' do + def test_segment_match(segment) + clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + e = subject.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e.evaluate(flag, user, factory).detail.value + end + + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'explicitly excludes user' do + segment = make_segment('segkey') + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be false + end + + it 'both includes and excludes user; include takes priority' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is absent' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is nil' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: nil + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user with full rollout' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 100000 + } + segment = make_segment('segkey') 
+ segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with zero rollout" do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 0 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + + it "matches user with multiple clauses" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with multiple clauses if a clause doesn't match" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segClause2[:values] = [ 'wrong' ] + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + end + end + end +end From 45ea4379045e99c122fb1a05960f11db5a071178 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:40:10 -0800 Subject: [PATCH 136/292] comments --- lib/ldclient-rb/impl/evaluator.rb | 27 ++++++++++++++++++--- lib/ldclient-rb/impl/evaluator_bucketing.rb | 8 ++++++ lib/ldclient-rb/impl/evaluator_operators.rb | 25 +++++++++++++++++-- 3 files changed, 55 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index e84e369b..abcde944 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -4,22 +4,43 @@ module LaunchDarkly module Impl + # Encapsulates the feature flag evaluation logic. The Evaluator has no knowledge of the rest of the SDK environment; + # if it needs to retrieve flags or segments that are referenced by a flag, it does so through a simple function that + # is provided in the constructor. It also produces feature requests as appropriate for any referenced prerequisite + # flags, but does not send them. class Evaluator + # A single Evaluator is instantiated for each client instance. + # + # @param get_flag [Function] called if the Evaluator needs to query a different flag from the one that it is + # currently evaluating (i.e. a prerequisite flag); takes a single parameter, the flag key, and returns the + # flag data - or nil if the flag is unknown or deleted + # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment. + # @param logger [Logger] the client's logger def initialize(get_flag, get_segment, logger) @get_flag = get_flag @get_segment = get_segment @logger = logger end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. + # Used internally to hold an evaluation result and the events that were generated from prerequisites. The + # `detail` property is an EvaluationDetail. The `events` property can be either an array of feature request + # events or nil. EvalResult = Struct.new(:detail, :events) + # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) end - # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns - # the default value. 
Error conditions produce a result with an error reason, not an exception. + # The client's entry point for evaluating a flag. The returned `EvalResult` contains the evaluation result and + # any events that were generated for prerequisite flags; its `value` will be `nil` if the flag returns the + # default value. Error conditions produce a result with a nil value and an error reason, not an exception. + # + # @param flag [Object] the flag + # @param user [Object] the user properties + # @param event_factory [EventFactory] called to construct a feature request event when a prerequisite flag is + # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation + # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) if user.nil? || user[:key].nil? return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 273ec1e6..4c00a009 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -1,7 +1,15 @@ module LaunchDarkly module Impl + # Encapsulates the logic for percentage rollouts. module EvaluatorBucketing + # Returns a user's bucket value as a floating-point value in `[0, 1)`. + # + # @param user [Object] the user properties + # @param key [String] the feature flag key (or segment key, if this is for a segment rule) + # @param bucket_by [String|Symbol] the name of the user attribute to be used for bucketing + # @param salt [String] the feature flag's or segment's salt value + # @return [Number] the bucket value, from 0 inclusive to 1 exclusive def self.bucket_user(user, key, bucket_by, salt) return nil unless user[:key] diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 2bc8643b..18a22f35 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -3,7 +3,17 @@ module LaunchDarkly module Impl + # Defines the behavior of all operators that can be used in feature flag rules and segment rules. module EvaluatorOperators + # Applies an operator to produce a boolean result. + # + # @param op [Symbol] one of the supported LaunchDarkly operators, as a symbol + # @param user_value the value of the user attribute that is referenced in the current clause (left-hand + # side of the expression) + # @param clause_value the constant value that `user_value` is being compared to (right-hand side of the + # expression) + # @return [Boolean] true if the expression should be considered a match; false if it is not a match, or + # if the values cannot be compared because they are of the wrong types, or if the operator is unknown def self.apply(op, user_value, clause_value) case op when :in @@ -35,12 +45,23 @@ def self.apply(op, user_value, clause_value) when :semVerGreaterThan semver_op(user_value, clause_value, lambda { |a, b| a > b }) when :segmentMatch - false # we should never reach this - instead we special-case this operator in clause_match_user + # We should never reach this; it can't be evaluated based on just two parameters, because it requires + # looking up the segment from the data store. Instead, we special-case this operator in clause_match_user. + false else false end end + # Retrieves the value of a user attribute by name. 
+ # + # Built-in attributes correspond to top-level properties in the user object, and are always coerced to + # strings except for `anonymous`. Custom attributes correspond to properties within the `custom` property, + # if any, and can be of any type. + # + # @param user [Object] the user properties + # @param attribute [String|Symbol] the attribute to get, for instance `:key` or `:name` or `:some_custom_attr` + # @return the attribute value, or nil if the attribute is unknown def self.user_value(user, attribute) attribute = attribute.to_sym if BUILTINS.include? attribute @@ -56,7 +77,7 @@ def self.user_value(user, attribute) private - BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS From 575352c9cb308c2075bcd83b43cfcb0a306fd4e5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:47:27 -0800 Subject: [PATCH 137/292] fix type coercion behavior --- lib/ldclient-rb/impl/evaluator_operators.rb | 17 +++++++--- spec/impl/evaluator_operators_spec.rb | 35 +++++++++++++++++++++ 2 files changed, 47 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 18a22f35..98ac2e40 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -1,5 +1,6 @@ require "date" require "semantic" +require "set" module LaunchDarkly module Impl @@ -55,9 +56,13 @@ def self.apply(op, user_value, clause_value) # Retrieves the value of a user attribute by name. # - # Built-in attributes correspond to top-level properties in the user object, and are always coerced to - # strings except for `anonymous`. Custom attributes correspond to properties within the `custom` property, - # if any, and can be of any type. + # Built-in attributes correspond to top-level properties in the user object. They are treated as strings and + # non-string values are coerced to strings, except for `anonymous` which is treated as a boolean if present + # (using Ruby's "truthiness" standard). The coercion behavior is not guaranteed to be consistent with other + # SDKs; the built-in attributes should not be set to values of the wrong type (in the strongly-typed SDKs, + # they can't be, and in a future version of the Ruby SDK we may make it impossible to do so). + # + # Custom attributes correspond to properties within the `custom` property, if any, and can be of any type. # # @param user [Object] the user properties # @param attribute [String|Symbol] the attribute to get, for instance `:key` or `:name` or `:some_custom_attr` @@ -66,8 +71,8 @@ def self.user_value(user, attribute) attribute = attribute.to_sym if BUILTINS.include? attribute value = user[attribute] - return value.to_s if !value.nil? && !(value.is_a? String) - value + return nil if value.nil? + (attribute == :anonymous) ? !!value : value.to_s elsif !user[:custom].nil? 
user[:custom][attribute] else @@ -78,9 +83,11 @@ def self.user_value(user, attribute) private BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + NON_STRING_BUILTINS = Set[:anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS + private_constant :NON_STRING_BUILTINS private_constant :NUMERIC_VERSION_COMPONENTS_REGEX def self.string_op(user_value, clause_value, fn) diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index d24087f2..92c68483 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -103,4 +103,39 @@ end end end + + describe "user_value" do + [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous, :some_custom_attr].each do |attr| + it "returns nil if property #{attr} is not defined" do + expect(subject::user_value({}, attr)).to be nil + end + end + + [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name].each do |attr| + it "gets string value of string property #{attr}" do + expect(subject::user_value({ attr => 'x' }, attr)).to eq 'x' + end + + it "coerces non-string value of property #{attr} to string" do + expect(subject::user_value({ attr => 3 }, attr)).to eq '3' + end + end + + it "gets boolean value of property anonymous" do + expect(subject::user_value({ anonymous: true }, :anonymous)).to be true + expect(subject::user_value({ anonymous: false }, :anonymous)).to be false + end + + it "coerces non-boolean value of property anonymous to boolean" do + expect(subject::user_value({ anonymous: 3 }, :anonymous)).to be true + end + + it "gets string value of custom property" do + expect(subject::user_value({ custom: { some_custom_attr: 'x' } }, :some_custom_attr)).to eq 'x' + end + + it "gets non-string value of custom property" do + expect(subject::user_value({ custom: { some_custom_attr: 3 } }, :some_custom_attr)).to eq 3 + end + end end From 62548d1d62257e000885fde8c5afd30da625a3a7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:54:06 -0800 Subject: [PATCH 138/292] make type coercion behavior consistent with earlier versions for now --- lib/ldclient-rb/impl/evaluator_operators.rb | 12 +++++------- spec/impl/evaluator_operators_spec.rb | 4 ++-- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 98ac2e40..56621790 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -57,10 +57,10 @@ def self.apply(op, user_value, clause_value) # Retrieves the value of a user attribute by name. # # Built-in attributes correspond to top-level properties in the user object. They are treated as strings and - # non-string values are coerced to strings, except for `anonymous` which is treated as a boolean if present - # (using Ruby's "truthiness" standard). The coercion behavior is not guaranteed to be consistent with other - # SDKs; the built-in attributes should not be set to values of the wrong type (in the strongly-typed SDKs, - # they can't be, and in a future version of the Ruby SDK we may make it impossible to do so). + # non-string values are coerced to strings, except for `anonymous` which is meant to be a boolean if present + # and is not currently coerced. 
This behavior is consistent with earlier versions of the Ruby SDK, but is not + # guaranteed to be consistent with other SDKs, since the evaluator specification is based on the strongly-typed + # SDKs where it is not possible for an attribute to have the wrong type. # # Custom attributes correspond to properties within the `custom` property, if any, and can be of any type. # @@ -72,7 +72,7 @@ def self.user_value(user, attribute) if BUILTINS.include? attribute value = user[attribute] return nil if value.nil? - (attribute == :anonymous) ? !!value : value.to_s + (attribute == :anonymous) ? value : value.to_s elsif !user[:custom].nil? user[:custom][attribute] else @@ -83,11 +83,9 @@ def self.user_value(user, attribute) private BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] - NON_STRING_BUILTINS = Set[:anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS - private_constant :NON_STRING_BUILTINS private_constant :NUMERIC_VERSION_COMPONENTS_REGEX def self.string_op(user_value, clause_value, fn) diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index 92c68483..7fdb05ca 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -126,8 +126,8 @@ expect(subject::user_value({ anonymous: false }, :anonymous)).to be false end - it "coerces non-boolean value of property anonymous to boolean" do - expect(subject::user_value({ anonymous: 3 }, :anonymous)).to be true + it "does not coerces non-boolean value of property anonymous" do + expect(subject::user_value({ anonymous: 3 }, :anonymous)).to eq 3 end it "gets string value of custom property" do From fd1d8371369a8ae92ed117dd374b70d9b4028f56 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 13:27:22 -0800 Subject: [PATCH 139/292] whitespace --- lib/ldclient-rb/impl/evaluator_operators.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 56621790..62fc0927 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -148,7 +148,6 @@ def self.add_zero_version_component(v) m[0] + ".0" + v[m[0].length..-1] } end - end end end From 3c5289799ef4a0ff83422170fcc3985d3996f3a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:36:03 -0800 Subject: [PATCH 140/292] break up Evaluator tests further --- spec/impl/evaluator_clause_spec.rb | 55 +++++ spec/impl/evaluator_rule_spec.rb | 85 ++++++++ spec/impl/evaluator_segment_spec.rb | 125 +++++++++++ spec/impl/evaluator_spec.rb | 313 +--------------------------- spec/impl/evaluator_spec_base.rb | 75 +++++++ 5 files changed, 342 insertions(+), 311 deletions(-) create mode 100644 spec/impl/evaluator_clause_spec.rb create mode 100644 spec/impl/evaluator_rule_spec.rb create mode 100644 spec/impl/evaluator_segment_spec.rb create mode 100644 spec/impl/evaluator_spec_base.rb diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb new file mode 100644 index 00000000..a90a5499 --- /dev/null +++ b/spec/impl/evaluator_clause_spec.rb @@ -0,0 +1,55 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (clauses)", :evaluator_spec_base => true do + subject { Evaluator } + + it "can match built-in attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + 
flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can match custom attribute" do + user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "returns false for missing attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = boolean_flag_with_rules([rule0, rule1]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + end + end +end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb new file mode 100644 index 00000000..ee2e315b --- /dev/null +++ b/spec/impl/evaluator_rule_spec.rb @@ -0,0 +1,85 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (rules)", :evaluator_spec_base => true do + subject { Evaluator } + + it "matches user from rules" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', 
clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end + end + end +end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb new file mode 100644 index 00000000..64fb1bc7 --- /dev/null +++ b/spec/impl/evaluator_segment_spec.rb @@ -0,0 +1,125 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (segments)", :evaluator_spec_base => true do + subject { Evaluator } + + def test_segment_match(segment) + clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + e = Evaluator.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e.evaluate(flag, user, factory).detail.value + end + + it "retrieves segment from segment store for segmentMatch operator" do + segment = { + key: 'segkey', + included: [ 'userkey' ], + version: 1, + deleted: false + } + get_segment = get_things({ 'segkey' => segment }) + e = subject.new(get_nothing, get_segment, logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be true + end + + it "falls through with no errors if referenced segment is not found" do + e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be false + end + + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'explicitly excludes user' do + segment = make_segment('segkey') + 
segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be false + end + + it 'both includes and excludes user; include takes priority' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is absent' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is nil' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: nil + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user with full rollout' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 100000 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with zero rollout" do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 0 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + + it "matches user with multiple clauses" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with multiple clauses if a clause doesn't match" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segClause2[:values] = [ 'wrong' ] + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + end + end +end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 04f99a22..556a69f6 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -1,45 +1,11 @@ require "spec_helper" +require "impl/evaluator_spec_base" module LaunchDarkly module Impl - describe "Evaluator" do + describe "Evaluator", :evaluator_spec_base => true do subject { Evaluator } - let(:factory) { EventFactory.new(false) } - - let(:user) { - { - key: "userkey", - email: "test@example.com", - name: "Bob" - } - } - - let(:logger) { ::Logger.new($stdout, level: ::Logger::FATAL) } - - def get_nothing - lambda { |key| raise "should not have requested #{key}" } - end - - def get_things(map) - lambda { |key| - raise "should not have requested #{key}" if !map.has_key?(key) - map[key] - } - end - - def basic_evaluator - subject.new(get_nothing, get_nothing, logger) - end - - def boolean_flag_with_rules(rules) - { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - - def boolean_flag_with_clauses(clauses) - boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) - end - describe "evaluate" do it "returns off variation if flag is off" do flag = { @@ -319,281 +285,6 @@ def boolean_flag_with_clauses(clauses) expect(result.detail).to eq(detail) expect(result.events).to 
eq(nil) end - - it "matches user from rules" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule variation is too high" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule variation is negative" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule has neither variation nor rollout" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule has a rollout with no variations" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [] } } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "coerces user key to a string for evaluation" do - clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.value).to eq(true) - end - - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) - end - end - - describe "clause" do - it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - 
- it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - - it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'unknown', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } - clause0 = { attribute: 'name', op: 'unknown', values: [4] } - rule0 = { clauses: [ clause0 ], variation: 1 } - clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } - rule1 = { clauses: [ clause1 ], variation: 1 } - flag = boolean_flag_with_rules([rule0, rule1]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "retrieves segment from segment store for segmentMatch operator" do - segment = { - key: 'segkey', - included: [ 'userkey' ], - version: 1, - deleted: false - } - get_segment = get_things({ 'segkey' => segment }) - e = subject.new(get_nothing, get_segment, logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user, factory).detail.value).to be true - end - - it "falls through with no errors if referenced segment is not found" do - e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user, factory).detail.value).to be false - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect { - clause[:negate] = true - }.to change {basic_evaluator.evaluate(flag, user, factory).detail.value}.from(true).to(false) - end - end - - def make_segment(key) - { - key: key, - included: [], - excluded: [], - salt: 'abcdef', - version: 1 - } - end - - def make_segment_match_clause(segment) - { - op: :segmentMatch, - values: [ segment[:key] ], - negate: false - } - end - - def make_user_matching_clause(user, attr) - { - attribute: attr.to_s, - op: :in, - values: [ user[attr.to_sym] ], - negate: false - } - end - - describe 'segment matching' do - def test_segment_match(segment) - clause = make_segment_match_clause(segment) - flag = boolean_flag_with_clauses([clause]) - e = subject.new(get_nothing, get_things({ segment[:key] => segment }), logger) - e.evaluate(flag, user, factory).detail.value - end - - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - expect(test_segment_match(segment)).to be 
true - end - - it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be false - end - - it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: nil - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 100000 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 0 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - - it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segClause2[:values] = [ 'wrong' ] - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end end end end diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb new file mode 100644 index 00000000..fa8b86c3 --- /dev/null +++ b/spec/impl/evaluator_spec_base.rb @@ -0,0 +1,75 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + module EvaluatorSpecBase + def factory + EventFactory.new(false) + end + + def user + { + key: "userkey", + email: "test@example.com", + name: "Bob" + } + end + + def logger + ::Logger.new($stdout, level: ::Logger::FATAL) + end + + def get_nothing + lambda { |key| raise "should not have requested #{key}" } + end + + def get_things(map) + lambda { |key| + raise "should not have requested #{key}" if !map.has_key?(key) + map[key] + } + end + + def basic_evaluator + subject.new(get_nothing, get_nothing, logger) + end + + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end + + def make_user_matching_clause(user, attr) + { + attribute: attr.to_s, + op: :in, + values: [ user[attr.to_sym] ], + negate: false + } + end + + def make_segment(key) + { + key: key, + included: [], + excluded: [], 
+ salt: 'abcdef', + version: 1 + } + end + + def make_segment_match_clause(segment) + { + op: :segmentMatch, + values: [ segment[:key] ], + negate: false + } + end + end + + RSpec.configure { |c| c.include EvaluatorSpecBase, :evaluator_spec_base => true } + end +end From d06833619ae59c1fc8ce869050afe7703da8cc49 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:41:46 -0800 Subject: [PATCH 141/292] make EvaluationReason an immutable class --- lib/ldclient-rb/evaluation_detail.rb | 275 ++++++++++++++++++++++++--- lib/ldclient-rb/impl/evaluator.rb | 22 +-- lib/ldclient-rb/impl/model/flag.rb | 26 +++ lib/ldclient-rb/ldclient.rb | 13 +- spec/evaluation_detail_spec.rb | 135 +++++++++++++ spec/impl/evaluator_rule_spec.rb | 13 +- spec/impl/evaluator_spec.rb | 32 ++-- spec/ldclient_spec.rb | 18 +- 8 files changed, 452 insertions(+), 82 deletions(-) create mode 100644 lib/ldclient-rb/impl/model/flag.rb create mode 100644 spec/evaluation_detail_spec.rb diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 9db9f0fe..bccaf133 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -3,7 +3,16 @@ module LaunchDarkly # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail + # Creates a new instance. + # + # @param value the result value of the flag evaluation; may be of any type + # @param variation_index [int|nil] the index of the value within the flag's list of variations, or + # `nil` if the application default value was returned + # @param reason [EvaluationReason] an object describing the main factor that influenced the result + # @raise [ArgumentError] if `variation_index` or `reason` is not of the correct type def initialize(value, variation_index, reason) + raise ArgumentError.new("variation_index must be a number") if !variation_index.nil? && !(variation_index.is_a? Numeric) + raise ArgumentError.new("reason must be an EvaluationReason") if !(reason.is_a? EvaluationReason) @value = value @variation_index = variation_index @reason = reason @@ -29,37 +38,7 @@ def initialize(value, variation_index, reason) # # An object describing the main factor that influenced the flag evaluation value. # - # This object is currently represented as a Hash, which may have the following keys: - # - # `:kind`: The general category of reason. Possible values: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation - # * `'ERROR'`: the flag could not be evaluated, so the default value was returned - # - # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the - # matched rule (0 for the first rule). - # - # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. - # - # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of - # the prerequisite flag that failed. 
- # - # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: - # - # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had - # successfully initialized - # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag - # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a - # rule specified a nonexistent variation - # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied - # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation - # - # @return [Hash] + # @return [EvaluationReason] # attr_reader :reason @@ -77,4 +56,238 @@ def ==(other) @value == other.value && @variation_index == other.variation_index && @reason == other.reason end end + + # Describes the reason that a flag evaluation produced a particular value. This is returned by + # methods such as {LDClient#variation_detail} as the `reason` property of an {EvaluationDetail}. + # + # The `kind` property is always defined, but other properties will have non-nil values only for + # certain values of `kind`. All properties are immutable. + # + # There is a standard JSON representation of evaluation reasons when they appear in analytics events. + # Use `as_json` or `to_json` to convert to this representation. + # + # Use factory methods such as {EvaluationReason#off} to obtain instances of this class. + class EvaluationReason + # Value for {#kind} indicating that the flag was off and therefore returned its configured off value. + OFF = :OFF + + # Value for {#kind} indicating that the flag was on but the user did not match any targets or rules. + FALLTHROUGH = :FALLTHROUGH + + # Value for {#kind} indicating that the user key was specifically targeted for this flag. + TARGET_MATCH = :TARGET_MATCH + + # Value for {#kind} indicating that the user matched one of the flag's rules. + RULE_MATCH = :RULE_MATCH + + # Value for {#kind} indicating that the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation. + PREREQUISITE_FAILED = :PREREQUISITE_FAILED + + # Value for {#kind} indicating that the flag could not be evaluated, e.g. because it does not exist + # or due to an unexpected error. In this case the result value will be the application default value + # that the caller passed to the client. Check {#error_kind} for more details on the problem. + ERROR = :ERROR + + # Value for {#error_kind} indicating that the caller tried to evaluate a flag before the client had + # successfully initialized. + ERROR_CLIENT_NOT_READY = :CLIENT_NOT_READY + + # Value for {#error_kind} indicating that the caller provided a flag key that did not match any known flag. + ERROR_FLAG_NOT_FOUND = :FLAG_NOT_FOUND + + # Value for {#error_kind} indicating that there was an internal inconsistency in the flag data, e.g. + # a rule specified a nonexistent variation. An error message will always be logged in this case. + ERROR_MALFORMED_FLAG = :MALFORMED_FLAG + + # Value for {#error_kind} indicating that the caller passed `nil` for the user parameter, or the + # user lacked a key. + ERROR_USER_NOT_SPECIFIED = :USER_NOT_SPECIFIED + + # Value for {#error_kind} indicating that an unexpected exception stopped flag evaluation. An error + # message will always be logged in this case. + ERROR_EXCEPTION = :EXCEPTION + + # Indicates the general category of the reason. Will always be one of the class constants such + # as {#OFF}. 
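As a point of reference for how the new reason API is meant to read from application code, here is a minimal usage sketch (not code from the diff itself) built only from the factory methods and accessors this patch introduces; the one assumption is that the SDK is loaded in the usual way:

    require "ldclient-rb"  # assumed entry point for loading these classes

    # The no-argument factory methods return shared immutable instances.
    off_reason   = LaunchDarkly::EvaluationReason.off
    rule_reason  = LaunchDarkly::EvaluationReason.rule_match(0, "ruleid")
    error_reason = LaunchDarkly::EvaluationReason.error(
      LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)

    off_reason.kind          # => :OFF
    rule_reason.rule_index   # => 0
    rule_reason.rule_id      # => "ruleid"
    error_reason.error_kind  # => :FLAG_NOT_FOUND

    # JSON/hash forms follow the analytics event schema defined later in this class.
    rule_reason.to_json      # => '{"kind":"RULE_MATCH","ruleIndex":0,"ruleId":"ruleid"}'
    rule_reason[:kind]       # => "RULE_MATCH"

    # EvaluationDetail now validates its arguments and pairs a value with a reason.
    detail = LaunchDarkly::EvaluationDetail.new(true, 1, rule_reason)
    detail.variation_index   # => 1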
+ attr_reader :kind + + # The index of the rule that was matched (0 for the first rule in the feature flag). If + # {#kind} is not {#RULE_MATCH}, this will be `nil`. + attr_reader :rule_index + + # A unique string identifier for the matched rule, which will not change if other rules are added + # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. + attr_reader :rule_id + + # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not + # {#PREREQUISITE_FAILED}, this will be `nil`. + attr_reader :prerequisite_key + + # A value indicating the general category of error. This should be one of the class constants such + # as {#ERROR_FLAG_NOT_FOUND}. If {#kind} is not {#ERROR}, it will be `nil`. + attr_reader :error_kind + + # Returns an instance whose {#kind} is {#OFF}. + # @return [EvaluationReason] + def self.off + @@off + end + + # Returns an instance whose {#kind} is {#FALLTHROUGH}. + # @return [EvaluationReason] + def self.fallthrough + @@fallthrough + end + + # Returns an instance whose {#kind} is {#TARGET_MATCH}. + # @return [EvaluationReason] + def self.target_match + @@target_match + end + + # Returns an instance whose {#kind} is {#RULE_MATCH}. + # + # @param rule_index [Number] the index of the rule that was matched (0 for the first rule in + # the feature flag) + # @param rule_id [String] unique string identifier for the matched rule + # @return [EvaluationReason] + # @raise [ArgumentError] if `rule_index` is not a number or `rule_id` is not a string + def self.rule_match(rule_index, rule_id) + raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) + raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? String) # in test data, ID could be nil + new(:RULE_MATCH, rule_index, rule_id, nil, nil) + end + + # Returns an instance whose {#kind} is {#PREREQUISITE_FAILED}. + # + # @param prerequisite_key [String] key of the prerequisite flag that did not return the desired variation + # @return [EvaluationReason] + # @raise [ArgumentError] if `prerequisite_key` is nil or not a string + def self.prerequisite_failed(prerequisite_key) + raise ArgumentError.new("prerequisite_key must be a string") if !(prerequisite_key.is_a? String) + new(:PREREQUISITE_FAILED, nil, nil, prerequisite_key, nil) + end + + # Returns an instance whose {#kind} is {#ERROR}. + # + # @param error_kind [Symbol] value indicating the general category of error + # @return [EvaluationReason] + # @raise [ArgumentError] if `error_kind` is not a symbol + def self.error(error_kind) + raise ArgumentError.new("error_kind must be a symbol") if !(error_kind.is_a? Symbol) + e = @@error_instances[error_kind] + e.nil? ? make_error(error_kind) : e + end + + def ==(other) + if other.is_a? EvaluationReason + @kind == other.kind && @rule_index == other.rule_index && @rule_id == other.rule_id && + @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind + elsif other.is_a? Hash + @kind.to_s == other[:kind] && @rule_index == other[:ruleIndex] && @rule_id == other[:ruleId] && + @prerequisite_key == other[:prerequisiteKey] && + (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) + end + end + + # Equivalent to {#inspect}. + # @return [String] + def to_s + inspect + end + + # Returns a concise string representation of the reason. Examples: `"FALLTHROUGH"`, + # `"ERROR(FLAG_NOT_FOUND)"`. The exact syntax is not guaranteed to remain the same; this is meant + # for debugging. 
+ # @return [String] + def inspect + case @kind + when :RULE_MATCH + "RULE_MATCH(#{@rule_index},#{@rule_id})" + when :PREREQUISITE_FAILED + "PREREQUISITE_FAILED(#{@prerequisite_key})" + when :ERROR + "ERROR(#{@error_kind})" + else + @kind.to_s + end + end + + # Returns a hash that can be used as a JSON representation of the reason, in the format used + # in LaunchDarkly analytics events. + # @return [Hash] + def as_json(*) # parameter is unused, but may be passed if we're using the json gem + # Note that this implementation is somewhat inefficient; it allocates a new hash every time. + # However, in normal usage the SDK only serializes reasons if 1. full event tracking is + # enabled for a flag and the application called variation_detail, or 2. experimentation is + # enabled for an evaluation. We can't reuse these hashes because an application could call + # as_json and then modify the result. + case @kind + when :RULE_MATCH + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + when :PREREQUISITE_FAILED + { kind: @kind, prerequisiteKey: @prerequisite_key } + when :ERROR + { kind: @kind, errorKind: @error_kind } + else + { kind: @kind } + end + end + + # Same as {#as_json}, but converts the JSON structure into a string. + # @return [String] + def to_json(*a) + as_json.to_json(a) + end + + # Allows this object to be treated as a hash corresponding to its JSON representation. For + # instance, if `reason.kind` is {#RULE_MATCH}, then `reason[:kind]` will be `"RULE_MATCH"` and + # `reason[:ruleIndex]` will be equal to `reason.rule_index`. + def [](key) + case key + when :kind + @kind.to_s + when :ruleIndex + @rule_index + when :ruleId + @rule_id + when :prerequisiteKey + @prerequisite_key + when :errorKind + @error_kind.nil? ? nil : @error_kind.to_s + else + nil + end + end + + private + + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) + @kind = kind.to_sym + @rule_index = rule_index + @rule_id = rule_id + @rule_id.freeze if !rule_id.nil? + @prerequisite_key = prerequisite_key + @prerequisite_key.freeze if !prerequisite_key.nil? + @error_kind = error_kind + end + + private_class_method :new + + def self.make_error(error_kind) + new(:ERROR, nil, nil, nil, error_kind) + end + + @@fallthrough = new(:FALLTHROUGH, nil, nil, nil, nil) + @@off = new(:OFF, nil, nil, nil, nil) + @@target_match = new(:TARGET_MATCH, nil, nil, nil, nil) + @@error_instances = { + ERROR_CLIENT_NOT_READY => make_error(ERROR_CLIENT_NOT_READY), + ERROR_FLAG_NOT_FOUND => make_error(ERROR_FLAG_NOT_FOUND), + ERROR_MALFORMED_FLAG => make_error(ERROR_MALFORMED_FLAG), + ERROR_USER_NOT_SPECIFIED => make_error(ERROR_USER_NOT_SPECIFIED), + ERROR_EXCEPTION => make_error(ERROR_EXCEPTION) + } + end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index abcde944..0bc78552 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -29,7 +29,7 @@ def initialize(get_flag, get_segment, logger) # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) - EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + EvaluationDetail.new(value, nil, EvaluationReason.error(errorKind)) end # The client's entry point for evaluating a flag. The returned `EvalResult` contains the evaluation result and @@ -43,7 +43,7 @@ def self.error_result(errorKind, value = nil) # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) if user.nil? 
|| user[:key].nil? - return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) + return EvalResult.new(Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED), []) end # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature @@ -62,7 +62,7 @@ def evaluate(flag, user, event_factory) def eval_internal(flag, user, events, event_factory) if !flag[:on] - return get_off_value(flag, { kind: 'OFF' }) + return get_off_value(flag, EvaluationReason::off) end prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) @@ -74,7 +74,7 @@ def eval_internal(flag, user, events, event_factory) (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + return get_variation(flag, target[:variation], EvaluationReason::target_match) end end end @@ -84,18 +84,16 @@ def eval_internal(flag, user, events, event_factory) rules.each_index do |i| rule = rules[i] if rule_match_user(rule, user) - return get_value_for_variation_or_rollout(flag, rule, user, - { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }) + return get_value_for_variation_or_rollout(flag, rule, user, EvaluationReason::rule_match(i, rule[:id])) end end # Check the fallthrough rule if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, - { kind: 'FALLTHROUGH' }) + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, EvaluationReason::fallthrough) end - return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) + return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end def check_prerequisites(flag, user, events, event_factory) @@ -123,7 +121,7 @@ def check_prerequisites(flag, user, events, event_factory) end end if !prereq_ok - return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } + return EvaluationReason::prerequisite_failed(prereq_key) end end nil @@ -219,7 +217,7 @@ def segment_rule_match_user(rule, user, segment_key, salt) def get_variation(flag, index, reason) if index < 0 || index >= flag[:variations].length @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") - return Evaluator.error_result('MALFORMED_FLAG') + return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end EvaluationDetail.new(flag[:variations][index], index, reason) end @@ -235,7 +233,7 @@ def get_value_for_variation_or_rollout(flag, vr, user, reason) index = variation_index_for_user(flag, vr, user) if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") - return Evaluator.error_result('MALFORMED_FLAG') + return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end return get_variation(flag, index, reason) end diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb new file mode 100644 index 00000000..a4de3d17 --- /dev/null +++ b/lib/ldclient-rb/impl/model/flag.rb @@ -0,0 +1,26 @@ + +module LaunchDarkly + module Impl + module Model + # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or + # read it from a persistent data store). Generates immutable instances of every parameterized + # evaluation reason that could be generated by this flag, so we can avoid creating new reason + # instances during evaluations. 
+ def preprocess_flag_after_deserializing(flag) + prereqs = flag[:prerequisites] + if !prereqs.nil? + prereqs.each do |prereq| + prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) + end + end + rules = flag[:rules] + if !rules.nil? + rules.each_index do |i| + rule = rules[i] + rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 8b22feca..35c1bc41 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -319,7 +319,8 @@ def all_flags_state(user, options={}) details_only_if_tracked) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil, details_only_if_tracked) + state.add_flag(f, nil, nil, with_reasons ? EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION) : nil, + details_only_if_tracked) end end @@ -356,7 +357,7 @@ def create_default_data_source(sdk_key, config) # @return [EvaluationDetail] def evaluate_internal(key, user, default, event_factory) if @config.offline? - return Evaluator.error_result('CLIENT_NOT_READY', default) + return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end if !initialized? @@ -364,7 +365,7 @@ def evaluate_internal(key, user, default, event_factory) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - detail = Evaluator.error_result('CLIENT_NOT_READY', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end @@ -374,14 +375,14 @@ def evaluate_internal(key, user, default, event_factory) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". 
Returning default value" } - detail = Evaluator.error_result('FLAG_NOT_FOUND', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_FLAG_NOT_FOUND, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } - detail = Evaluator.error_result('USER_NOT_SPECIFIED', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end @@ -401,7 +402,7 @@ def evaluate_internal(key, user, default, event_factory) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) - detail = Evaluator.error_result('EXCEPTION', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb new file mode 100644 index 00000000..6c3c713e --- /dev/null +++ b/spec/evaluation_detail_spec.rb @@ -0,0 +1,135 @@ +require "spec_helper" + +module LaunchDarkly + describe "EvaluationDetail" do + subject { EvaluationDetail } + + it "sets properties" do + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).value).to eq "x" + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).variation_index).to eq 0 + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).reason).to eq EvaluationReason::off + end + + it "checks parameter types" do + expect { EvaluationDetail.new(nil, nil, EvaluationReason::off) }.not_to raise_error + expect { EvaluationDetail.new(nil, 0, EvaluationReason::off) }.not_to raise_error + expect { EvaluationDetail.new(nil, "x", EvaluationReason::off) }.to raise_error(ArgumentError) + expect { EvaluationDetail.new(nil, 0, { kind: "OFF" }) }.to raise_error(ArgumentError) + expect { EvaluationDetail.new(nil, 0, nil) }.to raise_error(ArgumentError) + end + + it "equality test" do + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).to eq EvaluationDetail.new("x", 0, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("y", 0, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("x", 1, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("x", 0, EvaluationReason::fallthrough) + end + end + + describe "EvaluationReason" do + subject { EvaluationReason } + + values = [ + [ EvaluationReason::off, EvaluationReason::OFF, { "kind" => "OFF" }, "OFF", nil ], + [ EvaluationReason::fallthrough, EvaluationReason::FALLTHROUGH, + { "kind" => "FALLTHROUGH" }, "FALLTHROUGH", nil ], + [ EvaluationReason::target_match, EvaluationReason::TARGET_MATCH, + { "kind" => "TARGET_MATCH" }, "TARGET_MATCH", nil ], + [ EvaluationReason::rule_match(1, "x"), EvaluationReason::RULE_MATCH, + { "kind" => "RULE_MATCH", "ruleIndex" => 1, "ruleId" => "x" }, "RULE_MATCH(1,x)", + [ EvaluationReason::rule_match(2, "x"), EvaluationReason::rule_match(1, "y") ] ], + [ EvaluationReason::prerequisite_failed("x"), EvaluationReason::PREREQUISITE_FAILED, + { "kind" => "PREREQUISITE_FAILED", "prerequisiteKey" => "x" }, "PREREQUISITE_FAILED(x)" ], + [ 
EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND), EvaluationReason::ERROR, + { "kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ] + ] + values.each_index do |i| + params = values[i] + reason = params[0] + kind = params[1] + json_rep = params[2] + brief_str = params[3] + unequal_values = params[4] + + describe "reason #{reason.kind}" do + it "has correct kind" do + expect(reason.kind).to eq kind + end + + it "equality to self" do + expect(reason).to eq reason + end + + it "inequality to others" do + values.each_index do |j| + if i != j + expect(reason).not_to eq values[j][0] + end + end + if !unequal_values.nil? + unequal_values.each do |v| + expect(reason).not_to eq v + end + end + end + + it "JSON representation" do + expect(JSON.parse(reason.as_json.to_json)).to eq json_rep + expect(JSON.parse(reason.to_json)).to eq json_rep + end + + it "brief representation" do + expect(reason.inspect).to eq brief_str + expect(reason.to_s).to eq brief_str + end + end + end + + it "reuses singleton reasons" do + expect(EvaluationReason::off).to be EvaluationReason::off + expect(EvaluationReason::fallthrough).to be EvaluationReason::fallthrough + expect(EvaluationReason::target_match).to be EvaluationReason::target_match + expect(EvaluationReason::rule_match(1, 'x')).not_to be EvaluationReason::rule_match(1, 'x') + expect(EvaluationReason::prerequisite_failed('x')).not_to be EvaluationReason::prerequisite_failed('x') + errors = [ EvaluationReason::ERROR_CLIENT_NOT_READY, EvaluationReason::ERROR_FLAG_NOT_FOUND, + EvaluationReason::ERROR_MALFORMED_FLAG, EvaluationReason::ERROR_USER_NOT_SPECIFIED, EvaluationReason::ERROR_EXCEPTION ] + errors.each do |e| + expect(EvaluationReason::error(e)).to be EvaluationReason::error(e) + end + end + + it "supports [] with JSON property names" do + expect(EvaluationReason::off[:kind]).to eq "OFF" + expect(EvaluationReason::off[:ruleIndex]).to be nil + expect(EvaluationReason::off[:ruleId]).to be nil + expect(EvaluationReason::off[:prerequisiteKey]).to be nil + expect(EvaluationReason::off[:errorKind]).to be nil + expect(EvaluationReason::rule_match(1, "x")[:ruleIndex]).to eq 1 + expect(EvaluationReason::rule_match(1, "x")[:ruleId]).to eq "x" + expect(EvaluationReason::prerequisite_failed("x")[:prerequisiteKey]).to eq "x" + expect(EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)[:errorKind]).to eq "FLAG_NOT_FOUND" + end + + it "freezes string properties" do + rm = EvaluationReason::rule_match(1, "x") + expect { rm.rule_id.upcase! }.to raise_error(FrozenError) + pf = EvaluationReason::prerequisite_failed("x") + expect { pf.prerequisite_key.upcase! 
}.to raise_error(FrozenError) + end + + it "checks parameter types" do + expect { EvaluationReason::rule_match(nil, "x") }.to raise_error(ArgumentError) + expect { EvaluationReason::rule_match(true, "x") }.to raise_error(ArgumentError) + expect { EvaluationReason::rule_match(1, nil) }.not_to raise_error # we allow nil rule_id for backward compatibility + expect { EvaluationReason::rule_match(1, 9) }.to raise_error(ArgumentError) + expect { EvaluationReason::prerequisite_failed(nil) }.to raise_error(ArgumentError) + expect { EvaluationReason::prerequisite_failed(9) }.to raise_error(ArgumentError) + expect { EvaluationReason::error(nil) }.to raise_error(ArgumentError) + expect { EvaluationReason::error(9) }.to raise_error(ArgumentError) + end + + it "does not allow direct access to constructor" do + expect { EvaluationReason.new(:off, nil, nil, nil, nil) }.to raise_error(NoMethodError) + end + end +end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index ee2e315b..bcbbcbdd 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -10,8 +10,7 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -22,7 +21,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -33,7 +32,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -44,7 +43,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -56,7 +55,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -78,7 +77,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 
556a69f6..6fca0315 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -16,7 +16,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, { kind: 'OFF' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -30,7 +30,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -46,7 +46,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -62,7 +62,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -78,8 +78,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) @@ -105,8 +104,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] @@ -137,8 +135,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] @@ -167,8 +164,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] @@ -197,7 +193,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] @@ -217,7 +213,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', 
errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -232,7 +228,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -247,7 +243,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -262,7 +258,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -280,7 +276,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4672a662..5ca9b9fd 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -157,7 +157,7 @@ def event_processor value: 'value', default: 'default', trackEvents: true, - reason: { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'id' } + reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') )) client.variation('flag', user, 'default') end @@ -182,7 +182,7 @@ def event_processor value: 'value', default: 'default', trackEvents: true, - reason: { kind: 'FALLTHROUGH' } + reason: LaunchDarkly::EvaluationReason::fallthrough )) client.variation('flag', user, 'default') end @@ -194,20 +194,22 @@ def event_processor it "returns the default value if the client is offline" do result = offline_client.variation_detail("doesntmatter", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'CLIENT_NOT_READY' }) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_CLIENT_NOT_READY)) expect(result).to eq expected end it "returns the default value for an unknown feature" do result = client.variation_detail("badkey", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND'}) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)) expect(result).to eq expected end it "queues a feature request event for an unknown feature" do expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", 
user: user, value: "default", default: "default", - reason: { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' } + reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) )) client.variation_detail("badkey", user, "default") end @@ -216,7 +218,7 @@ def event_processor config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("value", 0, { kind: 'OFF' }) + expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected end @@ -225,7 +227,7 @@ def event_processor config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'OFF' }) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected expect(result.default_value?).to be true end @@ -243,7 +245,7 @@ def event_processor default: "default", trackEvents: true, debugEventsUntilDate: 1000, - reason: { kind: "OFF" } + reason: LaunchDarkly::EvaluationReason::off )) client.variation_detail("key", user, "default") end From 4bf9abc57189948066a41ed10144364172010b15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:46:16 -0800 Subject: [PATCH 142/292] FrozenError doesn't exist in older Ruby, use more general RuntimeError --- spec/evaluation_detail_spec.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb index 6c3c713e..3d7418ed 100644 --- a/spec/evaluation_detail_spec.rb +++ b/spec/evaluation_detail_spec.rb @@ -112,9 +112,9 @@ module LaunchDarkly it "freezes string properties" do rm = EvaluationReason::rule_match(1, "x") - expect { rm.rule_id.upcase! }.to raise_error(FrozenError) + expect { rm.rule_id.upcase! }.to raise_error(RuntimeError) pf = EvaluationReason::prerequisite_failed("x") - expect { pf.prerequisite_key.upcase! }.to raise_error(FrozenError) + expect { pf.prerequisite_key.upcase! 
}.to raise_error(RuntimeError) end it "checks parameter types" do From d8f5263e4d72e265eb5eec16a9cf93a97f83ab56 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 20:19:56 -0800 Subject: [PATCH 143/292] precompute evaluation reasons when we receive a flag --- lib/ldclient-rb/impl/evaluator.rb | 7 ++- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 10 +-- .../impl/integrations/redis_impl.rb | 10 ++- lib/ldclient-rb/impl/model/flag.rb | 26 -------- lib/ldclient-rb/impl/model/serialization.rb | 62 +++++++++++++++++++ lib/ldclient-rb/polling.rb | 5 +- lib/ldclient-rb/requestor.rb | 15 +++-- lib/ldclient-rb/stream.rb | 17 +++-- spec/impl/evaluator_rule_spec.rb | 12 ++++ spec/impl/evaluator_spec.rb | 20 +++++- spec/impl/model/serialization_spec.rb | 41 ++++++++++++ spec/polling_spec.rb | 4 +- spec/requestor_spec.rb | 22 +++---- 14 files changed, 186 insertions(+), 75 deletions(-) delete mode 100644 lib/ldclient-rb/impl/model/flag.rb create mode 100644 lib/ldclient-rb/impl/model/serialization.rb create mode 100644 spec/impl/model/serialization_spec.rb diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 0bc78552..aa2b9e12 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -84,7 +84,9 @@ def eval_internal(flag, user, events, event_factory) rules.each_index do |i| rule = rules[i] if rule_match_user(rule, user) - return get_value_for_variation_or_rollout(flag, rule, user, EvaluationReason::rule_match(i, rule[:id])) + reason = rule[:_reason] # try to use cached reason for this rule + reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? + return get_value_for_variation_or_rollout(flag, rule, user, reason) end end @@ -121,7 +123,8 @@ def check_prerequisites(flag, user, events, event_factory) end end if !prereq_ok - return EvaluationReason::prerequisite_failed(prereq_key) + reason = prerequisite[:_reason] # try to use cached reason + return reason.nil? ? EvaluationReason::prerequisite_failed(prereq_key) : reason end end nil diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 10c16dbc..2f186dab 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -39,7 +39,7 @@ def init_internal(all_data) # Insert or update every provided item all_data.each do |kind, items| items.values.each do |item| - value = item.to_json + value = Model.serialize(kind, item) key = item_key(kind, item[:key]) ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => value } }) unused_old_keys.delete(key) @@ -62,7 +62,7 @@ def init_internal(all_data) def get_internal(kind, key) value = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found" - (value.nil? || value == "") ? nil : JSON.parse(value, symbolize_names: true) + (value.nil? || value == "") ? nil : Model.deserialize(kind, value) end def get_all_internal(kind) @@ -71,7 +71,7 @@ def get_all_internal(kind) (results == "" ? [] : results).each do |result| value = result[:value] if !value.nil? 
- item = JSON.parse(value, symbolize_names: true) + item = Model.deserialize(kind, value) items_out[item[:key].to_sym] = item end end @@ -80,7 +80,7 @@ def get_all_internal(kind) def upsert_internal(kind, new_item) key = item_key(kind, new_item[:key]) - json = new_item.to_json + json = Model.serialize(kind, new_item) # We will potentially keep retrying indefinitely until someone's write succeeds while true @@ -88,7 +88,7 @@ def upsert_internal(kind, new_item) if old_value.nil? || old_value == "" mod_index = 0 else - old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) + old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to # FeatureStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index a76fae52..464eb5e4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -77,7 +77,7 @@ def init_internal(all_data) def get_internal(kind, key) resp = get_item_by_keys(namespace_for_kind(kind), key) - unmarshal_item(resp.item) + unmarshal_item(kind, resp.item) end def get_all_internal(kind) @@ -86,7 +86,7 @@ def get_all_internal(kind) while true resp = @client.query(req) resp.items.each do |item| - item_out = unmarshal_item(item) + item_out = unmarshal_item(kind, item) items_out[item_out[:key].to_sym] = item_out end break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 @@ -196,15 +196,15 @@ def read_existing_keys(kinds) def marshal_item(kind, item) make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => item.to_json + ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item) }) end - def unmarshal_item(item) + def unmarshal_item(kind, item) return nil if item.nil? || item.length == 0 json_attr = item[ITEM_JSON_ATTRIBUTE] raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? - JSON.parse(json_attr, symbolize_names: true) + Model.deserialize(kind, json_attr) end end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 107340f8..ac16a976 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -53,7 +53,7 @@ def init_internal(all_data) multi.del(items_key(kind)) count = count + items.count items.each do |key, item| - multi.hset(items_key(kind), key, item.to_json) + multi.hset(items_key(kind), key, Model.serialize(kind,item)) end end multi.set(inited_key, inited_key) @@ -73,8 +73,7 @@ def get_all_internal(kind) with_connection do |redis| hashfs = redis.hgetall(items_key(kind)) hashfs.each do |k, json_item| - f = JSON.parse(json_item, symbolize_names: true) - fs[k.to_sym] = f + fs[k.to_sym] = Model.deserialize(kind, json_item) end end fs @@ -93,7 +92,7 @@ def upsert_internal(kind, new_item) before_update_transaction(base_key, key) if old_item.nil? || old_item[:version] < new_item[:version] result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) + multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? 
@logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } @@ -145,8 +144,7 @@ def with_connection end def get_redis(redis, kind, key) - json_item = redis.hget(items_key(kind), key) - json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) + Model.deserialize(kind, redis.hget(items_key(kind), key)) end end end diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb deleted file mode 100644 index a4de3d17..00000000 --- a/lib/ldclient-rb/impl/model/flag.rb +++ /dev/null @@ -1,26 +0,0 @@ - -module LaunchDarkly - module Impl - module Model - # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or - # read it from a persistent data store). Generates immutable instances of every parameterized - # evaluation reason that could be generated by this flag, so we can avoid creating new reason - # instances during evaluations. - def preprocess_flag_after_deserializing(flag) - prereqs = flag[:prerequisites] - if !prereqs.nil? - prereqs.each do |prereq| - prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) - end - end - rules = flag[:rules] - if !rules.nil? - rules.each_index do |i| - rule = rules[i] - rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) - end - end - end - end - end -end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb new file mode 100644 index 00000000..fcf8b135 --- /dev/null +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -0,0 +1,62 @@ + +module LaunchDarkly + module Impl + module Model + # Abstraction of deserializing a feature flag or segment that was read from a data store or + # received from LaunchDarkly. + def self.deserialize(kind, json) + return nil if json.nil? + item = JSON.parse(json, symbolize_names: true) + postprocess_item_after_deserializing!(kind, item) + item + end + + # Abstraction of serializing a feature flag or segment that will be written to a data store. + # Currently we just call to_json. + def self.serialize(kind, item) + item.to_json + end + + # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. + def self.make_all_store_data(received_data) + flags = received_data[:flags] + postprocess_items_after_deserializing!(FEATURES, flags) + segments = received_data[:segments] + postprocess_items_after_deserializing!(SEGMENTS, segments) + { FEATURES => flags, SEGMENTS => segments } + end + + # Called after we have deserialized a model item from JSON (because we received it from LaunchDarkly, + # or read it from a persistent data store). This allows us to precompute some derived attributes that + # will never change during the lifetime of that item. + def self.postprocess_item_after_deserializing!(kind, item) + return if !item + # Currently we are special-casing this for FEATURES; eventually it will be handled by delegating + # to the "kind" object or the item class. + if kind.eql? FEATURES + # For feature flags, we precompute all possible parameterized EvaluationReason instances. + prereqs = item[:prerequisites] + if !prereqs.nil? + prereqs.each do |prereq| + prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) + end + end + rules = item[:rules] + if !rules.nil? 
+ rules.each_index do |i| + rule = rules[i] + rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) + end + end + end + end + + def self.postprocess_items_after_deserializing!(kind, items_map) + return items_map if !items_map + items_map.each do |key, item| + postprocess_item_after_deserializing!(kind, item) + end + end + end + end +end diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index da0427dc..a9312413 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,10 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) + @config.feature_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f7174787..3f085c7c 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/model/serialization" + require "concurrent/atomics" require "json" require "uri" @@ -27,15 +29,16 @@ def initialize(sdk_key, config) end def request_flag(key) - make_request("/sdk/latest-flags/" + key) + request_single_item(FEATURES, "/sdk/latest-flags/" + key) end def request_segment(key) - make_request("/sdk/latest-segments/" + key) + request_single_item(SEGMENTS, "/sdk/latest-segments/" + key) end def request_all_data() - make_request("/sdk/latest-all") + all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) + Impl::Model.make_all_store_data(all_data) end def stop @@ -47,6 +50,10 @@ def stop private + def request_single_item(kind, path) + Impl::Model.deserialize(kind, make_request(path)) + end + def make_request(path) @client.start if !@client.started? uri = URI(@config.base_uri + path) @@ -73,7 +80,7 @@ def make_request(path) etag = res["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? 
end - JSON.parse(body, symbolize_names: true) + body end def fix_encoding(body, content_type) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index ddb7f669..9add0593 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/model/serialization" + require "concurrent/atomics" require "json" require "ld-eventsource" @@ -86,10 +88,8 @@ def process_message(message) @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) - @feature_store.init({ - FEATURES => message[:data][:flags], - SEGMENTS => message[:data][:segments] - }) + all_data = Impl::Model.make_all_store_data(message[:data]) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -98,7 +98,9 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @feature_store.upsert(kind, data[:data]) + data = data[:data] + Impl::Model.postprocess_item_after_deserializing!(kind, data) + @feature_store.upsert(kind, data) break end end @@ -113,10 +115,7 @@ def process_message(message) end elsif method == INDIRECT_PUT all_data = @requestor.request_all_data - @feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } elsif method == INDIRECT_PATCH diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index bcbbcbdd..a1ae5d66 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -16,6 +16,18 @@ module Impl expect(result.events).to eq(nil) end + it "reuses rule match reason instances if possible" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached rule match reason + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, user, factory) + result2 = basic_evaluator.evaluate(flag, user, factory) + expect(result1.detail.reason.rule_id).to eq 'ruleid' + expect(result1.detail.reason).to be result2.detail.reason + end + it "returns an error if rule variation is too high" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } flag = boolean_flag_with_rules([rule]) diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 6fca0315..dcf8928b 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -3,7 +3,7 @@ module LaunchDarkly module Impl - describe "Evaluator", :evaluator_spec_base => true do + describe "Evaluator (general)", :evaluator_spec_base => true do subject { Evaluator } describe "evaluate" do @@ -85,6 +85,24 @@ module Impl expect(result.events).to eq(nil) end + it "reuses prerequisite-failed reason instances if possible" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason + user = { key: 'x' } + e = subject.new(get_things( 'badfeature' => 
nil ), get_nothing, logger) + result1 = e.evaluate(flag, user, factory) + expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') + result2 = e.evaluate(flag, user, factory) + expect(result2.detail.reason).to be result1.detail.reason + end + it "returns off variation and event if prerequisite of a prerequisite is not found" do flag = { key: 'feature0', diff --git a/spec/impl/model/serialization_spec.rb b/spec/impl/model/serialization_spec.rb new file mode 100644 index 00000000..0a26bcd5 --- /dev/null +++ b/spec/impl/model/serialization_spec.rb @@ -0,0 +1,41 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + module Model + describe "model serialization" do + it "serializes flag" do + flag = { key: "flagkey", version: 1 } + json = Model.serialize(FEATURES, flag) + expect(JSON.parse(json, symbolize_names: true)).to eq flag + end + + it "serializes segment" do + segment = { key: "segkey", version: 1 } + json = Model.serialize(SEGMENTS, segment) + expect(JSON.parse(json, symbolize_names: true)).to eq segment + end + + it "serializes arbitrary data kind" do + thing = { key: "thingkey", name: "me" } + json = Model.serialize({ name: "things" }, thing) + expect(JSON.parse(json, symbolize_names: true)).to eq thing + end + + it "deserializes flag with no rules or prerequisites" do + flag_in = { key: "flagkey", version: 1 } + json = Model.serialize(FEATURES, flag_in) + flag_out = Model.deserialize(FEATURES, json) + expect(flag_out).to eq flag_in + end + + it "deserializes segment" do + segment_in = { key: "segkey", version: 1 } + json = Model.serialize(SEGMENTS, segment_in) + segment_out = Model.deserialize(SEGMENTS, json) + expect(segment_out).to eq segment_in + end + end + end + end +end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 690147d0..d4a1d9bc 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -19,10 +19,10 @@ def with_processor(store) flag = { key: 'flagkey', version: 1 } segment = { key: 'segkey', version: 1 } all_data = { - flags: { + LaunchDarkly::FEATURES => { flagkey: flag }, - segments: { + LaunchDarkly::SEGMENTS => { segkey: segment } } diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 502f6d86..aaed0a92 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -32,7 +32,7 @@ def with_requestor(base_uri) with_requestor(server.base_uri.to_s) do |requestor| server.setup_ok_response("/", expected_data.to_json) data = requestor.request_all_data() - expect(data).to eq expected_data + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) end end end @@ -75,7 +75,7 @@ def with_requestor(base_uri) data = requestor.request_all_data() expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) - expect(data).to eq expected_data + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) end end end @@ -93,14 +93,14 @@ def with_requestor(base_uri) res["ETag"] = etag1 end data = requestor.request_all_data() - expect(data).to eq expected_data1 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) expect(server.requests.count).to eq 1 server.setup_response("/") do |req, res| res.status = 304 end data = requestor.request_all_data() - expect(data).to eq expected_data1 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) @@ 
-110,7 +110,7 @@ def with_requestor(base_uri) res["ETag"] = etag2 end data = requestor.request_all_data() - expect(data).to eq expected_data2 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) expect(server.requests.count).to eq 3 expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) @@ -118,7 +118,7 @@ def with_requestor(base_uri) res.status = 304 end data = requestor.request_all_data() - expect(data).to eq expected_data2 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) expect(server.requests.count).to eq 4 expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) end @@ -131,7 +131,7 @@ def with_requestor(base_uri) server.setup_ok_response("/sdk/latest-all", content, "application/json") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) end end end @@ -143,7 +143,7 @@ def with_requestor(base_uri) "text/plain; charset=ISO-8859-2") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) end end end @@ -160,15 +160,15 @@ def with_requestor(base_uri) end it "can use a proxy server" do - content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + expected_data = { flags: { flagkey: { key: "flagkey" } } } with_server do |server| - server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + server.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) with_server(StubProxyServer.new) do |proxy| begin ENV["http_proxy"] = proxy.base_uri.to_s with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(expected_data)) end ensure ENV["http_proxy"] = nil From 4ffd4fcf402bf4a1acb9fe3a45e0d2c71b00ce7e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 20:29:22 -0800 Subject: [PATCH 144/292] rm unused --- lib/ldclient-rb/impl/model/flag.rb | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 lib/ldclient-rb/impl/model/flag.rb diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb deleted file mode 100644 index a4de3d17..00000000 --- a/lib/ldclient-rb/impl/model/flag.rb +++ /dev/null @@ -1,26 +0,0 @@ - -module LaunchDarkly - module Impl - module Model - # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or - # read it from a persistent data store). Generates immutable instances of every parameterized - # evaluation reason that could be generated by this flag, so we can avoid creating new reason - # instances during evaluations. - def preprocess_flag_after_deserializing(flag) - prereqs = flag[:prerequisites] - if !prereqs.nil? - prereqs.each do |prereq| - prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) - end - end - rules = flag[:rules] - if !rules.nil? 
- rules.each_index do |i| - rule = rules[i] - rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) - end - end - end - end - end -end From 365dddc56ba542e17da1d24d53abbd25305de8d6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 3 Jan 2020 11:48:27 -0800 Subject: [PATCH 145/292] rename FeatureStore to DataStore --- lib/ldclient-rb/config.rb | 24 +++---- lib/ldclient-rb/file_data_source.rb | 8 +-- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 8 +-- .../impl/integrations/redis_impl.rb | 14 ++-- lib/ldclient-rb/impl/store_client_wrapper.rb | 8 +-- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 +- lib/ldclient-rb/in_memory_store.rb | 10 +-- lib/ldclient-rb/integrations/consul.rb | 12 ++-- lib/ldclient-rb/integrations/dynamodb.rb | 20 +++--- lib/ldclient-rb/integrations/redis.rb | 22 +++--- .../integrations/util/store_wrapper.rb | 24 +++---- lib/ldclient-rb/interfaces.rb | 16 ++--- lib/ldclient-rb/ldclient.rb | 14 ++-- lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/redis_store.rb | 16 ++--- lib/ldclient-rb/stream.rb | 14 ++-- ...e_spec_base.rb => data_store_spec_base.rb} | 4 +- spec/file_data_source_spec.rb | 2 +- spec/in_memory_data_store_spec.rb | 12 ++++ spec/in_memory_feature_store_spec.rb | 12 ---- ...tore_spec.rb => consul_data_store_spec.rb} | 12 ++-- ...re_spec.rb => dynamodb_data_store_spec.rb} | 12 ++-- spec/ldclient_spec.rb | 72 +++++++++---------- spec/polling_spec.rb | 14 ++-- spec/redis_feature_store_spec.rb | 14 ++-- spec/stream_spec.rb | 16 ++--- 27 files changed, 199 insertions(+), 199 deletions(-) rename spec/{feature_store_spec_base.rb => data_store_spec_base.rb} (97%) create mode 100644 spec/in_memory_data_store_spec.rb delete mode 100644 spec/in_memory_feature_store_spec.rb rename spec/integrations/{consul_feature_store_spec.rb => consul_data_store_spec.rb} (65%) rename spec/integrations/{dynamodb_feature_store_spec.rb => dynamodb_data_store_spec.rb} (85%) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c7c42e56..935abdad 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -23,7 +23,7 @@ class Config # @option opts [Float] :read_timeout (10) See {#read_timeout}. # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. # @option opts [Object] :cache_store See {#cache_store}. - # @option opts [Object] :feature_store See {#feature_store}. + # @option opts [Object] :data_store See {#data_store}. # @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. # @option opts [Boolean] :offline (false) See {#offline?}. # @option opts [Float] :poll_interval (30) See {#poll_interval}. @@ -48,7 +48,7 @@ def initialize(opts = {}) @flush_interval = opts[:flush_interval] || Config.default_flush_interval @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout @read_timeout = opts[:read_timeout] || Config.default_read_timeout - @feature_store = opts[:feature_store] || Config.default_feature_store + @data_store = opts[:data_store] || Config.default_data_store @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @@ -98,9 +98,9 @@ def stream? # # Whether to use the LaunchDarkly relay proxy in daemon mode. 
In this mode, the client does not # use polling or streaming to get feature flag updates from the server, but instead reads them - # from the {#feature_store feature store}, which is assumed to be a database that is populated by + # from the {#data_store data store}, which is assumed to be a database that is populated by # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) - # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # and ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # All other properties related to streaming or polling are ignored if this option is set to true. # @@ -176,13 +176,13 @@ def offline? # # A store for feature flags and related data. The client uses it to store all data received # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to - # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. + # {InMemoryDataStore}; for other implementations, see {LaunchDarkly::Integrations}. # - # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more information, see ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # - # @return [LaunchDarkly::Interfaces::FeatureStore] + # @return [LaunchDarkly::Interfaces::DataStore] # - attr_reader :feature_store + attr_reader :data_store # # True if all user attributes (other than the key) should be considered private. This means @@ -361,11 +361,11 @@ def self.default_use_ldd end # - # The default value for {#feature_store}. - # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} + # The default value for {#data_store}. + # @return [LaunchDarkly::Interfaces::DataStore] an {InMemoryDataStore} # - def self.default_feature_store - InMemoryFeatureStore.new + def self.default_data_store + InMemoryDataStore.new end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index cfea75f7..6cc0dc39 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -118,14 +118,14 @@ class FileDataSource # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.data_store, config.logger, options) } end end # @private class FileDataSourceImpl - def initialize(feature_store, logger, options={}) - @feature_store = feature_store + def initialize(data_store, logger, options={}) + @data_store = data_store @logger = logger @paths = options[:paths] || [] if @paths.is_a? String @@ -187,7 +187,7 @@ def load_all return end end - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true end diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 2f186dab..34aea72c 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module Consul # - # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. 
+ # Internal implementation of the Consul data store, intended to be used with CachingStoreWrapper. # - class ConsulFeatureStoreCore + class ConsulDataStoreCore begin require "diplomat" CONSUL_ENABLED = true @@ -17,14 +17,14 @@ class ConsulFeatureStoreCore def initialize(opts) if !CONSUL_ENABLED - raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") + raise RuntimeError.new("can't use Consul data store without the 'diplomat' gem") end @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? Diplomat.configuration.url = opts[:url] if !opts[:url].nil? - @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") + @logger.info("ConsulDataStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) @@ -90,7 +90,7 @@ def upsert_internal(kind, new_item) else old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to - # FeatureStoreWrapper so it can be cached) + # DataStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] return old_item end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 464eb5e4..fb3a6bd4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module DynamoDB # - # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # Internal implementation of the DynamoDB data store, intended to be used with CachingStoreWrapper. # - class DynamoDBFeatureStoreCore + class DynamoDBDataStoreCore begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -28,7 +28,7 @@ class DynamoDBFeatureStoreCore def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use DynamoDB data store without the aws-sdk or aws-sdk-dynamodb gem") end @table_name = table_name @@ -41,7 +41,7 @@ def initialize(table_name, opts) @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + @logger.info("DynamoDBDataStore: using DynamoDB table \"#{table_name}\"") end def init_internal(all_data) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index ac16a976..e58dfd07 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -6,9 +6,9 @@ module Impl module Integrations module Redis # - # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Redis data store, intended to be used with CachingStoreWrapper. 
# - class RedisFeatureStoreCore + class RedisDataStoreCore begin require "redis" require "connection_pool" @@ -19,7 +19,7 @@ class RedisFeatureStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use Redis data store because one of these gems is missing: redis, connection_pool") end @redis_opts = opts[:redis_opts] || Hash.new @@ -40,7 +40,7 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + @logger.info("RedisDataStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ and prefix: #{@prefix}") end end @@ -59,7 +59,7 @@ def init_internal(all_data) multi.set(inited_key, inited_key) end end - @logger.info { "RedisFeatureStore: initialized with #{count} items" } + @logger.info { "RedisDataStore: initialized with #{count} items" } end def get_internal(kind, key) @@ -95,13 +95,13 @@ def upsert_internal(kind, new_item) multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + @logger.debug { "RedisDataStore: concurrent modification detected, retrying" } try_again = true end else final_item = old_item action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + @logger.warn { "RedisDataStore: attempted to #{action} #{key} version: #{old_item[:version]} \ in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb index f0948251..8c3160f1 100644 --- a/lib/ldclient-rb/impl/store_client_wrapper.rb +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -4,19 +4,19 @@ module LaunchDarkly module Impl # - # Provides additional behavior that the client requires before or after feature store operations. + # Provides additional behavior that the client requires before or after data store operations. # Currently this just means sorting the data set for init(). In the future we may also use this # to provide an update listener capability. # - class FeatureStoreClientWrapper - include Interfaces::FeatureStore + class DataStoreClientWrapper + include Interfaces::DataStore def initialize(store) @store = store end def init(all_data) - @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) + @store.init(DataStoreDataSetSorter.sort_all_collections(all_data)) end def get(kind, key) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4454fe75..6dad1b36 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -2,10 +2,10 @@ module LaunchDarkly module Impl # - # Implements a dependency graph ordering for data to be stored in a feature store. We must use this - # on every data set that will be passed to the feature store's init() method. + # Implements a dependency graph ordering for data to be stored in a data store. We must use this + # on every data set that will be passed to the data store's init() method. 
# - class FeatureStoreDataSetSorter + class DataStoreDataSetSorter # # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 576d90c7..d3bee07e 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -2,12 +2,12 @@ module LaunchDarkly - # These constants denote the types of data that can be stored in the feature store. If + # These constants denote the types of data that can be stored in the data store. If # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. # - # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # The :priority and :get_dependency_keys properties are used by DataStoreDataSetSorter # to ensure data consistency during non-atomic updates. # @private @@ -24,12 +24,12 @@ module LaunchDarkly }.freeze # - # Default implementation of the LaunchDarkly client's feature store, using an in-memory + # Default implementation of the LaunchDarkly client's data store, using an in-memory # cache. This object holds feature flags and related data received from LaunchDarkly. # Database-backed implementations are available in {LaunchDarkly::Integrations}. # - class InMemoryFeatureStore - include LaunchDarkly::Interfaces::FeatureStore + class InMemoryDataStore + include LaunchDarkly::Interfaces::DataStore def initialize @items = Hash.new diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 4f32d5fd..0ecf69f8 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -5,7 +5,7 @@ module LaunchDarkly module Integrations module Consul # - # Default value for the `prefix` option for {new_feature_store}. + # Default value for the `prefix` option for {new_data_store}. # # @return [String] the default key prefix # @@ -14,10 +14,10 @@ def self.default_prefix end # - # Creates a Consul-backed persistent feature store. + # Creates a Consul-backed persistent data store. # # To use this method, you must first install the gem `diplomat`. Then, put the object returned by - # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). + # this method into the `data_store` property of your client configuration ({LaunchDarkly::Config}). 
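As a usage sketch (not from the patch: the prefix and SDK key are arbitrary, and a Consul agent reachable through Diplomat's defaults is assumed), the returned store is then supplied as the client's `data_store`:

    store = LaunchDarkly::Integrations::Consul.new_data_store(prefix: "my-app", expiration: 30)
    config = LaunchDarkly::Config.new(data_store: store)
    client = LaunchDarkly::LDClient.new("my-sdk-key", config)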
# # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default @@ -27,10 +27,10 @@ def self.default_prefix # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(opts, &block) - core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) + def self.new_data_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulDataStoreCore.new(opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 189e118f..dddf38f0 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,17 +5,17 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can - # use a persistent feature store, see the + # Creates a DynamoDB-backed persistent data store. For more details about how and why you can + # use a persistent data store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or - # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property + # the full `aws-sdk`. Then, put the object returned by this method into the `data_store` property # of your client configuration ({LaunchDarkly::Config}). # - # @example Configuring the feature store - # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") - # config = LaunchDarkly::Config.new(feature_store: store) + # @example Configuring the data store + # store = LaunchDarkly::Integrations::DynamoDB::new_data_store("my-table-name") + # config = LaunchDarkly::Config.new(data_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # Note that the specified table must already exist in DynamoDB. 
It must have a partition key called @@ -31,15 +31,15 @@ module DynamoDB # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) - # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the data store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(table_name, opts) - core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) + def self.new_data_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBDataStoreCore.new(table_name, opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 7e447657..f7437b22 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -4,7 +4,7 @@ module LaunchDarkly module Integrations module Redis # - # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Default value for the `redis_url` option for {new_data_store}. This points to an instance of # Redis running at `localhost` with its default port. # # @return [String] the default Redis URL @@ -14,7 +14,7 @@ def self.default_redis_url end # - # Default value for the `prefix` option for {new_feature_store}. + # Default value for the `prefix` option for {new_data_store}. # # @return [String] the default key prefix # @@ -23,17 +23,17 @@ def self.default_prefix end # - # Creates a Redis-backed persistent feature store. For more details about how and why you can - # use a persistent feature store, see the + # Creates a Redis-backed persistent data store. For more details about how and why you can + # use a persistent data store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `feature_store` property of your + # put the object returned by this method into the `data_store` property of your # client configuration. 
# - # @example Configuring the feature store - # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") - # config = LaunchDarkly::Config.new(feature_store: store) + # @example Configuring the data store + # store = LaunchDarkly::Integrations::Redis::new_data_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(data_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options @@ -45,10 +45,10 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(opts) - return RedisFeatureStore.new(opts) + def self.new_data_store(opts) + return RedisDataStore.new(opts) end end end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 26318d67..c9ff5bcf 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -6,22 +6,22 @@ module LaunchDarkly module Integrations module Util # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::DataStore} # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every feature store implementation. + # behavior and other logic that would otherwise be repeated in every data store implementation. # This makes it easier to create new database integrations by implementing only the database-specific # logic. # - # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # The mixin {DataStoreCore} describes the methods that need to be supported by the inner # implementation object. # class CachingStoreWrapper - include LaunchDarkly::Interfaces::FeatureStore + include LaunchDarkly::Interfaces::DataStore # # Creates a new store wrapper instance. # - # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param core [Object] an object that implements the {DataStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache @@ -146,9 +146,9 @@ def items_if_not_deleted(items) # This module describes the methods that you must implement on your own object in order to # use {CachingStoreWrapper}. # - module FeatureStoreCore + module DataStoreCore # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::DataStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # # If possible, the store should update the entire data set atomically. If that is not possible, @@ -164,7 +164,7 @@ def init_internal(all_data) end # - # Retrieves a single entity. 
This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::DataStore#get} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -177,7 +177,7 @@ def get_internal(kind, key) end # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::DataStore#all} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -190,13 +190,13 @@ def get_all_internal(kind) end # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::DataStore#upsert} # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. # the method is expected to return the final state of the entity (i.e. either the `item` # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # - # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # Note that DataStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. # # @param kind [Object] the kind of entity to add or update @@ -208,7 +208,7 @@ def upsert_internal(kind, item) # # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # {LaunchDarkly::Interfaces::DataStore#initialized?} except that there is less of a concern # for efficiency, because the wrapper will use caching and memoization in order to call the method # as little as possible. # diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index d2a9f862..36bdcd94 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -5,13 +5,13 @@ module LaunchDarkly # module Interfaces # - # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly - # client uses the feature store to persist feature flags and related objects received from + # Mixin that defines the required methods of a data store implementation. The LaunchDarkly + # client uses the data store to persist feature flags and related objects received from # the LaunchDarkly service. Implementations must support concurrent access and updates. - # For more about how feature stores can be used, see: - # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more about how data stores can be used, see: + # [Using a persistent data store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
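To make the contract concrete, here is a minimal sketch of an object implementing these `*_internal` methods and handed to CachingStoreWrapper. Everything here is illustrative: `MyStoreCore` is not an SDK class, it keeps data in a plain hash rather than a database, and the trailing `stop` method is included only on the assumption that shutdown may be delegated to the core.

    # Illustrative in-memory core: the wrapper layers caching on top and filters
    # out items whose :deleted flag is true.
    class MyStoreCore
      def initialize
        @data = {}
        @initialized = false
      end

      def init_internal(all_data)
        @data = all_data
        @initialized = true
      end

      def get_internal(kind, key)
        (@data[kind] || {})[key]          # may return a deleted placeholder; the wrapper filters it
      end

      def get_all_internal(kind)
        @data[kind] || {}
      end

      def upsert_internal(kind, new_item)
        items = (@data[kind] ||= {})
        old_item = items[new_item[:key]]
        if old_item && old_item[:version] >= new_item[:version]
          old_item                        # stale update: return the existing state for caching
        else
          items[new_item[:key]] = new_item
        end
      end

      def initialized_internal?
        @initialized
      end

      def stop                            # assumed shutdown hook; nothing to release here
      end
    end

    store = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(MyStoreCore.new, expiration: 30)

The wrapper then exposes the full DataStore interface (init, get, all, upsert, delete, initialized?) on top of this core.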
# - # An entity that can be stored in a feature store is a hash that can be converted to and from + # An entity that can be stored in a data store is a hash that can be converted to and from # JSON, and that has at a minimum the following properties: `:key`, a string that is unique # among entities of the same kind; `:version`, an integer that is higher for newer data; # `:deleted`, a boolean (optional, defaults to false) that if true means this is a @@ -22,12 +22,12 @@ module Interfaces # `:namespace`, which is a short string unique to that kind. This string can be used as a # collection name or a key prefix. # - # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # The default implementation is {LaunchDarkly::InMemoryDataStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # - module FeatureStore + module DataStore # # Initializes (or re-initializes) the store with the specified set of entities. Any # existing entries will be removed. Implementations can assume that this data set is up to @@ -116,7 +116,7 @@ def stop # # Mixin that defines the required methods of a data source implementation. This is the # component that delivers feature flag data from LaunchDarkly to the LDClient by putting - # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # the data in the {DataStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 35c1bc41..eed490a9 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -37,13 +37,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @event_factory_default = EventFactory.new(false) @event_factory_with_reasons = EventFactory.new(true) - # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add + # We need to wrap the data store object with a DataStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses - # the feature store through the Config object, so we need to make a new Config that uses + # the data store through the Config object, so we need to make a new Config that uses # the wrapped store. - @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) + @store = Impl::DataStoreClientWrapper.new(config.data_store) updated_config = config.clone - updated_config.instance_variable_set(:@feature_store, @store) + updated_config.instance_variable_set(:@data_store, @store) @config = updated_config get_flag = lambda { |key| @store.get(FEATURES, key) } @@ -127,7 +127,7 @@ def secure_mode_hash(user) # given up permanently (for instance, if your SDK key is invalid). In the meantime, # any call to {#variation} or {#variation_detail} will behave as follows: # - # 1. It will check whether the feature store already contains data (that is, you + # 1. It will check whether the data store already contains data (that is, you # are using a database-backed store and it was populated by a previous run of this # application). If so, it will use the last known feature flag data. 
# @@ -362,9 +362,9 @@ def evaluate_internal(key, user, default, event_factory) if !initialized? if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from data store" } else - @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } + @config.logger.error { "[LDClient] Client has not finished initializing; data store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index a9312413..5cbc220a 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,7 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.feature_store.init(all_data) + @config.data_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 48632411..128336b0 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -3,28 +3,28 @@ module LaunchDarkly # - # An implementation of the LaunchDarkly client's feature store that uses a Redis + # An implementation of the LaunchDarkly client's data store that uses a Redis # instance. This object holds feature flags and related data received from the # streaming API. Feature data can also be further cached in memory to reduce overhead # of calls to Redis. # # To use this class, you must first have the `redis` and `connection-pool` gems - # installed. Then, create an instance and store it in the `feature_store` property + # installed. Then, create an instance and store it in the `data_store` property # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may be changed or removed in the future. # - class RedisFeatureStore - include LaunchDarkly::Interfaces::FeatureStore + class RedisDataStore + include LaunchDarkly::Interfaces::DataStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating - # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical - # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # to RedisDataStoreCore where the actual database logic is. This class was retained for historical + # reasons, so that existing code can still call RedisDataStore.new. In the future, we will migrate # away from exposing these concrete classes and use factory methods instead. # - # Constructor for a RedisFeatureStore instance. + # Constructor for a RedisDataStore instance. 
# # @param opts [Hash] the configuration options # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) @@ -37,7 +37,7 @@ class RedisFeatureStore # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) - core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisDataStoreCore.new(opts) @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 9add0593..b5962e00 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -29,7 +29,7 @@ class StreamProcessor def initialize(sdk_key, config, requestor) @sdk_key = sdk_key @config = config - @feature_store = config.feature_store + @data_store = config.data_store @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @@ -89,7 +89,7 @@ def process_message(message) if method == PUT message = JSON.parse(message.data, symbolize_names: true) all_data = Impl::Model.make_all_store_data(message[:data]) - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -100,7 +100,7 @@ def process_message(message) if key data = data[:data] Impl::Model.postprocess_item_after_deserializing!(kind, data) - @feature_store.upsert(kind, data) + @data_store.upsert(kind, data) break end end @@ -109,23 +109,23 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @feature_store.delete(kind, key, data[:version]) + @data_store.delete(kind, key, data[:version]) break end end elsif method == INDIRECT_PUT all_data = @requestor.request_all_data - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } elsif method == INDIRECT_PATCH key = key_for_path(FEATURES, message.data) if key - @feature_store.upsert(FEATURES, @requestor.request_flag(key)) + @data_store.upsert(FEATURES, @requestor.request_flag(key)) else key = key_for_path(SEGMENTS, message.data) if key - @feature_store.upsert(SEGMENTS, @requestor.request_segment(key)) + @data_store.upsert(SEGMENTS, @requestor.request_segment(key)) end end else diff --git a/spec/feature_store_spec_base.rb b/spec/data_store_spec_base.rb similarity index 97% rename from spec/feature_store_spec_base.rb rename to spec/data_store_spec_base.rb index 2d06f0ff..a937d93e 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/data_store_spec_base.rb @@ -1,9 +1,9 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method, clear_data_method| +shared_examples "data_store" do |create_store_method, clear_data_method| # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that feature stores need to be able to handle arbitrary data. + # to make it clear that data stores need to be able to handle arbitrary data. 
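The shared examples these specs rename exercise roughly the following contract; this is a compressed illustration rather than real test code (the "things" kind and item fields mirror the fixtures below, and the in-memory store stands in for any implementation):

    require "ldclient-rb"

    # A "kind" is just a hash with a :namespace; items are hashes with at least
    # :key and :version (and optionally :deleted).
    things_kind = { namespace: "things" }
    store = LaunchDarkly::InMemoryDataStore.new

    store.init({ things_kind => { "thing1" => { key: "thing1", version: 1, name: "Thing 1" } } })
    store.get(things_kind, "thing1")                                   # => the version-1 item
    store.upsert(things_kind, { key: "thing1", version: 2, name: "Thing 1 v2" })
    store.all(things_kind)                                             # => every non-deleted item
    store.delete(things_kind, "thing1", 3)                             # deletes are versioned too
    store.get(things_kind, "thing1")                                   # => nil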
let(:things_kind) { { namespace: "things" } } let(:key1) { "thing1" } diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 837b775d..c9670a11 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -96,7 +96,7 @@ def []=(key, value) before do @config = LaunchDarkly::Config.new - @store = @config.feature_store + @store = @config.data_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/in_memory_data_store_spec.rb b/spec/in_memory_data_store_spec.rb new file mode 100644 index 00000000..e43a2ebb --- /dev/null +++ b/spec/in_memory_data_store_spec.rb @@ -0,0 +1,12 @@ +require "data_store_spec_base" +require "spec_helper" + +def create_in_memory_store(opts = {}) + LaunchDarkly::InMemoryDataStore.new +end + +describe LaunchDarkly::InMemoryDataStore do + subject { LaunchDarkly::InMemoryDataStore } + + include_examples "data_store", method(:create_in_memory_store) +end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb deleted file mode 100644 index c403fc69..00000000 --- a/spec/in_memory_feature_store_spec.rb +++ /dev/null @@ -1,12 +0,0 @@ -require "feature_store_spec_base" -require "spec_helper" - -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryFeatureStore.new -end - -describe LaunchDarkly::InMemoryFeatureStore do - subject { LaunchDarkly::InMemoryFeatureStore } - - include_examples "feature_store", method(:create_in_memory_store) -end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_data_store_spec.rb similarity index 65% rename from spec/integrations/consul_feature_store_spec.rb rename to spec/integrations/consul_data_store_spec.rb index e74d0f0d..07680afa 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_data_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "diplomat" require "spec_helper" @@ -13,12 +13,12 @@ } def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( + LaunchDarkly::Integrations::Consul::new_data_store( $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( + LaunchDarkly::Integrations::Consul::new_data_store( $consul_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -27,16 +27,16 @@ def clear_all_data end -describe "Consul feature store" do +describe "Consul data store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
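Because the shared examples are driven by factory methods, hooking a new implementation into the suite looks roughly like the in-memory and Consul specs around it; everything below is hypothetical (`MyStore` and the file name are placeholders):

    # spec/integrations/my_data_store_spec.rb (hypothetical)
    require "data_store_spec_base"
    require "spec_helper"

    def create_my_store(opts = {})
      MyStore.new(opts)                    # placeholder for a real store implementation
    end

    def clear_my_data
      # wipe whatever backend MyStore uses between examples
    end

    describe "My data store" do
      include_examples "data_store", method(:create_my_store), method(:clear_my_data)
    end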
context "with local cache" do - include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) + include_examples "data_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_data_store_spec.rb similarity index 85% rename from spec/integrations/dynamodb_feature_store_spec.rb rename to spec/integrations/dynamodb_data_store_spec.rb index 7734670e..6dbff05d 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_data_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "aws-sdk-dynamodb" require "spec_helper" @@ -22,12 +22,12 @@ } def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -88,7 +88,7 @@ def create_test_client end -describe "DynamoDB feature store" do +describe "DynamoDB data store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local DynamoDB instance running. @@ -96,10 +96,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) + include_examples "data_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 5ca9b9fd..e894077f 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -70,21 +70,21 @@ def event_processor end it "returns the value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(client.variation("key", user, "default")).to eq "value" end it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) expect(client.variation("key", user, "default")).to eq "default" end it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + 
config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -100,8 +100,8 @@ def event_processor end it "queues a feature event for an existing feature when user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -116,8 +116,8 @@ def event_processor end it "queues a feature event for an existing feature when user key is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", @@ -147,8 +147,8 @@ def event_processor trackEvents: true ] } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -172,8 +172,8 @@ def event_processor rules: [], trackEventsFallthrough: true } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -215,8 +215,8 @@ def event_processor end it "returns a value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -224,8 +224,8 @@ def event_processor it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -233,8 +233,8 @@ def event_processor end it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: 
"feature", key: "key", @@ -256,28 +256,28 @@ def event_processor let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } it "returns flag values" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({ key: 'userkey' }) expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end it "returns empty map for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags(nil) expect(result).to eq({}) end it "returns empty map for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({}) expect(result).to eq({}) end it "returns empty map if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = offline_client.all_flags(nil) expect(result).to eq({}) @@ -289,7 +289,7 @@ def event_processor let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } it "returns flags state" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be true @@ -322,7 +322,7 @@ def event_processor flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.feature_store.init({ LaunchDarkly::FEATURES => { + config.data_store.init({ LaunchDarkly::FEATURES => { flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 }}) @@ -339,7 +339,7 @@ def event_processor flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true @@ -372,7 +372,7 @@ def event_processor end it "returns empty state for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state(nil) expect(state.valid?).to be false @@ -380,7 +380,7 @@ def event_processor end it "returns empty state for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => 
{ 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({}) expect(state.valid?).to be false @@ -388,7 +388,7 @@ def event_processor end it "returns empty state if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = offline_client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be false @@ -472,7 +472,7 @@ def event_processor end end - describe "feature store data ordering" do + describe "data store data ordering" do let(:dependency_ordering_test_data) { { LaunchDarkly::FEATURES => { @@ -489,7 +489,7 @@ def event_processor } } - class FakeFeatureStore + class FakeDataStore attr_reader :received_data def init(all_data) @@ -518,11 +518,11 @@ def initialized? end end - it "passes data set to feature store in correct order on init" do - store = FakeFeatureStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, + it "passes data set to data store in correct order on init" do + store = FakeDataStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.data_store, dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) + config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) client = subject.new("secret", config) data = store.received_data diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index d4a1d9bc..6fbaa6c9 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(feature_store: store) + config = LaunchDarkly::Config.new(data_store: store) processor = subject.new(config, requestor) begin yield processor @@ -29,7 +29,7 @@ def with_processor(store) it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -40,7 +40,7 @@ def with_processor(store) it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -53,7 +53,7 @@ def with_processor(store) describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start finished = ready.wait(0.2) @@ -67,7 +67,7 @@ def with_processor(store) describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be true @@ -77,7 +77,7 @@ def verify_unrecoverable_http_error(status) def verify_recoverable_http_error(status) 
allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be false @@ -108,7 +108,7 @@ def verify_recoverable_http_error(status) describe 'stop' do it 'stops promptly rather than continuing to wait for poll interval' do - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| sleep(1) # somewhat arbitrary, but should ensure that it has started polling start_time = Time.now processor.stop diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 5aec6658..6824b60b 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "json" require "redis" require "spec_helper" @@ -15,11 +15,11 @@ } def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) + LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) + LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 0 })) end def clear_all_data @@ -28,19 +28,19 @@ def clear_all_data end -describe LaunchDarkly::RedisFeatureStore do - subject { LaunchDarkly::RedisFeatureStore } +describe LaunchDarkly::RedisDataStore do + subject { LaunchDarkly::RedisDataStore } break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. 
context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) + include_examples "data_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 648833ff..aa97cbf9 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -18,38 +18,38 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") + expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) + expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) processor.send(:process_message, delete_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) + expect(config.data_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do processor.send(:process_message, patch_seg_message) processor.send(:process_message, delete_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will accept INDIRECT PATCH method for flags" do flag = { key: 'key', version: 1 } allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) processor.send(:process_message, indirect_patch_flag_message); - expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) + expect(config.data_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) end it "will accept INDIRECT PATCH method for segments" do segment = { key: 'key', version: 1 } allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) processor.send(:process_message, indirect_patch_segment_message); - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn From 38493b952d34efc893a021f4d201a2220282fd18 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 3 Jan 2020 11:50:52 -0800 Subject: [PATCH 146/292] remove references to UpdateProcessor (now DataSource) --- lib/ldclient-rb/config.rb | 12 +----------- 
lib/ldclient-rb/ldclient.rb | 6 +++--- spec/ldclient_spec.rb | 6 +++--- 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 935abdad..560896b9 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -35,8 +35,6 @@ class Config # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. - # @option opts [Object] :update_processor Obsolete synonym for `data_source`. - # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -59,9 +57,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false - @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] - @update_processor = opts[:update_processor] - @update_processor_factory = opts[:update_processor_factory] + @data_source = opts[:data_source] end # @@ -251,12 +247,6 @@ def offline? # attr_reader :data_source - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor - - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor_factory - # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index eed490a9..fd42b364 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -58,7 +58,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.use_ldd? @config.logger.info { "[LDClient] Started LaunchDarkly Client in LDD mode" } - return # requestor and update processor are not used in this mode + return # requestor and data processor are not used in this mode end data_source_or_factory = @config.data_source || self.method(:create_default_data_source) @@ -342,7 +342,7 @@ def close def create_default_data_source(sdk_key, config) if config.offline? - return NullUpdateProcessor.new + return NullDataSource.new end requestor = Requestor.new(sdk_key, config) if config.stream? @@ -419,7 +419,7 @@ def sanitize_user(user) # Used internally when the client is offline. # @private # - class NullUpdateProcessor + class NullDataSource def start e = Concurrent::Event.new e.set diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index e894077f..e1379dc4 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,7 +7,7 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } + let(:null_data) { LaunchDarkly::NullDataSource.new } let(:logger) { double().as_null_object } let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do @@ -497,7 +497,7 @@ def init(all_data) end end - class FakeUpdateProcessor + class FakeDataSource def initialize(store, data) @store = store @data = data @@ -520,7 +520,7 @@ def initialized? 
it "passes data set to data store in correct order on init" do store = FakeDataStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.data_store, + data_source_factory = lambda { |sdk_key, config| FakeDataSource.new(config.data_store, dependency_ordering_test_data) } config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) client = subject.new("secret", config) From 1cfcd527c38b7eca57fe9f52b88e41316efd2836 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 14 Jan 2020 16:32:01 -0800 Subject: [PATCH 147/292] add event payload ID header --- lib/ldclient-rb/events.rb | 3 +++ spec/events_spec.rb | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 184facc4..bb12f6ec 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,6 +1,7 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "securerandom" require "thread" require "time" @@ -359,6 +360,7 @@ def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) res = nil body = events_out.to_json + payload_id = SecureRandom.uuid (0..1).each do |attempt| if attempt > 0 config.logger.warn { "[LDClient] Will retry posting events after 1 second" } @@ -374,6 +376,7 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + req["X-LaunchDarkly-Payload-ID"] = payload_id req["Connection"] = "keep-alive" res = client.request(req) rescue StandardError => exn diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 16bee286..1108a3ac 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -416,6 +416,29 @@ expect(hc.get_request["authorization"]).to eq "sdk_key" end + it "sends unique payload IDs" do + @ep = subject.new("sdk_key", default_config, hc) + e = { kind: "identify", user: user } + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req0 = hc.get_request + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req1 = hc.get_request + + id0 = req0["x-launchdarkly-payload-id"] + id1 = req1["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(id1).not_to be nil + expect(id1).not_to eq "" + expect(id1).not_to eq id0 + end + def verify_unrecoverable_http_error(status) @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", user: user } @@ -442,8 +465,15 @@ def verify_recoverable_http_error(status) @ep.flush @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil - expect(hc.get_request).not_to be_nil + req0 = hc.get_request + expect(req0).not_to be_nil + req1 = hc.get_request + expect(req1).not_to be_nil + id0 = req0["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(req1["x-launchdarkly-payload-id"]).to eq id0 + expect(hc.get_request).to be_nil # no 3rd request # now verify that a subsequent flush still generates a request From 9865a9847aa0405c2bd6b51ab9c8890ab8634a28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Jan 2020 18:40:56 -0800 Subject: [PATCH 148/292] (6.0) drop support for old Ruby versions --- .circleci/config.yml | 89 +++++--------------------------------------- README.md | 2 +- 2 files changed, 11 insertions(+), 80 deletions(-) diff 
--git a/.circleci/config.yml b/.circleci/config.yml index c6ff6938..f976071f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,12 +4,10 @@ workflows: version: 2 test: jobs: - - test-misc-rubies - - test-2.2 - - test-2.3 - test-2.4 - test-2.5 - test-2.6 + - test-2.7 - test-jruby-9.2 ruby-docker-template: &ruby-docker-template @@ -30,105 +28,38 @@ ruby-docker-template: &ruby-docker-template path: ./rspec jobs: - test-2.2: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.2.10-jessie - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-2.3: + test-2.4: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.3.7-jessie + - image: circleci/ruby:2.4 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.4: + test-2.5: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.4.5-stretch + - image: circleci/ruby:2.5 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.5: + test-2.6: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.5.3-stretch + - image: circleci/ruby:2.6 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.6: + test-2.7: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.6.2-stretch + - image: circleci/ruby:2.7 - image: consul - image: redis - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - - image: circleci/jruby:9-jdk + - image: circleci/jruby:9.2-jdk - image: consul - image: redis - image: amazon/dynamodb-local - - # The following very slow job uses an Ubuntu container to run the Ruby versions that - # CircleCI doesn't provide Docker images for. - test-misc-rubies: - machine: - image: circleci/classic:latest - environment: - - RUBIES: "jruby-9.1.17.0" - steps: - - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - - run: sudo apt-get -q update - - run: sudo apt-get -qy install redis-server - - run: sudo apt-cache policy docker-ce - - run: sudo apt-get -qy install docker-ce - - checkout - - run: - name: install all Ruby versions - command: "parallel rvm install ::: $RUBIES" - - run: - name: bundle install for all versions - shell: /bin/bash -leo pipefail # need -l in order for "rvm use" to work - command: | - set -e; - for i in $RUBIES; - do - rvm use $i; - if [[ $i == jruby* ]]; then - gem install jruby-openssl; # required by bundler, no effect on Ruby MRI - fi - # bundler 2.0 may be preinstalled, we need to remove it if so - yes | gem uninstall bundler --version '>=2.0' || true; - gem install bundler -v 1.17.3; - bundle install; - mv Gemfile.lock "Gemfile.lock.$i" - done - - run: - name: start DynamoDB - command: docker run -p 8000:8000 amazon/dynamodb-local - background: true - - run: - name: download Consul - command: wget https://releases.hashicorp.com/consul/0.8.0/consul_0.8.0_linux_amd64.zip - - run: - name: extract Consul - command: unzip consul_0.8.0_linux_amd64.zip - - run: - name: start Consul - command: ./consul agent -dev - background: true - - run: - name: run tests for all versions - shell: /bin/bash -leo pipefail - command: | - set -e; - for i in $RUBIES; - do - rvm use $i; - cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; - done diff --git a/README.md b/README.md index d3f99b69..c6a6adfc 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ LaunchDarkly overview Supported Ruby versions ----------------------- -This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby. 
+This version of the LaunchDarkly SDK has a minimum Ruby version of 2.3.0, or 9.2.0 for JRuby. Getting started ----------- From bb0d3b1dfffe892bc6d58f2ab072d5e21ba331b3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Jan 2020 17:38:45 -0800 Subject: [PATCH 149/292] add Ruby version constraint to gemspec --- launchdarkly-server-sdk.gemspec | 1 + 1 file changed, 1 insertion(+) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 2e95cd41..264cf16f 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -20,6 +20,7 @@ Gem::Specification.new do |spec| spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.extensions = 'ext/mkrf_conf.rb' + spec.required_ruby_version = ">= 2.4.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" From 4aaf75eb7869cd4f6c6db06b571e1d687aef81da Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 9 Mar 2020 16:45:49 -0700 Subject: [PATCH 150/292] remove Rake dependency --- Gemfile.lock | 2 -- Rakefile | 5 ----- launchdarkly-server-sdk.gemspec | 1 - 3 files changed, 8 deletions(-) delete mode 100644 Rakefile diff --git a/Gemfile.lock b/Gemfile.lock index 5bd07e9c..8ae43040 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -49,7 +49,6 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) ffi (>= 0.5.0, < 2) @@ -92,7 +91,6 @@ DEPENDENCIES diplomat (>= 2.0.2) launchdarkly-server-sdk! listen (~> 3.0) - rake (~> 10.0) redis (~> 3.3.5) rspec (~> 3.2) rspec_junit_formatter (~> 0.3.0) diff --git a/Rakefile b/Rakefile deleted file mode 100644 index fd36e8a5..00000000 --- a/Rakefile +++ /dev/null @@ -1,5 +0,0 @@ -require "bundler/gem_tasks" - -require "rspec/core/rake_task" -RSpec::Core::RakeTask.new(:spec) -task default: :spec diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 2e95cd41..eaf52cd5 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -28,7 +28,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" - spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb From 441a1953c9af36d682c79e29ae87aa3e83f3d923 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 10 Mar 2020 10:50:04 -0700 Subject: [PATCH 151/292] update ld-eventsource to 1.0.2 which doesn't have Rake dependency --- Gemfile.lock | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 8ae43040..77a3bf7d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -4,7 +4,7 @@ PATH launchdarkly-server-sdk (5.6.2) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (= 1.0.1) + ld-eventsource (= 1.0.2) semantic (~> 1.6) GEM @@ -23,7 +23,7 @@ GEM aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.5) + concurrent-ruby (1.1.6) connection_pool (2.2.1) diff-lcs (1.3) diplomat (2.0.2) @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.1) + ld-eventsource (1.0.2) concurrent-ruby (~> 1.0) 
http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index eaf52cd5..f69c74fa 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -35,5 +35,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.1" + spec.add_runtime_dependency "ld-eventsource", "1.0.2" end From 602c5e6dc3b41f2dbae982913a39255d90e9101d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 10 Mar 2020 13:01:45 -0700 Subject: [PATCH 152/292] implement diagnostic events in Ruby (#130) --- lib/ldclient-rb/config.rb | 64 ++ lib/ldclient-rb/events.rb | 180 ++-- lib/ldclient-rb/flags_state.rb | 2 +- lib/ldclient-rb/impl/diagnostic_events.rb | 130 +++ lib/ldclient-rb/impl/event_sender.rb | 72 ++ lib/ldclient-rb/impl/util.rb | 19 + lib/ldclient-rb/ldclient.rb | 21 +- lib/ldclient-rb/requestor.rb | 3 +- lib/ldclient-rb/stream.rb | 23 +- spec/diagnostic_events_spec.rb | 163 +++ spec/evaluation_spec.rb | 2 +- spec/event_sender_spec.rb | 179 ++++ spec/events_spec.rb | 961 ++++++++---------- spec/file_data_source_spec.rb | 2 +- spec/http_util.rb | 17 +- .../integrations/consul_feature_store_spec.rb | 2 - .../dynamodb_feature_store_spec.rb | 2 - spec/ldclient_spec.rb | 2 +- spec/polling_spec.rb | 2 +- spec/redis_feature_store_spec.rb | 3 - spec/requestor_spec.rb | 24 +- spec/spec_helper.rb | 3 + 22 files changed, 1237 insertions(+), 639 deletions(-) create mode 100644 lib/ldclient-rb/impl/diagnostic_events.rb create mode 100644 lib/ldclient-rb/impl/event_sender.rb create mode 100644 lib/ldclient-rb/impl/util.rb create mode 100644 spec/diagnostic_events_spec.rb create mode 100644 spec/event_sender_spec.rb diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c7c42e56..f3612756 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -37,6 +37,10 @@ class Config # @option opts [Object] :data_source See {#data_source}. # @option opts [Object] :update_processor Obsolete synonym for `data_source`. # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. + # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. + # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. + # @option opts [String] :wrapper_name See {#wrapper_name}. + # @option opts [String] :wrapper_version See {#wrapper_version}. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -62,6 +66,11 @@ def initialize(opts = {}) @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] @update_processor = opts[:update_processor] @update_processor_factory = opts[:update_processor_factory] + @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] + @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? + opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval + @wrapper_name = opts[:wrapper_name] + @wrapper_version = opts[:wrapper_version] end # @@ -257,6 +266,45 @@ def offline? # @deprecated This is replaced by {#data_source}. 
attr_reader :update_processor_factory + # + # Set to true to opt out of sending diagnostics data. + # + # Unless `diagnostic_opt_out` is set to true, the client will send some diagnostics data to the LaunchDarkly servers + # in order to assist in the development of future SDK improvements. These diagnostics consist of an initial payload + # containing some details of the SDK in use, the SDK's configuration, and the platform the SDK is being run on, as + # well as periodic information on irregular occurrences such as dropped events. + # @return [Boolean] + # + def diagnostic_opt_out? + @diagnostic_opt_out + end + + # + # The interval at which periodic diagnostic data is sent, in seconds. + # + # The default is 900 (every 15 minutes) and the minimum value is 60 (every minute). + # @return [Float] + # + attr_reader :diagnostic_recording_interval + + # + # For use by wrapper libraries to set an identifying name for the wrapper being used. + # + # This will be sent in User-Agent headers during requests to the LaunchDarkly servers to allow recording + # metrics on the usage of these wrapper libraries. + # @return [String] + # + attr_reader :wrapper_name + + # + # For use by wrapper libraries to report the version of the library in use. + # + # If `wrapper_name` is not set, this field will be ignored. Otherwise the version string will be included in + # the User-Agent headers along with the `wrapper_name` during requests to the LaunchDarkly servers. + # @return [String] + # + attr_reader :wrapper_version + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. @@ -407,5 +455,21 @@ def self.default_user_keys_capacity def self.default_user_keys_flush_interval 300 end + + # + # The default value for {#diagnostic_recording_interval}. + # @return [Float] 900 + # + def self.default_diagnostic_recording_interval + 900 + end + + # + # The minimum value for {#diagnostic_recording_interval}. + # @return [Float] 60 + # + def self.minimum_diagnostic_recording_interval + 60 + end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index bb12f6ec..9313b670 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,7 +1,10 @@ +require "ldclient-rb/impl/diagnostic_events" +require "ldclient-rb/impl/event_sender" +require "ldclient-rb/impl/util" + require "concurrent" require "concurrent/atomics" require "concurrent/executors" -require "securerandom" require "thread" require "time" @@ -24,12 +27,10 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 - CURRENT_SCHEMA_VERSION = 3 USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name ] private_constant :MAX_FLUSH_WORKERS - private_constant :CURRENT_SCHEMA_VERSION private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private @@ -60,6 +61,10 @@ class FlushMessage class FlushUsersMessage end + # @private + class DiagnosticEventMessage + end + # @private class SynchronousMessage def initialize @@ -85,9 +90,9 @@ class StopMessage < SynchronousMessage # @private class EventProcessor - def initialize(sdk_key, config, client = nil) + def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) @logger = config.logger - @inbox = SizedQueue.new(config.capacity) + @inbox = SizedQueue.new(config.capacity < 100 ? 
100 : config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do post_to_inbox(FlushMessage.new) end @@ -96,14 +101,29 @@ def initialize(sdk_key, config, client = nil) post_to_inbox(FlushUsersMessage.new) end @users_flush_task.execute + if !diagnostic_accumulator.nil? + interval = test_properties && test_properties.has_key?(:diagnostic_recording_interval) ? + test_properties[:diagnostic_recording_interval] : + config.diagnostic_recording_interval + @diagnostic_event_task = Concurrent::TimerTask.new(execution_interval: interval) do + post_to_inbox(DiagnosticEventMessage.new) + end + @diagnostic_event_task.execute + else + @diagnostic_event_task = nil + end @stopped = Concurrent::AtomicBoolean.new(false) @inbox_full = Concurrent::AtomicBoolean.new(false) - EventDispatcher.new(@inbox, sdk_key, config, client) + event_sender = test_properties && test_properties.has_key?(:event_sender) ? + test_properties[:event_sender] : + Impl::EventSender.new(sdk_key, config, client ? client : Util.new_http_client(config.events_uri, config)) + + EventDispatcher.new(@inbox, sdk_key, config, diagnostic_accumulator, event_sender) end def add_event(event) - event[:creationDate] = (Time.now.to_f * 1000).to_i + event[:creationDate] = Impl::Util.current_time_millis post_to_inbox(EventMessage.new(event)) end @@ -117,6 +137,7 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown + @diagnostic_event_task.shutdown if !@diagnostic_event_task.nil? # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox # is full; an orderly shutdown can't happen unless these messages are received. @inbox << FlushMessage.new @@ -152,34 +173,36 @@ def post_to_inbox(message) # @private class EventDispatcher - def initialize(inbox, sdk_key, config, client) + def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @sdk_key = sdk_key @config = config - - if client - @client = client - else - @client = Util.new_http_client(@config.events_uri, @config) - end + @diagnostic_accumulator = config.diagnostic_opt_out? ? nil : diagnostic_accumulator + @event_sender = event_sender @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - + @deduplicated_users = 0 + @events_in_last_batch = 0 + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) - Thread.new { main_loop(inbox, outbox, flush_workers) } + if !@diagnostic_accumulator.nil? 
+ diagnostic_event_workers = NonBlockingThreadPool.new(1) + init_event = @diagnostic_accumulator.create_init_event(config) + send_diagnostic_event(init_event, diagnostic_event_workers) + else + diagnostic_event_workers = nil + end + + Thread.new { main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) } end private - def now_millis() - (Time.now.to_f * 1000).to_i - end - - def main_loop(inbox, outbox, flush_workers) + def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) running = true while running do begin @@ -191,11 +214,13 @@ def main_loop(inbox, outbox, flush_workers) trigger_flush(outbox, flush_workers) when FlushUsersMessage @user_keys.clear + when DiagnosticEventMessage + send_and_reset_diagnostics(outbox, diagnostic_event_workers) when TestSyncMessage - synchronize_for_testing(flush_workers) + synchronize_for_testing(flush_workers, diagnostic_event_workers) message.completed when StopMessage - do_shutdown(flush_workers) + do_shutdown(flush_workers, diagnostic_event_workers) running = false message.completed end @@ -205,18 +230,23 @@ def main_loop(inbox, outbox, flush_workers) end end - def do_shutdown(flush_workers) + def do_shutdown(flush_workers, diagnostic_event_workers) flush_workers.shutdown flush_workers.wait_for_termination + if !diagnostic_event_workers.nil? + diagnostic_event_workers.shutdown + diagnostic_event_workers.wait_for_termination + end begin @client.finish rescue end end - def synchronize_for_testing(flush_workers) + def synchronize_for_testing(flush_workers, diagnostic_event_workers) # Used only by unit tests. Wait until all active flush workers have finished. flush_workers.wait_all + diagnostic_event_workers.wait_all if !diagnostic_event_workers.nil? end def dispatch_event(event, outbox) @@ -260,7 +290,9 @@ def notice_user(user) if user.nil? || !user.has_key?(:key) true else - @user_keys.add(user[:key].to_s) + known = @user_keys.add(user[:key].to_s) + @deduplicated_users += 1 if known + known end end @@ -268,7 +300,7 @@ def should_debug_event(event) debug_until = event[:debugEventsUntilDate] if !debug_until.nil? last_past = @last_known_past_time.value - debug_until > last_past && debug_until > now_millis + debug_until > last_past && debug_until > Impl::Util.current_time_millis else false end @@ -281,34 +313,44 @@ def trigger_flush(outbox, flush_workers) payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? + count = payload.events.length + (payload.summary.counters.empty? ? 0 : 1) + @events_in_last_batch = count # If all available worker threads are busy, success will be false and no job will be queued. success = flush_workers.post do begin - resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter) - handle_response(resp) if !resp.nil? + events_out = @formatter.make_output_events(payload.events, payload.summary) + result = @event_sender.send_event_data(events_out.to_json, false) + @disabled.value = true if result.must_shutdown + if !result.time_from_server.nil? 
+ @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i + end rescue => e Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end outbox.clear if success # Reset our internal state, these events now belong to the flush worker + else + @events_in_last_batch = 0 end end - def handle_response(res) - status = res.code.to_i - if status >= 400 - message = Util.http_error_message(status, "event delivery", "some events were dropped") - @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) - @disabled.value = true - end - else - if !res["date"].nil? - begin - res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i - @last_known_past_time.value = res_time - rescue ArgumentError - end + def send_and_reset_diagnostics(outbox, diagnostic_event_workers) + return if @diagnostic_accumulator.nil? + dropped_count = outbox.get_and_clear_dropped_count + event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_users, @events_in_last_batch) + @deduplicated_users = 0 + @events_in_last_batch = 0 + send_diagnostic_event(event, diagnostic_event_workers) + end + + def send_diagnostic_event(event, diagnostic_event_workers) + return if diagnostic_event_workers.nil? + uri = URI(@config.events_uri + "/diagnostic") + diagnostic_event_workers.post do + begin + @event_sender.send_event_data(event.to_json, true) + rescue => e + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -323,6 +365,7 @@ def initialize(capacity, logger) @capacity = capacity @logger = logger @capacity_exceeded = false + @dropped_events = 0 @events = [] @summarizer = EventSummarizer.new end @@ -333,6 +376,7 @@ def add_event(event) @events.push(event) @capacity_exceeded = false else + @dropped_events += 1 if !@capacity_exceeded @capacity_exceeded = true @logger.warn { "[LDClient] Exceeded event queue capacity. Increase capacity to avoid dropping events." } @@ -348,54 +392,18 @@ def get_payload return FlushPayload.new(@events, @summarizer.snapshot) end + def get_and_clear_dropped_count + ret = @dropped_events + @dropped_events = 0 + ret + end + def clear @events = [] @summarizer.clear end end - # @private - class EventPayloadSendTask - def run(sdk_key, config, client, payload, formatter) - events_out = formatter.make_output_events(payload.events, payload.summary) - res = nil - body = events_out.to_json - payload_id = SecureRandom.uuid - (0..1).each do |attempt| - if attempt > 0 - config.logger.warn { "[LDClient] Will retry posting events after 1 second" } - sleep(1) - end - begin - client.start if !client.started? - config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } - uri = URI(config.events_uri + "/bulk") - req = Net::HTTP::Post.new(uri) - req.content_type = "application/json" - req.body = body - req["Authorization"] = sdk_key - req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req["X-LaunchDarkly-Payload-ID"] = payload_id - req["Connection"] = "keep-alive" - res = client.request(req) - rescue StandardError => exn - config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." 
} - next - end - status = res.code.to_i - if status < 200 || status >= 300 - if Util.http_error_recoverable?(status) - next - end - end - break - end - # used up our retries, return the last response if any - res - end - end - # @private class EventOutputFormatter def initialize(config) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 4efe1404..496ad61b 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -22,7 +22,7 @@ def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = fal meta = {} with_details = !details_only_if_tracked || flag[:trackEvents] if !with_details && flag[:debugEventsUntilDate] - with_details = flag[:debugEventsUntilDate] > (Time.now.to_f * 1000).to_i + with_details = flag[:debugEventsUntilDate] > Impl::Util::current_time_millis end if with_details meta[:version] = flag[:version] diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb new file mode 100644 index 00000000..4c61a905 --- /dev/null +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -0,0 +1,130 @@ +require "ldclient-rb/impl/util" + +require "rbconfig" +require "securerandom" + +module LaunchDarkly + module Impl + class DiagnosticAccumulator + def self.create_diagnostic_id(sdk_key) + { + diagnosticId: SecureRandom.uuid, + sdkKeySuffix: sdk_key[-6..-1] || sdk_key + } + end + + def initialize(diagnostic_id) + @id = diagnostic_id + @lock = Mutex.new + self.reset(Util.current_time_millis) + end + + def reset(time) + @data_since_date = time + @stream_inits = [] + end + + def create_init_event(config) + return { + kind: 'diagnostic-init', + creationDate: Util.current_time_millis, + id: @id, + configuration: DiagnosticAccumulator.make_config_data(config), + sdk: DiagnosticAccumulator.make_sdk_data(config), + platform: DiagnosticAccumulator.make_platform_data + } + end + + def record_stream_init(timestamp, failed, duration_millis) + @lock.synchronize do + @stream_inits.push({ timestamp: timestamp, failed: failed, durationMillis: duration_millis }) + end + end + + def create_periodic_event_and_reset(dropped_events, deduplicated_users, events_in_last_batch) + previous_stream_inits = @lock.synchronize do + si = @stream_inits + @stream_inits = [] + si + end + + current_time = Util.current_time_millis + event = { + kind: 'diagnostic', + creationDate: current_time, + id: @id, + dataSinceDate: @data_since_date, + droppedEvents: dropped_events, + deduplicatedUsers: deduplicated_users, + eventsInLastBatch: events_in_last_batch, + streamInits: previous_stream_inits + } + @data_since_date = current_time + event + end + + def self.make_config_data(config) + ret = { + allAttributesPrivate: config.all_attributes_private, + connectTimeoutMillis: self.seconds_to_millis(config.connect_timeout), + customBaseURI: config.base_uri != Config.default_base_uri, + customEventsURI: config.events_uri != Config.default_events_uri, + customStreamURI: config.stream_uri != Config.default_stream_uri, + diagnosticRecordingIntervalMillis: self.seconds_to_millis(config.diagnostic_recording_interval), + eventsCapacity: config.capacity, + eventsFlushIntervalMillis: self.seconds_to_millis(config.flush_interval), + inlineUsersInEvents: config.inline_users_in_events, + pollingIntervalMillis: self.seconds_to_millis(config.poll_interval), + socketTimeoutMillis: self.seconds_to_millis(config.read_timeout), + streamingDisabled: !config.stream?, + userKeysCapacity: config.user_keys_capacity, + userKeysFlushIntervalMillis: 
self.seconds_to_millis(config.user_keys_flush_interval), + usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY'), + usingRelayDaemon: config.use_ldd?, + } + ret + end + + def self.make_sdk_data(config) + ret = { + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION + } + if config.wrapper_name + ret[:wrapperName] = config.wrapper_name + ret[:wrapperVersion] = config.wrapper_version + end + ret + end + + def self.make_platform_data + conf = RbConfig::CONFIG + { + name: 'ruby', + osArch: conf['host_cpu'], + osName: self.normalize_os_name(conf['host_os']), + osVersion: 'unknown', # there seems to be no portable way to detect this in Ruby + rubyVersion: conf['ruby_version'], + rubyImplementation: Object.constants.include?(:RUBY_ENGINE) ? RUBY_ENGINE : 'unknown' + } + end + + def self.normalize_os_name(name) + case name + when /linux|arch/i + 'Linux' + when /darwin/i + 'MacOS' + when /mswin|windows/i + 'Windows' + else + name + end + end + + def self.seconds_to_millis(s) + (s * 1000).to_i + end + end + end +end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb new file mode 100644 index 00000000..834cd3a3 --- /dev/null +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -0,0 +1,72 @@ +require "securerandom" + +module LaunchDarkly + module Impl + EventSenderResult = Struct.new(:success, :must_shutdown, :time_from_server) + + class EventSender + CURRENT_SCHEMA_VERSION = 3 + DEFAULT_RETRY_INTERVAL = 1 + + def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETRY_INTERVAL) + @client = http_client ? http_client : LaunchDarkly::Util.new_http_client(config.events_uri, config) + @sdk_key = sdk_key + @config = config + @events_uri = config.events_uri + "/bulk" + @diagnostic_uri = config.events_uri + "/diagnostic" + @logger = config.logger + @retry_interval = retry_interval + end + + def send_event_data(event_data, is_diagnostic) + uri = is_diagnostic ? @diagnostic_uri : @events_uri + payload_id = is_diagnostic ? nil : SecureRandom.uuid + description = is_diagnostic ? 'diagnostic event' : "#{event_data.length} events" + res = nil + (0..1).each do |attempt| + if attempt > 0 + @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } + sleep(@retry_interval) + end + begin + @client.start if !@client.started? + @logger.debug { "[LDClient] sending #{description}: #{body}" } + req = Net::HTTP::Post.new(uri) + req.content_type = "application/json" + req.body = event_data + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } + if !is_diagnostic + req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + req["X-LaunchDarkly-Payload-ID"] = payload_id + end + req["Connection"] = "keep-alive" + res = @client.request(req) + rescue StandardError => exn + @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } + next + end + status = res.code.to_i + if status >= 200 && status < 300 + res_time = nil + if !res["date"].nil? + begin + res_time = Time.httpdate(res["date"]) + rescue ArgumentError + end + end + return EventSenderResult.new(true, false, res_time) + end + must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) + can_retry = !must_shutdown && attempt == 0 + message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? 
"will retry" : "some events were dropped") + @logger.error { "[LDClient] #{message}" } + if must_shutdown + return EventSenderResult.new(false, true, nil) + end + end + # used up our retries + return EventSenderResult.new(false, false, nil) + end + end + end +end diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb new file mode 100644 index 00000000..d1197afe --- /dev/null +++ b/lib/ldclient-rb/impl/util.rb @@ -0,0 +1,19 @@ + +module LaunchDarkly + module Impl + module Util + def self.current_time_millis + (Time.now.to_f * 1000).to_i + end + + def self.default_http_headers(sdk_key, config) + ret = { "Authorization" => sdk_key, "User-Agent" => "RubyClient/" + LaunchDarkly::VERSION } + if config.wrapper_name + ret["X-LaunchDarkly-Wrapper"] = config.wrapper_name + + (config.wrapper_version ? "/" + config.wrapper_version : "") + end + ret + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index b7c2ee85..06db4f00 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" @@ -46,10 +47,16 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? + diagnostic_accumulator = Impl::DiagnosticAccumulator.new(Impl::DiagnosticAccumulator.create_diagnostic_id(sdk_key)) + else + diagnostic_accumulator = nil + end + if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else - @event_processor = EventProcessor.new(sdk_key, config) + @event_processor = EventProcessor.new(sdk_key, config, diagnostic_accumulator) end if @config.use_ldd? @@ -59,7 +66,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) data_source_or_factory = @config.data_source || self.method(:create_default_data_source) if data_source_or_factory.respond_to? :call - @data_source = data_source_or_factory.call(sdk_key, @config) + # Currently, data source factories take two parameters unless they need to be aware of diagnostic_accumulator, in + # which case they take three parameters. This will be changed in the future to use a less awkware mechanism. + if data_source_or_factory.arity == 3 + @data_source = data_source_or_factory.call(sdk_key, @config, diagnostic_accumulator) + else + @data_source = data_source_or_factory.call(sdk_key, @config) + end else @data_source = data_source_or_factory end @@ -335,13 +348,13 @@ def close private - def create_default_data_source(sdk_key, config) + def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? return NullUpdateProcessor.new end requestor = Requestor.new(sdk_key, config) if config.stream? - StreamProcessor.new(sdk_key, config, requestor) + StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) else config.logger.info { "Disabling streaming API" } config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f7174787..eae0a193 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -51,8 +51,7 @@ def make_request(path) @client.start if !@client.started? 
uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) - req["Authorization"] = @sdk_key - req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } req["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index ddb7f669..e27fad32 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -24,7 +24,7 @@ module LaunchDarkly # @private class StreamProcessor - def initialize(sdk_key, config, requestor) + def initialize(sdk_key, config, requestor, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config @feature_store = config.feature_store @@ -33,6 +33,7 @@ def initialize(sdk_key, config, requestor) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new + @connection_attempt_start_time = 0 end def initialized? @@ -44,18 +45,17 @@ def start @config.logger.info { "[LDClient] Initializing stream connection" } - headers = { - 'Authorization' => @sdk_key, - 'User-Agent' => 'RubyClient/' + LaunchDarkly::VERSION - } + headers = Impl::Util.default_http_headers(@sdk_key, @config) opts = { headers: headers, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } + log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| + log_connection_result(false) case err when SSE::Errors::HTTPStatusError status = err.status @@ -82,6 +82,7 @@ def stop private def process_message(message) + log_connection_result(true) method = message.type @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT @@ -137,5 +138,17 @@ def process_message(message) def key_for_path(kind, path) path.start_with?(KEY_PATHS[kind]) ? path[KEY_PATHS[kind].length..-1] : nil end + + def log_connection_started + @connection_attempt_start_time = Impl::Util::current_time_millis + end + + def log_connection_result(is_success) + if !@diagnostic_accumulator.nil? 
&& @connection_attempt_start_time > 0 + @diagnostic_accumulator.record_stream_init(@connection_attempt_start_time, !is_success, + Impl::Util::current_time_millis - @connection_attempt_start_time) + @connection_attempt_start_time = 0 + end + end end end diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb new file mode 100644 index 00000000..0c4ef058 --- /dev/null +++ b/spec/diagnostic_events_spec.rb @@ -0,0 +1,163 @@ +require "ldclient-rb/impl/diagnostic_events" + +require "spec_helper" + +module LaunchDarkly + module Impl + describe DiagnosticAccumulator do + subject { DiagnosticAccumulator } + + let(:sdk_key) { "sdk_key" } + let(:default_id) { subject.create_diagnostic_id("my-key") } + let(:default_acc) { subject.new(default_id) } + + it "creates unique ID with SDK key suffix" do + id1 = subject.create_diagnostic_id("1234567890") + expect(id1[:sdkKeySuffix]).to eq "567890" + expect(id1[:diagnosticId]).not_to be_nil + + id2 = subject.create_diagnostic_id("1234567890") + expect(id2[:diagnosticId]).not_to eq id1[:diagnosticId] + end + + describe "init event" do + def expected_default_config + { + allAttributesPrivate: false, + connectTimeoutMillis: Config.default_connect_timeout * 1000, + customBaseURI: false, + customEventsURI: false, + customStreamURI: false, + diagnosticRecordingIntervalMillis: Config.default_diagnostic_recording_interval * 1000, + eventsCapacity: Config.default_capacity, + eventsFlushIntervalMillis: Config.default_flush_interval * 1000, + inlineUsersInEvents: false, + pollingIntervalMillis: Config.default_poll_interval * 1000, + socketTimeoutMillis: Config.default_read_timeout * 1000, + streamingDisabled: false, + userKeysCapacity: Config.default_user_keys_capacity, + userKeysFlushIntervalMillis: Config.default_user_keys_flush_interval * 1000, + usingProxy: false, + usingRelayDaemon: false + } + end + + it "has basic fields" do + event = default_acc.create_init_event(Config.new) + expect(event[:kind]).to eq 'diagnostic-init' + expect(event[:creationDate]).not_to be_nil + expect(event[:id]).to eq default_id + end + + it "can have default config data" do + event = default_acc.create_init_event(Config.new) + expect(event[:configuration]).to eq expected_default_config + end + + it "can have custom config data" do + changes_and_expected = [ + [ { all_attributes_private: true }, { allAttributesPrivate: true } ], + [ { connect_timeout: 46 }, { connectTimeoutMillis: 46000 } ], + [ { base_uri: 'http://custom' }, { customBaseURI: true } ], + [ { events_uri: 'http://custom' }, { customEventsURI: true } ], + [ { stream_uri: 'http://custom' }, { customStreamURI: true } ], + [ { diagnostic_recording_interval: 9999 }, { diagnosticRecordingIntervalMillis: 9999000 } ], + [ { capacity: 4000 }, { eventsCapacity: 4000 } ], + [ { flush_interval: 46 }, { eventsFlushIntervalMillis: 46000 } ], + [ { inline_users_in_events: true }, { inlineUsersInEvents: true } ], + [ { poll_interval: 999 }, { pollingIntervalMillis: 999000 } ], + [ { read_timeout: 46 }, { socketTimeoutMillis: 46000 } ], + [ { stream: false }, { streamingDisabled: true } ], + [ { user_keys_capacity: 999 }, { userKeysCapacity: 999 } ], + [ { user_keys_flush_interval: 999 }, { userKeysFlushIntervalMillis: 999000 } ], + [ { use_ldd: true }, { usingRelayDaemon: true } ] + ] + changes_and_expected.each do |config_values, expected_values| + config = Config.new(config_values) + event = default_acc.create_init_event(config) + expect(event[:configuration]).to eq expected_default_config.merge(expected_values) + end 
+ end + + it "detects proxy" do + begin + ENV["http_proxy"] = 'http://my-proxy' + event = default_acc.create_init_event(Config.new) + expect(event[:configuration][:usingProxy]).to be true + ensure + ENV["http_proxy"] = nil + end + end + + it "has expected SDK data" do + event = default_acc.create_init_event(Config.new) + expect(event[:sdk]).to eq ({ + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION + }) + end + + it "has expected SDK data with wrapper" do + event = default_acc.create_init_event(Config.new(wrapper_name: 'my-wrapper', wrapper_version: '2.0')) + expect(event[:sdk]).to eq ({ + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION, + wrapperName: 'my-wrapper', + wrapperVersion: '2.0' + }) + end + + it "has expected platform data" do + event = default_acc.create_init_event(Config.new) + expect(event[:platform]).to include ({ + name: 'ruby' + }) + end + end + + describe "periodic event" do + it "has correct default values" do + acc = subject.new(default_id) + event = acc.create_periodic_event_and_reset(2, 3, 4) + expect(event).to include({ + kind: 'diagnostic', + id: default_id, + droppedEvents: 2, + deduplicatedUsers: 3, + eventsInLastBatch: 4, + streamInits: [] + }) + expect(event[:creationDate]).not_to be_nil + expect(event[:dataSinceDate]).not_to be_nil + end + + it "can add stream init" do + acc = subject.new(default_id) + acc.record_stream_init(1000, false, 2000) + event = acc.create_periodic_event_and_reset(0, 0, 0) + expect(event[:streamInits]).to eq [{ timestamp: 1000, failed: false, durationMillis: 2000 }] + end + + it "resets fields after creating event" do + acc = subject.new(default_id) + acc.record_stream_init(1000, false, 2000) + event1 = acc.create_periodic_event_and_reset(2, 3, 4) + event2 = acc.create_periodic_event_and_reset(5, 6, 7) + expect(event1).to include ({ + droppedEvents: 2, + deduplicatedUsers: 3, + eventsInLastBatch: 4, + streamInits: [{ timestamp: 1000, failed: false, durationMillis: 2000 }] + }) + expect(event2).to include ({ + dataSinceDate: event1[:creationDate], + droppedEvents: 5, + deduplicatedUsers: 6, + eventsInLastBatch: 7, + streamInits: [] + }) + end + end + end + end +end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 2efbd745..14d5ed80 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -17,7 +17,7 @@ } } - let(:logger) { LaunchDarkly::Config.default_logger } + let(:logger) { $null_log } def boolean_flag_with_rules(rules) { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb new file mode 100644 index 00000000..e99761b7 --- /dev/null +++ b/spec/event_sender_spec.rb @@ -0,0 +1,179 @@ +require "ldclient-rb/impl/event_sender" + +require "http_util" +require "spec_helper" + +require "time" + +module LaunchDarkly + module Impl + describe EventSender do + subject { EventSender } + + let(:sdk_key) { "sdk_key" } + let(:fake_data) { '{"things":[]}' } + + def make_sender(server) + subject.new(sdk_key, Config.new(events_uri: server.base_uri.to_s, logger: $null_log), nil, 0.1) + end + + def with_sender_and_server + with_server do |server| + yield make_sender(server), server + end + end + + it "sends analytics event data" do + with_sender_and_server do |es, server| + server.setup_ok_response("/bulk", "") + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + 
+ req = server.await_request + expect(req.body).to eq fake_data + expect(req.header).to include({ + "authorization" => [ sdk_key ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-event-schema" => [ "3" ] + }) + expect(req.header['x-launchdarkly-payload-id']).not_to eq [] + end + end + + it "generates a new payload ID for each payload" do + with_sender_and_server do |es, server| + server.setup_ok_response("/bulk", "") + + result1 = es.send_event_data(fake_data, false) + result2 = es.send_event_data(fake_data, false) + expect(result1.success).to be true + expect(result2.success).to be true + + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).not_to eq req2.header['x-launchdarkly-payload-id'] + end + end + + it "sends diagnostic event data" do + with_sender_and_server do |es, server| + server.setup_ok_response("/diagnostic", "") + + result = es.send_event_data(fake_data, true) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + + req, body = server.await_request_with_body + expect(body).to eq fake_data + expect(req.header).to include({ + "authorization" => [ sdk_key ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + }) + expect(req.header['x-launchdarkly-event-schema']).to eq [] + expect(req.header['x-launchdarkly-payload-id']).to eq [] + end + end + + it "can use a proxy server" do + with_server do |server| + server.setup_ok_response("/bulk", "") + + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + + es = make_sender(server) + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + + req, body = server.await_request_with_body + expect(body).to eq fake_data + ensure + ENV["http_proxy"] = nil + end + end + end + end + + [400, 408, 429, 500].each do |status| + it "handles recoverable error #{status}" do + with_sender_and_server do |es, server| + req_count = 0 + server.setup_response("/bulk") do |req, res| + req_count = req_count + 1 + res.status = req_count == 2 ? 200 : status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + + expect(server.requests.count).to eq 2 + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).to eq req2.header['x-launchdarkly-payload-id'] + end + end + end + + [400, 408, 429, 500].each do |status| + it "only retries error #{status} once" do + with_sender_and_server do |es, server| + req_count = 0 + server.setup_response("/bulk") do |req, res| + req_count = req_count + 1 + res.status = req_count == 3 ? 
200 : status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be false + expect(result.must_shutdown).to be false + expect(result.time_from_server).to be_nil + + expect(server.requests.count).to eq 2 + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).to eq req2.header['x-launchdarkly-payload-id'] + end + end + end + + [401, 403].each do |status| + it "gives up after unrecoverable error #{status}" do + with_sender_and_server do |es, server| + server.setup_response("/bulk") do |req, res| + res.status = status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be false + expect(result.must_shutdown).to be true + expect(result.time_from_server).to be_nil + + expect(server.requests.count).to eq 1 + end + end + end + end + end +end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 1108a3ac..a36fa95f 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -5,8 +5,8 @@ describe LaunchDarkly::EventProcessor do subject { LaunchDarkly::EventProcessor } - let(:default_config) { LaunchDarkly::Config.new } - let(:hc) { FakeHttpClient.new } + let(:default_config_opts) { { diagnostic_opt_out: true, logger: $null_log } } + let(:default_config) { LaunchDarkly::Config.new(default_config_opts) } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, @@ -14,546 +14,508 @@ let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } - after(:each) do - if !@ep.nil? 
- @ep.stop + def with_processor_and_sender(config) + sender = FakeEventSender.new + ep = subject.new("sdk_key", config, nil, nil, { event_sender: sender }) + begin + yield ep, sender + ensure + ep.stop end end it "queues identify event" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) - output = flush_and_get_events - expect(output).to contain_exactly(e) + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end end it "filters user in identify event" do - config = LaunchDarkly::Config.new(all_attributes_private: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly({ - kind: "identify", - key: user[:key], - creationDate: e[:creationDate], - user: filtered_user - }) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly({ + kind: "identify", + key: user[:key], + creationDate: e[:creationDate], + user: filtered_user + }) + end end it "stringifies built-in user attributes in identify event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - e = { kind: "identify", key: numeric_user[:key], user: numeric_user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - kind: "identify", - key: numeric_user[:key].to_s, - creationDate: e[:creationDate], - user: stringified_numeric_user - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + e = { kind: "identify", key: numeric_user[:key], user: numeric_user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + kind: "identify", + key: numeric_user[:key].to_s, + creationDate: e[:creationDate], + user: stringified_numeric_user + ) + end end it "queues individual feature event with index event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "filters user in index event" do - config = LaunchDarkly::Config.new(all_attributes_private: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - 
@ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, filtered_user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, filtered_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "stringifies built-in user attributes in index event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, stringified_numeric_user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, stringified_numeric_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "can include inline user in feature event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, user)), + include(:kind => "summary") + ) + end end it "stringifies built-in user attributes in feature event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, stringified_numeric_user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + 
ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, stringified_numeric_user)), + include(:kind => "summary") + ) + end end it "filters user in feature event" do - config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, filtered_user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, filtered_user)), + include(:kind => "summary") + ) + end end it "still generates index event if inline_users is true but feature event was not tracked" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + include(:kind => "summary") + ) + end end it "sets event kind to debug if flag is temporarily in debug mode" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: future_time - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, true, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: future_time + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, true, user)), + include(:kind => "summary") + ) + end end it "can be both debugging and tracking an event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - future_time = (Time.now.to_f * 
1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true, debugEventsUntilDate: future_time - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), - eq(feature_event(fe, flag, true, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true, debugEventsUntilDate: future_time + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, false, nil)), + eq(feature_event(fe, flag, true, user)), + include(:kind => "summary") + ) + end end it "ends debug mode based on client time if client time is later than server time" do - @ep = subject.new("sdk_key", default_config, hc) - - # Pick a server time that is somewhat behind the client time - server_time = (Time.now.to_f * 1000).to_i - 20000 - - # Send and flush an event we don't care about, just to set the last server time - hc.set_server_time(server_time) - @ep.add_event({ kind: "identify", user: { key: "otherUser" }}) - flush_and_get_events - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the server time, but in the past compared to the client. - flag = { key: "flagkey", version: 11 } - debug_until = server_time + 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - @ep.add_event(fe) - - # Should get a summary event only, not a full feature event - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + # Pick a server time that is somewhat behind the client time + server_time = Time.now - 20 + + # Send and flush an event we don't care about, just to set the last server time + sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) + ep.add_event({ kind: "identify", user: user }) + flush_and_get_events(ep, sender) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the server time, but in the past compared to the client. 
+ flag = { key: "flagkey", version: 11 } + debug_until = (server_time.to_f * 1000).to_i + 1000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until + } + ep.add_event(fe) + + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + include(:kind => "summary") + ) + end end it "ends debug mode based on server time if server time is later than client time" do - @ep = subject.new("sdk_key", default_config, hc) - - # Pick a server time that is somewhat ahead of the client time - server_time = (Time.now.to_f * 1000).to_i + 20000 - - # Send and flush an event we don't care about, just to set the last server time - hc.set_server_time(server_time) - @ep.add_event({ kind: "identify", user: { key: "otherUser" }}) - flush_and_get_events - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the server time, but in the past compared to the client. - flag = { key: "flagkey", version: 11 } - debug_until = server_time - 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - @ep.add_event(fe) - - # Should get a summary event only, not a full feature event - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + # Pick a server time that is somewhat ahead of the client time + server_time = Time.now + 20 + + # Send and flush an event we don't care about, just to set the last server time + sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) + ep.add_event({ kind: "identify", user: user }) + flush_and_get_events(ep, sender) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the server time, but in the past compared to the client. 
+ flag = { key: "flagkey", version: 11 } + debug_until = (server_time.to_f * 1000).to_i - 1000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until + } + ep.add_event(fe) + + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + include(:kind => "summary") + ) + end end it "generates only one index event for multiple events with same user" do - @ep = subject.new("sdk_key", default_config, hc) - flag1 = { key: "flagkey1", version: 11 } - flag2 = { key: "flagkey2", version: 22 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe1) - @ep.add_event(fe2) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe1, user)), - eq(feature_event(fe1, flag1, false, nil)), - eq(feature_event(fe2, flag2, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag1 = { key: "flagkey1", version: 11 } + flag2 = { key: "flagkey2", version: 22 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe1 = { + kind: "feature", key: "flagkey1", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + fe2 = { + kind: "feature", key: "flagkey2", version: 22, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe1) + ep.add_event(fe2) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe1, user)), + eq(feature_event(fe1, flag1, false, nil)), + eq(feature_event(fe2, flag2, false, nil)), + include(:kind => "summary") + ) + end end it "summarizes non-tracked events" do - @ep = subject.new("sdk_key", default_config, hc) - flag1 = { key: "flagkey1", version: 11 } - flag2 = { key: "flagkey2", version: 22 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value1", default: "default1" - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 2, value: "value2", default: "default2" - } - @ep.add_event(fe1) - @ep.add_event(fe2) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe1, user)), - eq({ - kind: "summary", - startDate: fe1[:creationDate], - endDate: fe2[:creationDate], - features: { - flagkey1: { - default: "default1", - counters: [ - { version: 11, variation: 1, value: "value1", count: 1 } - ] - }, - flagkey2: { - default: "default2", - counters: [ - { version: 22, variation: 2, value: "value2", count: 1 } - ] + with_processor_and_sender(default_config) do |ep, sender| + flag1 = { key: "flagkey1", version: 11 } + flag2 = { key: "flagkey2", version: 22 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe1 = { + kind: "feature", key: "flagkey1", version: 11, user: user, + variation: 1, value: "value1", default: "default1" + } + fe2 = { + kind: "feature", key: "flagkey2", version: 22, user: user, + variation: 2, value: "value2", default: "default2" + } + ep.add_event(fe1) + ep.add_event(fe2) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe1, user)), 
+ eq({ + kind: "summary", + startDate: fe1[:creationDate], + endDate: fe2[:creationDate], + features: { + flagkey1: { + default: "default1", + counters: [ + { version: 11, variation: 1, value: "value1", count: 1 } + ] + }, + flagkey2: { + default: "default2", + counters: [ + { version: 22, variation: 2, value: "value2", count: 1 } + ] + } } - } - }) - ) + }) + ) + end end it "queues custom event with user" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(e, user)), - eq(custom_event(e, nil)) - ) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(e, user)), + eq(custom_event(e, nil)) + ) + end end it "can include inline user in custom event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, user)) + ) + end end it "filters user in custom event" do - config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, filtered_user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, filtered_user)) + ) + end end it "stringifies built-in user attributes in custom event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: numeric_user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, stringified_numeric_user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: numeric_user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, stringified_numeric_user)) + ) + end end it "does a final flush when shutting down" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) - - @ep.stop - - output = get_events_from_last_request - expect(output).to 
contain_exactly(e) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + + ep.stop + + output = sender.analytics_payloads.pop + expect(output).to contain_exactly(e) + end end it "sends nothing if there are no events" do - @ep = subject.new("sdk_key", default_config, hc) - @ep.flush - expect(hc.get_request).to be nil - end - - it "sends SDK key" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - @ep.flush - @ep.wait_until_inactive - - expect(hc.get_request["authorization"]).to eq "sdk_key" - end - - it "sends unique payload IDs" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - req0 = hc.get_request - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - req1 = hc.get_request - - id0 = req0["x-launchdarkly-payload-id"] - id1 = req1["x-launchdarkly-payload-id"] - expect(id0).not_to be_nil - expect(id0).not_to eq "" - expect(id1).not_to be nil - expect(id1).not_to eq "" - expect(id1).not_to eq id0 - end - - def verify_unrecoverable_http_error(status) - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - hc.set_response_status(status) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil - hc.reset - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).to be_nil - end - - def verify_recoverable_http_error(status) - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - hc.set_response_status(503) - @ep.flush - @ep.wait_until_inactive - - req0 = hc.get_request - expect(req0).not_to be_nil - req1 = hc.get_request - expect(req1).not_to be_nil - id0 = req0["x-launchdarkly-payload-id"] - expect(id0).not_to be_nil - expect(id0).not_to eq "" - expect(req1["x-launchdarkly-payload-id"]).to eq id0 - - expect(hc.get_request).to be_nil # no 3rd request - - # now verify that a subsequent flush still generates a request - hc.reset - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil + with_processor_and_sender(default_config) do |ep, sender| + ep.flush + ep.wait_until_inactive + expect(sender.analytics_payloads.empty?).to be true + end end - it "stops posting events after getting a 401 error" do - verify_unrecoverable_http_error(401) - end + it "stops posting events after unrecoverable error" do + with_processor_and_sender(default_config) do |ep, sender| + sender.result = LaunchDarkly::Impl::EventSenderResult.new(false, true, nil) + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + flush_and_get_events(ep, sender) - it "stops posting events after getting a 403 error" do - verify_unrecoverable_http_error(403) + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + ep.flush + ep.wait_until_inactive + expect(sender.analytics_payloads.empty?).to be true + end end - it "retries after 408 error" do - verify_recoverable_http_error(408) - end + describe "diagnostic events" do + let(:default_id) { LaunchDarkly::Impl::DiagnosticAccumulator.create_diagnostic_id('sdk_key') } + let(:diagnostic_config) { LaunchDarkly::Config.new(diagnostic_opt_out: false, logger: $null_log) } - it "retries after 429 error" do - verify_recoverable_http_error(429) - end + def with_diagnostic_processor_and_sender(config) + sender = 
FakeEventSender.new + acc = LaunchDarkly::Impl::DiagnosticAccumulator.new(default_id) + ep = subject.new("sdk_key", config, nil, acc, + { diagnostic_recording_interval: 0.2, event_sender: sender }) + begin + yield ep, sender + ensure + ep.stop + end + end - it "retries after 503 error" do - verify_recoverable_http_error(503) - end + it "sends init event" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + event = sender.diagnostic_payloads.pop + expect(event).to include({ + kind: 'diagnostic-init', + id: default_id + }) + end + end - it "retries flush once after connection error" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) + it "sends periodic event" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + id: default_id, + droppedEvents: 0, + deduplicatedUsers: 0, + eventsInLastBatch: 0, + streamInits: [] + }) + end + end - hc.set_exception(IOError.new("deliberate error")) - @ep.flush - @ep.wait_until_inactive + it "counts events in queue from last flush and dropped events" do + config = LaunchDarkly::Config.new(diagnostic_opt_out: false, capacity: 2, logger: $null_log) + with_diagnostic_processor_and_sender(config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop + + ep.add_event({ kind: 'identify', user: user }) + ep.add_event({ kind: 'identify', user: user }) + ep.add_event({ kind: 'identify', user: user }) + flush_and_get_events(ep, sender) + + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + droppedEvents: 1, + eventsInLastBatch: 2 + }) + end + end - expect(hc.get_request).not_to be_nil - expect(hc.get_request).not_to be_nil - expect(hc.get_request).to be_nil # no 3rd request - end + it "counts deduplicated users" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop - it "makes actual HTTP request with correct headers" do - e = { kind: "identify", key: user[:key], user: user } - with_server do |server| - server.setup_ok_response("/bulk", "") - - @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) - @ep.add_event(e) - @ep.flush - - req = server.await_request - expect(req.header).to include({ - "authorization" => [ "sdk_key" ], - "content-type" => [ "application/json" ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "x-launchdarkly-event-schema" => [ "3" ] - }) - end - end + ep.add_event({ kind: 'custom', key: 'event1', user: user }) + ep.add_event({ kind: 'custom', key: 'event2', user: user }) + events = flush_and_get_events(ep, sender) - it "can use a proxy server" do - e = { kind: "identify", key: user[:key], user: user } - with_server do |server| - server.setup_ok_response("/bulk", "") - - with_server(StubProxyServer.new) do |proxy| - begin - ENV["http_proxy"] = proxy.base_uri.to_s - @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) - @ep.add_event(e) - @ep.flush - - req = server.await_request - expect(req["content-type"]).to eq("application/json") - ensure - ENV["http_proxy"] = nil - end + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + deduplicatedUsers: 1 + }) end end end @@ -599,75 +561,26 @@ def custom_event(e, 
inline_user) out end - def flush_and_get_events - @ep.flush - @ep.wait_until_inactive - get_events_from_last_request + def flush_and_get_events(ep, sender) + ep.flush + ep.wait_until_inactive + sender.analytics_payloads.pop end - def get_events_from_last_request - req = hc.get_request - JSON.parse(req.body, symbolize_names: true) - end + class FakeEventSender + attr_accessor :result + attr_reader :analytics_payloads + attr_reader :diagnostic_payloads - class FakeHttpClient def initialize - reset - end - - def set_response_status(status) - @status = status - end - - def set_server_time(time_millis) - @server_time = Time.at(time_millis.to_f / 1000) - end - - def set_exception(e) - @exception = e - end - - def reset - @requests = [] - @status = 200 - end - - def request(req) - @requests.push(req) - if @exception - raise @exception - else - headers = {} - if @server_time - headers["Date"] = @server_time.httpdate - end - FakeResponse.new(@status ? @status : 200, headers) - end + @result = LaunchDarkly::Impl::EventSenderResult.new(true, false, nil) + @analytics_payloads = Queue.new + @diagnostic_payloads = Queue.new end - def start - end - - def started? - false - end - - def finish - end - - def get_request - @requests.shift - end - end - - class FakeResponse - include Net::HTTPHeader - - attr_reader :code - - def initialize(status, headers) - @code = status.to_s - initialize_http_header(headers) + def send_event_data(data, is_diagnostic) + (is_diagnostic ? @diagnostic_payloads : @analytics_payloads).push(JSON.parse(data, symbolize_names: true)) + @result end end end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 837b775d..212d057b 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -95,7 +95,7 @@ def []=(key, value) let(:bad_file_path) { "no-such-file" } before do - @config = LaunchDarkly::Config.new + @config = LaunchDarkly::Config.new(logger: $null_log) @store = @config.feature_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/http_util.rb b/spec/http_util.rb index e43e2ded..27032589 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -52,6 +52,13 @@ def setup_response(uri_path, &action) @server.mount_proc(uri_path, action) end + def setup_status_response(uri_path, status, headers={}) + setup_response(uri_path) do |req, res| + res.status = status + headers.each { |n, v| res[n] = v } + end + end + def setup_ok_response(uri_path, body, content_type=nil, headers={}) setup_response(uri_path) do |req, res| res.status = 200 @@ -63,11 +70,17 @@ def setup_ok_response(uri_path, body, content_type=nil, headers={}) def record_request(req, res) @requests.push(req) - @requests_queue << req + @requests_queue << [req, req.body] end def await_request - @requests_queue.pop + r = @requests_queue.pop + r[0] + end + + def await_request_with_body + r = @requests_queue.pop + return r[0], r[1] end end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index e74d0f0d..bad1e736 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -4,8 +4,6 @@ $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $consul_base_opts = { prefix: $my_prefix, diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 7734670e..3b95edc8 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ 
b/spec/integrations/dynamodb_feature_store_spec.rb @@ -6,8 +6,6 @@ $table_name = 'LD_DYNAMODB_TEST_TABLE' $endpoint = 'http://localhost:8000' $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $dynamodb_opts = { credentials: Aws::Credentials.new("key", "secret"), diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4672a662..1d3bb506 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -461,7 +461,7 @@ def event_processor end describe 'with send_events: true' do - let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, data_source: null_data}) } + let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, diagnostic_opt_out: true, data_source: null_data}) } let(:client_with_events) { subject.new("secret", config_with_events) } it "does not use a NullEventProcessor" do diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 690147d0..b0eb46c5 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(feature_store: store) + config = LaunchDarkly::Config.new(feature_store: store, logger: $null_log) processor = subject.new(config, requestor) begin yield processor diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 5aec6658..cf69f334 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -4,10 +4,7 @@ require "spec_helper" - $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $base_opts = { prefix: $my_prefix, diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 502f6d86..6833ea1f 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -4,10 +4,13 @@ $sdk_key = "secret" describe LaunchDarkly::Requestor do - def with_requestor(base_uri) - r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new(base_uri: base_uri)) - yield r - r.stop + def with_requestor(base_uri, opts = {}) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri }.merge(opts))) + begin + yield r + ensure + r.stop + end end describe "request_all_flags" do @@ -56,6 +59,19 @@ def with_requestor(base_uri) end end + it "sends wrapper header if configured" do + with_server do |server| + with_requestor(server.base_uri.to_s, { wrapper_name: 'MyWrapper', wrapper_version: '1.0' }) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_all_data() + expect(server.requests.count).to eq 1 + expect(server.requests[0].header).to include({ + "x-launchdarkly-wrapper" => [ "MyWrapper/1.0" ] + }) + end + end + end + it "can reuse cached data" do etag = "xyz" expected_data = { flags: { x: { key: "x" } } } diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index cc5e312b..52926ac1 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -3,6 +3,9 @@ require "ldclient-rb" +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + RSpec.configure do |config| config.before(:each) do end From ddfbd17bbbbe04b5c3d4968f291948f35a89b430 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 17 Mar 2020 18:28:07 -0700 Subject: [PATCH 153/292] update ruby-eventsource to 1.0.3 for backoff bug --- Gemfile.lock | 4 ++-- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 0087dad9..91f72a95 100644 --- a/Gemfile.lock +++ 
b/Gemfile.lock @@ -4,7 +4,7 @@ PATH launchdarkly-server-sdk (5.7.0) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (= 1.0.2) + ld-eventsource (= 1.0.3) semantic (~> 1.6) GEM @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.2) + ld-eventsource (1.0.3) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index f69c74fa..237474ef 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -35,5 +35,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.2" + spec.add_runtime_dependency "ld-eventsource", "1.0.3" end From 1fe77bb2da953768b251f3eced173d1689a89c32 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 27 Mar 2020 12:01:10 -0700 Subject: [PATCH 154/292] fix incorrect initialization of EventProcessor --- lib/ldclient-rb/events.rb | 4 +- lib/ldclient-rb/impl/event_sender.rb | 5 +- lib/ldclient-rb/ldclient.rb | 2 +- spec/event_sender_spec.rb | 16 ++-- spec/events_spec.rb | 2 +- spec/ldclient_end_to_end_spec.rb | 123 +++++++++++++++++++++++++++ 6 files changed, 137 insertions(+), 15 deletions(-) create mode 100644 spec/ldclient_end_to_end_spec.rb diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 9313b670..0b65f3d5 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -319,7 +319,7 @@ def trigger_flush(outbox, flush_workers) success = flush_workers.post do begin events_out = @formatter.make_output_events(payload.events, payload.summary) - result = @event_sender.send_event_data(events_out.to_json, false) + result = @event_sender.send_event_data(events_out.to_json, "#{events_out.length} events", false) @disabled.value = true if result.must_shutdown if !result.time_from_server.nil? @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i @@ -348,7 +348,7 @@ def send_diagnostic_event(event, diagnostic_event_workers) uri = URI(@config.events_uri + "/diagnostic") diagnostic_event_workers.post do begin - @event_sender.send_event_data(event.to_json, true) + @event_sender.send_event_data(event.to_json, "diagnostic event", true) rescue => e Util.log_exception(@config.logger, "Unexpected error in event processor", e) end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 834cd3a3..f6da0843 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -18,10 +18,9 @@ def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETR @retry_interval = retry_interval end - def send_event_data(event_data, is_diagnostic) + def send_event_data(event_data, description, is_diagnostic) uri = is_diagnostic ? @diagnostic_uri : @events_uri payload_id = is_diagnostic ? nil : SecureRandom.uuid - description = is_diagnostic ? 'diagnostic event' : "#{event_data.length} events" res = nil (0..1).each do |attempt| if attempt > 0 @@ -30,7 +29,7 @@ def send_event_data(event_data, is_diagnostic) end begin @client.start if !@client.started? 
- @logger.debug { "[LDClient] sending #{description}: #{body}" } + @logger.debug { "[LDClient] sending #{description}: #{event_data}" } req = Net::HTTP::Post.new(uri) req.content_type = "application/json" req.body = event_data diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 06db4f00..ed0a724e 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -56,7 +56,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else - @event_processor = EventProcessor.new(sdk_key, config, diagnostic_accumulator) + @event_processor = EventProcessor.new(sdk_key, config, nil, diagnostic_accumulator) end if @config.use_ldd? diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index e99761b7..0519aebb 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -27,7 +27,7 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -49,8 +49,8 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") - result1 = es.send_event_data(fake_data, false) - result2 = es.send_event_data(fake_data, false) + result1 = es.send_event_data(fake_data, "", false) + result2 = es.send_event_data(fake_data, "", false) expect(result1.success).to be true expect(result2.success).to be true @@ -66,7 +66,7 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/diagnostic", "") - result = es.send_event_data(fake_data, true) + result = es.send_event_data(fake_data, "", true) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -94,7 +94,7 @@ def with_sender_and_server es = make_sender(server) - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true @@ -116,7 +116,7 @@ def with_sender_and_server res.status = req_count == 2 ? 200 : status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -141,7 +141,7 @@ def with_sender_and_server res.status = req_count == 3 ? 200 : status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be false expect(result.must_shutdown).to be false @@ -164,7 +164,7 @@ def with_sender_and_server res.status = status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be false expect(result.must_shutdown).to be true diff --git a/spec/events_spec.rb b/spec/events_spec.rb index a36fa95f..c32eeb29 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -578,7 +578,7 @@ def initialize @diagnostic_payloads = Queue.new end - def send_event_data(data, is_diagnostic) + def send_event_data(data, description, is_diagnostic) (is_diagnostic ? 
@diagnostic_payloads : @analytics_payloads).push(JSON.parse(data, symbolize_names: true)) @result end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb new file mode 100644 index 00000000..b93a98b4 --- /dev/null +++ b/spec/ldclient_end_to_end_spec.rb @@ -0,0 +1,123 @@ +require "http_util" +require "spec_helper" + + +SDK_KEY = "sdk-key" + +USER = { key: 'userkey' } + +ALWAYS_TRUE_FLAG = { key: 'flagkey', version: 1, on: false, offVariation: 1, variations: [ false, true ] } +DATA_WITH_ALWAYS_TRUE_FLAG = { + flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, + segments: {} +} +PUT_EVENT_WITH_ALWAYS_TRUE_FLAG = "event: put\ndata:{\"data\":#{DATA_WITH_ALWAYS_TRUE_FLAG.to_json}}\n\n'" + +def with_client(config) + client = LaunchDarkly::LDClient.new(SDK_KEY, config) + begin + yield client + ensure + client.close + end +end + +module LaunchDarkly + # Note that we can't do end-to-end tests in streaming mode until we have a test server that can do streaming + # responses, which is difficult in WEBrick. + + describe "LDClient end-to-end" do + it "starts in polling mode" do + with_server do |poll_server| + poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + send_events: false, + logger: NullLogger.new + ) + with_client(config) do |client| + expect(client.initialized?).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be true + end + end + end + + it "fails in polling mode with 401 error" do + with_server do |poll_server| + poll_server.setup_status_response("/sdk/latest-all", 401) + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + send_events: false, + logger: NullLogger.new + ) + with_client(config) do |client| + expect(client.initialized?).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be false + end + end + end + + it "sends event without diagnostics" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + events_uri: events_server.base_uri.to_s, + diagnostic_opt_out: true, + logger: NullLogger.new + ) + with_client(config) do |client| + client.identify(USER) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ SDK_KEY ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + end + + it "sends diagnostic event" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + events_server.setup_ok_response("/diagnostic", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + events_uri: events_server.base_uri.to_s, + logger: NullLogger.new + ) + with_client(config) do |client| + user = { key: 'userkey' } + client.identify(user) + client.flush + + req0, body0 = events_server.await_request_with_body + req1, body1 = events_server.await_request_with_body + req = req0.path == "/diagnostic" ? req0 : req1 + body = req0.path == "/diagnostic" ? 
body0 : body1 + expect(req.header['authorization']).to eq [ SDK_KEY ] + data = JSON.parse(body) + expect(data["kind"]).to eq "diagnostic-init" + end + end + end + end + + # TODO: TLS tests with self-signed cert + end +end From 6f0e1e5ee54eddbc0fb7682d06d93ba3947b232a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 14:13:05 -0700 Subject: [PATCH 155/292] remove install-time openssl check that breaks if you don't have rake --- ext/mkrf_conf.rb | 11 ----------- launchdarkly-server-sdk.gemspec | 1 - 2 files changed, 12 deletions(-) delete mode 100644 ext/mkrf_conf.rb diff --git a/ext/mkrf_conf.rb b/ext/mkrf_conf.rb deleted file mode 100644 index 23c2c7b6..00000000 --- a/ext/mkrf_conf.rb +++ /dev/null @@ -1,11 +0,0 @@ -require "rubygems" - - -# From http://stackoverflow.com/questions/5830835/how-to-add-openssl-dependency-to-gemspec -# the whole reason this file exists: to return an error if openssl -# isn't installed. -require "openssl" - -f = File.open(File.join(File.dirname(__FILE__), "Rakefile"), "w") # create dummy rakefile to indicate success -f.write("task :default\n") -f.close diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 237474ef..67b39daf 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -19,7 +19,6 @@ Gem::Specification.new do |spec| spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] - spec.extensions = 'ext/mkrf_conf.rb' spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" From e35f8abbe8f256027a33125f0485b1307c79b9b8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 15:12:03 -0700 Subject: [PATCH 156/292] treat comparison with wrong data type as a non-match, not an exception (#134) --- lib/ldclient-rb/evaluation.rb | 25 +++++++++++++++++-------- spec/evaluation_spec.rb | 12 ++++++------ 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index d0d2aa38..3c18e7ff 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -140,35 +140,44 @@ def self.comparator(converter) end, endsWith: lambda do |a, b| - (a.is_a? String) && (a.end_with? b) + (a.is_a? String) && (b.is_a? String) && (a.end_with? b) end, startsWith: lambda do |a, b| - (a.is_a? String) && (a.start_with? b) + (a.is_a? String) && (b.is_a? String) && (a.start_with? b) end, matches: lambda do |a, b| - (b.is_a? String) && !(Regexp.new b).match(a).nil? + if (b.is_a? String) && (b.is_a? String) + begin + re = Regexp.new b + !re.match(a).nil? + rescue + false + end + else + false + end end, contains: lambda do |a, b| - (a.is_a? String) && (a.include? b) + (a.is_a? String) && (b.is_a? String) && (a.include? b) end, lessThan: lambda do |a, b| - (a.is_a? Numeric) && (a < b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a < b) end, lessThanOrEqual: lambda do |a, b| - (a.is_a? Numeric) && (a <= b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a <= b) end, greaterThan: lambda do |a, b| - (a.is_a? Numeric) && (a > b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a > b) end, greaterThanOrEqual: lambda do |a, b| - (a.is_a? Numeric) && (a >= b) + (a.is_a? Numeric) && (b.is_a? 
Numeric) && (a >= b) end, before: comparator(DATE_OPERAND) { |n| n < 0 }, diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 14d5ed80..b8bed817 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -495,13 +495,13 @@ def boolean_flag_with_clauses(clauses) # mixed strings and numbers [ :in, "99", 99, false ], [ :in, 99, "99", false ], - #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK + [ :contains, "99", 99, false ], + [ :startsWith, "99", 99, false ], + [ :endsWith, "99", 99, false ], [ :lessThanOrEqual, "99", 99, false ], - #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :lessThanOrEqual, 99, "99", false ], [ :greaterThanOrEqual, "99", 99, false ], - #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :greaterThanOrEqual, 99, "99", false ], # regex [ :matches, "hello world", "hello.*rld", true ], @@ -509,7 +509,7 @@ def boolean_flag_with_clauses(clauses) [ :matches, "hello world", "l+", true ], [ :matches, "hello world", "(world|planet)", true ], [ :matches, "hello world", "aloha", false ], - #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK + [ :matches, "hello world", "***not a regex", false ], # dates [ :before, dateStr1, dateStr2, true ], From 3e55dc410280b7562ad6bdfbf05b70804e3b2272 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 15:12:45 -0700 Subject: [PATCH 157/292] fail fast for nil SDK key when appropriate --- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/ldclient.rb | 11 ++++++++++ spec/ldclient_spec.rb | 40 +++++++++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0b65f3d5..a5352a0b 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -91,6 +91,7 @@ class StopMessage < SynchronousMessage # @private class EventProcessor def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key @logger = config.logger @inbox = SizedQueue.new(config.capacity < 100 ? 100 : config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ed0a724e..1dc0cc25 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -33,6 +33,16 @@ class LDClient # @return [LDClient] The LaunchDarkly client instance # def initialize(sdk_key, config = Config.default, wait_for_sec = 5) + # Note that sdk_key is normally a required parameter, and a nil value would cause the SDK to + # fail in most configurations. However, there are some configurations where it would be OK + # (offline = true, *or* we are using LDD mode or the file data source and events are disabled + # so we're not connecting to any LD services) so rather than try to check for all of those + # up front, we will let the constructors for the data source implementations implement this + # fail-fast as appropriate, and just check here for the part regarding events. + if !config.offline? 
&& config.send_events + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? + end + @sdk_key = sdk_key @event_factory_default = EventFactory.new(false) @@ -352,6 +362,7 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? return NullUpdateProcessor.new end + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key requestor = Requestor.new(sdk_key, config) if config.stream? StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 1d3bb506..40ce5a1d 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -49,6 +49,46 @@ def event_processor client.instance_variable_get(:@event_processor) end + describe "constructor requirement of non-nil sdk key" do + it "is not enforced when offline" do + subject.new(nil, offline_config) + end + + it "is not enforced if use_ldd is true and send_events is false" do + subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true, send_events: false })) + end + + it "is not enforced if using file data and send_events is false" do + source = LaunchDarkly::FileDataSource.factory({}) + subject.new(nil, LaunchDarkly::Config.new({ data_source: source, send_events: false })) + end + + it "is enforced in streaming mode even if send_events is false" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ send_events: false })) + }.to raise_error(ArgumentError) + end + + it "is enforced in polling mode even if send_events is false" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ stream: false, send_events: false })) + }.to raise_error(ArgumentError) + end + + it "is enforced if use_ldd is true and send_events is true" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true })) + }.to raise_error(ArgumentError) + end + + it "is enforced if using file data and send_events is true" do + source = LaunchDarkly::FileDataSource.factory({}) + expect { + subject.new(nil, LaunchDarkly::Config.new({ data_source: source })) + }.to raise_error(ArgumentError) + end + end + describe '#variation' do feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, trackEvents: true, debugEventsUntilDate: 1000 } From ad7cd7f05c44aa353fb3d5a3f36130eb62a45187 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 4 May 2020 14:25:25 -0700 Subject: [PATCH 158/292] tolerate nil value for user.custom (#137) --- lib/ldclient-rb/user_filter.rb | 5 +++-- spec/events_spec.rb | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index 8cbf67ca..b67f6844 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -15,8 +15,9 @@ def transform_user_props(user_props) user_private_attrs = Set.new((user_props[:privateAttributeNames] || []).map(&:to_sym)) filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS) - if user_props.has_key?(:custom) - filtered_user_props[:custom], removed_custom = filter_values(user_props[:custom], user_private_attrs) + custom = user_props[:custom] + if !custom.nil? 
+ filtered_user_props[:custom], removed_custom = filter_values(custom, user_private_attrs) removed.merge(removed_custom) end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index c32eeb29..d7854567 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -408,6 +408,17 @@ def with_processor_and_sender(config) end end + it "treats nil value for custom the same as an empty hash" do + with_processor_and_sender(default_config) do |ep, sender| + user_with_nil_custom = { key: "userkey", custom: nil } + e = { kind: "identify", key: "userkey", user: user_with_nil_custom } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end + end + it "does a final flush when shutting down" do with_processor_and_sender(default_config) do |ep, sender| e = { kind: "identify", key: user[:key], user: user } From 4ca5ad3581ab7da19a22ad192254d676fb1641c9 Mon Sep 17 00:00:00 2001 From: Jacob Smith Date: Wed, 27 May 2020 13:05:19 -0400 Subject: [PATCH 159/292] Only shutdown the Redis pool if it is owned by the SDK (#158) * Only shutdown a Redis pool created by SDK * Make pool shutdown behavior an option --- .../impl/integrations/redis_impl.rb | 3 ++ lib/ldclient-rb/integrations/redis.rb | 1 + lib/ldclient-rb/redis_store.rb | 1 + spec/redis_feature_store_spec.rb | 37 +++++++++++++++++-- 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 107340f8..876f4240 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -33,6 +33,8 @@ def initialize(opts) @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do ::Redis.new(@redis_opts) end + # shutdown pool on close unless the client passed a custom pool and specified not to shutdown + @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true)) @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -118,6 +120,7 @@ def initialized_internal? def stop if @stopped.make_true + return unless @pool_shutdown_on_close @pool.shutdown { |redis| redis.close } end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 7e447657..396c1b35 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -45,6 +45,7 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. 
# @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts) diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 48632411..b94e61f2 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -35,6 +35,7 @@ class RedisFeatureStore # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. # def initialize(opts = {}) core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index cf69f334..e3a179b1 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,3 +1,4 @@ +require "connection_pool" require "feature_store_spec_base" require "json" require "redis" @@ -27,11 +28,11 @@ def clear_all_data describe LaunchDarkly::RedisFeatureStore do subject { LaunchDarkly::RedisFeatureStore } - + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. - + context "real Redis with local cache" do include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end @@ -59,7 +60,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve flag = { key: "foo", version: 1 } test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) store = create_redis_store({ test_hook: test_hook }) - + begin store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) @@ -77,7 +78,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve flag = { key: "foo", version: 1 } test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) store = create_redis_store({ test_hook: test_hook }) - + begin store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) @@ -89,4 +90,32 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve other_client.close end end + + it "shuts down a custom Redis pool by default" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + store = create_redis_store({ pool: unowned_pool }) + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end + + it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + store = create_redis_store({ pool: unowned_pool, pool_shutdown_on_close: false }) + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.not_to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end end From 06d55d5d8935a303d041cf6003fc185d7ec747ae Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 27 May 2020 11:51:03 -0700 Subject: [PATCH 160/292] improve doc comment --- lib/ldclient-rb/integrations/redis.rb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 396c1b35..22bad6ef 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -45,7 +45,9 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired - # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool; + # this is true by default, and should be set to false only if you are managing the pool yourself and want its + # lifecycle to be independent of the SDK client # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts) From 77bf917150e1fd2735fe906287e5316b1033b730 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 26 Jun 2020 18:37:52 -0700 Subject: [PATCH 161/292] remove support for indirect/patch and indirect/put (#138) --- lib/ldclient-rb/ldclient.rb | 4 ++-- lib/ldclient-rb/requestor.rb | 8 -------- lib/ldclient-rb/stream.rb | 25 +------------------------ spec/requestor_spec.rb | 34 ---------------------------------- spec/stream_spec.rb | 17 +---------------- 5 files changed, 4 insertions(+), 84 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1dc0cc25..7ea48345 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -363,12 +363,12 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) return NullUpdateProcessor.new end raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key - requestor = Requestor.new(sdk_key, config) if config.stream? 
- StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) + StreamProcessor.new(sdk_key, config, diagnostic_accumulator) else config.logger.info { "Disabling streaming API" } config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } + requestor = Requestor.new(sdk_key, config) PollingProcessor.new(config, requestor) end end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index eae0a193..378a1a35 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -26,14 +26,6 @@ def initialize(sdk_key, config) @cache = @config.cache_store end - def request_flag(key) - make_request("/sdk/latest-flags/" + key) - end - - def request_segment(key) - make_request("/sdk/latest-segments/" + key) - end - def request_all_data() make_request("/sdk/latest-all") end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e27fad32..00791eb3 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -10,10 +10,6 @@ module LaunchDarkly # @private DELETE = :delete # @private - INDIRECT_PUT = :'indirect/put' - # @private - INDIRECT_PATCH = :'indirect/patch' - # @private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes # @private @@ -24,11 +20,10 @@ module LaunchDarkly # @private class StreamProcessor - def initialize(sdk_key, config, requestor, diagnostic_accumulator = nil) + def initialize(sdk_key, config, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config @feature_store = config.feature_store - @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @@ -112,24 +107,6 @@ def process_message(message) break end end - elsif method == INDIRECT_PUT - all_data = @requestor.request_all_data - @feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) - @initialized.make_true - @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } - elsif method == INDIRECT_PATCH - key = key_for_path(FEATURES, message.data) - if key - @feature_store.upsert(FEATURES, @requestor.request_flag(key)) - else - key = key_for_path(SEGMENTS, message.data) - if key - @feature_store.upsert(SEGMENTS, @requestor.request_segment(key)) - end - end else @config.logger.warn { "[LDClient] Unknown message received: #{method}" } end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 6833ea1f..6751517a 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -193,38 +193,4 @@ def with_requestor(base_uri, opts = {}) end end end - - describe "request_flag" do - it "uses expected URI and headers" do - with_server do |server| - with_requestor(server.base_uri.to_s) do |requestor| - server.setup_ok_response("/", "{}") - requestor.request_flag("key") - expect(server.requests.count).to eq 1 - expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-flags/key" - expect(server.requests[0].header).to include({ - "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] - }) - end - end - end - end - - describe "request_segment" do - it "uses expected URI and headers" do - with_server do |server| - with_requestor(server.base_uri.to_s) do |requestor| - server.setup_ok_response("/", "{}") - requestor.request_segment("key") - expect(server.requests.count).to eq 1 - expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-segments/key" - 
expect(server.requests[0].header).to include({ - "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] - }) - end - end - end - end end diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 648833ff..39c678c4 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -4,8 +4,7 @@ describe LaunchDarkly::StreamProcessor do subject { LaunchDarkly::StreamProcessor } let(:config) { LaunchDarkly::Config.new } - let(:requestor) { double() } - let(:processor) { subject.new("sdk_key", config, requestor) } + let(:processor) { subject.new("sdk_key", config) } describe '#process_message' do let(:put_message) { SSE::StreamEvent.new(:put, '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } @@ -13,8 +12,6 @@ let(:patch_seg_message) { SSE::StreamEvent.new(:patch, '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } let(:delete_flag_message) { SSE::StreamEvent.new(:delete, '{"path": "/flags/key", "version": 2}') } let(:delete_seg_message) { SSE::StreamEvent.new(:delete, '{"path": "/segments/key", "version": 2}') } - let(:indirect_patch_flag_message) { SSE::StreamEvent.new(:'indirect/patch', "/flags/key") } - let(:indirect_patch_segment_message) { SSE::StreamEvent.new(:'indirect/patch', "/segments/key") } it "will accept PUT methods" do processor.send(:process_message, put_message) @@ -39,18 +36,6 @@ processor.send(:process_message, delete_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end - it "will accept INDIRECT PATCH method for flags" do - flag = { key: 'key', version: 1 } - allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) - processor.send(:process_message, indirect_patch_flag_message); - expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) - end - it "will accept INDIRECT PATCH method for segments" do - segment = { key: 'key', version: 1 } - allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) - processor.send(:process_message, indirect_patch_segment_message); - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) - end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn processor.send(:process_message, SSE::StreamEvent.new(type: :get, data: "", id: nil)) From cf7c8a7830e72a18f6db24c72caa0b1e0a2619cd Mon Sep 17 00:00:00 2001 From: Elliot <35050275+Apache-HB@users.noreply.github.com> Date: Mon, 9 Nov 2020 12:46:49 -0800 Subject: [PATCH 162/292] update to json 2.3.1 (#139) * update json dep to 2.3.x to fix CVE --- Gemfile.lock | 99 +++++++++++++++------------------ launchdarkly-server-sdk.gemspec | 13 ++++- spec/spec_helper.rb | 3 - 3 files changed, 54 insertions(+), 61 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index c04d73aa..54bb3bc9 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -3,92 +3,81 @@ PATH specs: launchdarkly-server-sdk (5.8.0) concurrent-ruby (~> 1.0) - json (>= 1.8, < 3) + json (~> 2.3.1) ld-eventsource (= 1.0.3) semantic (~> 1.6) GEM remote: https://rubygems.org/ specs: - aws-eventstream (1.0.1) - aws-partitions (1.128.0) - aws-sdk-core (3.44.2) - aws-eventstream (~> 1.0) - aws-partitions (~> 1.0) - aws-sigv4 (~> 1.0) + aws-eventstream (1.1.0) + aws-partitions (1.388.0) + aws-sdk-core (3.109.1) + aws-eventstream (~> 1, >= 1.0.2) + aws-partitions (~> 1, >= 1.239.0) + aws-sigv4 (~> 1.1) jmespath (~> 1.0) - aws-sdk-dynamodb (1.19.0) - 
aws-sdk-core (~> 3, >= 3.39.0) - aws-sigv4 (~> 1.0) - aws-sigv4 (1.0.3) - codeclimate-test-reporter (0.6.0) - simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.6) - connection_pool (2.2.1) - diff-lcs (1.3) - diplomat (2.0.2) - faraday (~> 0.9) - json - docile (1.1.5) - faraday (0.15.4) + aws-sdk-dynamodb (1.55.0) + aws-sdk-core (~> 3, >= 3.109.0) + aws-sigv4 (~> 1.1) + aws-sigv4 (1.2.2) + aws-eventstream (~> 1, >= 1.0.2) + concurrent-ruby (1.1.7) + connection_pool (2.2.3) + deep_merge (1.2.1) + diff-lcs (1.4.4) + diplomat (2.4.2) + deep_merge (~> 1.0, >= 1.0.1) + faraday (>= 0.9, < 1.1.0) + faraday (0.17.3) multipart-post (>= 1.2, < 3) - ffi (1.9.25) - ffi (1.9.25-java) + ffi (1.12.0) hitimes (1.3.1) - hitimes (1.3.1-java) http_tools (0.4.5) jmespath (1.4.0) - json (1.8.6) - json (1.8.6-java) + json (2.3.1) ld-eventsource (1.0.3) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) - listen (3.1.5) - rb-fsevent (~> 0.9, >= 0.9.4) - rb-inotify (~> 0.9, >= 0.9.7) - ruby_dep (~> 1.2) - multipart-post (2.0.0) - rb-fsevent (0.10.3) - rb-inotify (0.9.10) - ffi (>= 0.5.0, < 2) + listen (3.2.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + multipart-post (2.1.1) + rb-fsevent (0.10.4) + rb-inotify (0.10.1) + ffi (~> 1.0) redis (3.3.5) - rspec (3.7.0) - rspec-core (~> 3.7.0) - rspec-expectations (~> 3.7.0) - rspec-mocks (~> 3.7.0) - rspec-core (3.7.1) - rspec-support (~> 3.7.0) - rspec-expectations (3.7.0) + rspec (3.9.0) + rspec-core (~> 3.9.0) + rspec-expectations (~> 3.9.0) + rspec-mocks (~> 3.9.0) + rspec-core (3.9.3) + rspec-support (~> 3.9.3) + rspec-expectations (3.9.3) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-mocks (3.7.0) + rspec-support (~> 3.9.0) + rspec-mocks (3.9.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-support (3.7.0) + rspec-support (~> 3.9.0) + rspec-support (3.9.4) rspec_junit_formatter (0.3.0) rspec-core (>= 2, < 4, != 2.12.0) - ruby_dep (1.5.0) semantic (1.6.1) - simplecov (0.15.1) - docile (~> 1.1.0) - json (>= 1.8, < 3) - simplecov-html (~> 0.10.0) - simplecov-html (0.10.2) socketry (0.5.1) hitimes (~> 1.2) - timecop (0.9.1) + timecop (0.9.2) PLATFORMS - java ruby DEPENDENCIES aws-sdk-dynamodb (~> 1.18) - bundler (~> 1.7) - codeclimate-test-reporter (~> 0) + bundler (~> 1.17) connection_pool (>= 2.1.2) diplomat (>= 2.0.2) + faraday (~> 0.17) + ffi (<= 1.12) launchdarkly-server-sdk! 
listen (~> 3.0) redis (~> 3.3.5) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 67b39daf..b8493985 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -21,18 +21,25 @@ Gem::Specification.new do |spec| spec.require_paths = ["lib"] spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" - spec.add_development_dependency "bundler", "~> 1.7" + spec.add_development_dependency "bundler", "~> 1.17" spec.add_development_dependency "rspec", "~> 3.2" - spec.add_development_dependency "codeclimate-test-reporter", "~> 0" spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb + # these are transitive dependencies of listen and consul respectively + # we constrain them here to make sure the ruby 2.2, 2.3, and 2.4 CI + # cases all pass + spec.add_development_dependency "ffi", "<= 1.12" # >1.12 doesnt support ruby 2.2 + spec.add_development_dependency "faraday", "~> 0.17" # >=0.18 doesnt support ruby 2.2 - spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "ld-eventsource", "1.0.3" + + # lock json to 2.3.x as ruby libraries often remove + # support for older ruby versions in minor releases + spec.add_runtime_dependency "json", "~> 2.3.1" end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 52926ac1..8438ecc2 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,6 +1,3 @@ -require "codeclimate-test-reporter" -CodeClimate::TestReporter.start - require "ldclient-rb" $null_log = ::Logger.new($stdout) From 4cc6d9cd85d68e7d38994e65a2e00cb7752ee4d1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 6 Jan 2021 11:30:43 -0800 Subject: [PATCH 163/292] add publication of API docs on GitHub Pages (#143) --- .gitignore | 2 +- .ldrelease/build-docs.sh | 18 ++++++++++++++++++ .ldrelease/config.yml | 13 +++++++++---- .yardopts | 9 --------- README.md | 4 +++- docs/Makefile | 26 ++++++++++++++++++++++++++ docs/index.md | 9 +++++++++ scripts/gendocs.sh | 11 ----------- scripts/release.sh | 27 --------------------------- 9 files changed, 66 insertions(+), 53 deletions(-) create mode 100755 .ldrelease/build-docs.sh delete mode 100644 .yardopts create mode 100644 docs/Makefile create mode 100644 docs/index.md delete mode 100755 scripts/gendocs.sh delete mode 100755 scripts/release.sh diff --git a/.gitignore b/.gitignore index 3f9d02f2..9e998e64 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ /.yardoc /_yardoc/ /coverage/ -/doc/ +/docs/build /pkg/ /spec/reports/ /tmp/ diff --git a/.ldrelease/build-docs.sh b/.ldrelease/build-docs.sh new file mode 100755 index 00000000..3b581297 --- /dev/null +++ b/.ldrelease/build-docs.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# doc generation is not part of Releaser's standard Ruby project template + +mkdir -p ./artifacts/ + +cd ./docs +make +cd .. + +# Releaser will pick up docs generated in CI if we put an archive of them in the +# artifacts directory and name it docs.tar.gz or docs.zip. They will be uploaded +# to GitHub Pages and also attached as release artifacts. 
There's no separate +# "publish-docs" step because the external service that also hosts them doesn't +# require an upload, it just picks up gems automatically. + +cd ./docs/build/html +tar cfz ../../../artifacts/docs.tar.gz * diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index 198d0ebc..b900daf1 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -8,10 +8,15 @@ publications: - url: https://www.rubydoc.info/gems/launchdarkly-server-sdk description: documentation -template: - name: ruby - env: - LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI +circleci: + linux: + image: circleci/ruby:2.6.2-stretch + context: org-global + env: + LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI + +documentation: + githubPages: true sdk: displayName: "Ruby" diff --git a/.yardopts b/.yardopts deleted file mode 100644 index 5388ac50..00000000 --- a/.yardopts +++ /dev/null @@ -1,9 +0,0 @@ ---no-private ---markup markdown ---embed-mixins -lib/*.rb -lib/**/*.rb -lib/**/**/*.rb -lib/**/**/**/*.rb -- -README.md diff --git a/README.md b/README.md index d3f99b69..bc6cf21d 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,8 @@ LaunchDarkly Server-side SDK for Ruby [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) +[![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk) +[![GitHub Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk) LaunchDarkly overview ------------------------- @@ -27,7 +29,7 @@ Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). -Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). +Generated API documentation for all versions of the SDK is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). The API documentation for the latest version is also on [GitHub Pages](https://launchdarkly.github.io/ruby-server-sdk). Testing ------- diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..86a33602 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,26 @@ + +ifeq ($(LD_RELEASE_VERSION),) +TITLE=LaunchDarkly Ruby SDK +else +TITLE=LaunchDarkly Ruby SDK ($(LD_RELEASE_VERSION)) +endif + +.PHONY: dependencies html + +html: dependencies + rm -rf ./build + cd .. && yard doc \ + -o docs/build/html \ + --title "$(TITLE)" \ + --no-private \ + --markup markdown \ + --embed-mixins \ + -r docs/index.md \ + lib/*.rb \ + lib/**/*.rb \ + lib/**/**/*.rb \ + lib/**/**/**/*.rb + +dependencies: + gem install --conservative yard + gem install --conservative redcarpet # provides Markdown formatting diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..4ab76d21 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,9 @@ +# LaunchDarkly Server-side SDK for Ruby + +This generated API documentation lists all types and methods in the SDK. 
+ +The API documentation for the most recent SDK release is hosted on [GitHub Pages](https://launchdarkly.github.io/ruby-server-sdk). API documentation for current and past releases is hosted on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). + +Source code and readme: [GitHub](https://github.com/launchdarkly/ruby-server-sdk) + +SDK reference guide: [docs.launchdarkly.com](https://docs.launchdarkly.com/sdk/server-side/ruby) diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh deleted file mode 100755 index c5ec7dcf..00000000 --- a/scripts/gendocs.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -# Use this script to generate documentation locally in ./doc so it can be proofed before release. -# After release, documentation will be visible at https://www.rubydoc.info/gems/launchdarkly-server-sdk - -gem install --conservative yard -gem install --conservative redcarpet # provides Markdown formatting - -rm -rf doc/* - -yard doc diff --git a/scripts/release.sh b/scripts/release.sh deleted file mode 100755 index 9813240c..00000000 --- a/scripts/release.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash -# This script updates the version for the launchdarkly-server-sdk library and releases it to RubyGems -# It will only work if you have the proper credentials set up in ~/.gem/credentials - -# It takes exactly one argument: the new version. -# It should be run from the root of this git repo like this: -# ./scripts/release.sh 4.0.9 - -# When done you should commit and push the changes made. - -set -uxe -echo "Starting ruby-server-sdk release." - -VERSION=$1 - -#Update version in lib/ldclient-rb/version.rb -VERSION_RB_TEMP=./version.rb.tmp -sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/ldclient-rb/version.rb > ${VERSION_RB_TEMP} -mv ${VERSION_RB_TEMP} lib/ldclient-rb/version.rb - -# Build Ruby Gem -gem build launchdarkly-server-sdk.gemspec - -# Publish Ruby Gem -gem push launchdarkly-server-sdk-${VERSION}.gem - -echo "Done with ruby-server-sdk release" \ No newline at end of file From 5831aa66c89e3231dc7e0d28b99161881049f947 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 19 Jan 2021 16:24:17 -0800 Subject: [PATCH 164/292] try fixing release metadata --- .ldrelease/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index b900daf1..fa4a0557 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -13,7 +13,7 @@ circleci: image: circleci/ruby:2.6.2-stretch context: org-global env: - LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI + LD_SKIP_DATABASE_TESTS: "1" # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI documentation: githubPages: true From 3214f713c49078d935bcae02c555b5288243ffdd Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Tue, 19 Jan 2021 17:42:47 -0800 Subject: [PATCH 165/292] update the default base url (#144) --- lib/ldclient-rb/config.rb | 6 +++--- lib/ldclient-rb/file_data_source.rb | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 4a3116f3..211a20c7 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -15,7 +15,7 @@ class Config # # @param opts [Hash] the configuration options # @option opts [Logger] :logger See {#logger}. - # @option opts [String] :base_uri ("https://app.launchdarkly.com") See {#base_uri}. 
+ # @option opts [String] :base_uri ("https://sdk.launchdarkly.com") See {#base_uri}. # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") See {#stream_uri}. # @option opts [String] :events_uri ("https://events.launchdarkly.com") See {#events_uri}. # @option opts [Integer] :capacity (10000) See {#capacity}. @@ -314,10 +314,10 @@ def self.default_capacity # # The default value for {#base_uri}. - # @return [String] "https://app.launchdarkly.com" + # @return [String] "https://sdk.launchdarkly.com" # def self.default_base_uri - "https://app.launchdarkly.com" + "https://sdk.launchdarkly.com" end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 6cc0dc39..76a7c226 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -51,7 +51,7 @@ def self.have_listen? # output as the starting point for your file. In Linux you would do this: # # ``` - # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all # ``` # # The output will look something like this (but with many more properties): From 557d2c497f92e227cc791c0cac87ce8f500af867 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 21 Jan 2021 10:56:29 -0800 Subject: [PATCH 166/292] revert renames of feature_store & update_processor --- lib/ldclient-rb/config.rb | 36 +++++---- lib/ldclient-rb/file_data_source.rb | 8 +- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 8 +- .../impl/integrations/redis_impl.rb | 14 ++-- lib/ldclient-rb/impl/store_client_wrapper.rb | 8 +- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 +- lib/ldclient-rb/in_memory_store.rb | 10 +-- lib/ldclient-rb/integrations/consul.rb | 12 +-- lib/ldclient-rb/integrations/dynamodb.rb | 20 ++--- lib/ldclient-rb/integrations/redis.rb | 22 +++--- .../integrations/util/store_wrapper.rb | 24 +++--- lib/ldclient-rb/interfaces.rb | 16 ++-- lib/ldclient-rb/ldclient.rb | 20 ++--- lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/redis_store.rb | 16 ++-- lib/ldclient-rb/stream.rb | 8 +- ...pec_base.rb => feature_store_spec_base.rb} | 4 +- spec/file_data_source_spec.rb | 2 +- spec/in_memory_data_store_spec.rb | 12 --- spec/in_memory_feature_store_spec.rb | 12 +++ ...e_spec.rb => consul_feature_store_spec.rb} | 12 +-- ...spec.rb => dynamodb_feature_store_spec.rb} | 12 +-- spec/ldclient_spec.rb | 76 +++++++++---------- spec/polling_spec.rb | 14 ++-- spec/redis_feature_store_spec.rb | 14 ++-- spec/stream_spec.rb | 12 +-- 27 files changed, 210 insertions(+), 200 deletions(-) rename spec/{data_store_spec_base.rb => feature_store_spec_base.rb} (97%) delete mode 100644 spec/in_memory_data_store_spec.rb create mode 100644 spec/in_memory_feature_store_spec.rb rename spec/integrations/{consul_data_store_spec.rb => consul_feature_store_spec.rb} (63%) rename spec/integrations/{dynamodb_data_store_spec.rb => dynamodb_feature_store_spec.rb} (85%) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 211a20c7..df0c73b4 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -23,7 +23,7 @@ class Config # @option opts [Float] :read_timeout (10) See {#read_timeout}. # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. # @option opts [Object] :cache_store See {#cache_store}. - # @option opts [Object] :data_store See {#data_store}. + # @option opts [Object] :feature_store See {#feature_store}. 
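# [Illustrative sketch; not part of the patch] One plausible way to combine the `feature_store`
# option above with `use_ldd`, using the Redis integration that appears later in this patch
# series (the Redis URL and SDK key below are placeholders):
#
#   store  = LaunchDarkly::Integrations::Redis.new_feature_store(redis_url: "redis://localhost:6379")
#   config = LaunchDarkly::Config.new(feature_store: store, use_ldd: true)
#   client = LaunchDarkly::LDClient.new("your-sdk-key", config)
#
# `use_ldd: true` assumes the store is populated by the LaunchDarkly relay proxy, as described in
# the `use_ldd?` documentation below; when the SDK itself should populate the store, omit it.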
# @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. # @option opts [Boolean] :offline (false) See {#offline?}. # @option opts [Float] :poll_interval (30) See {#poll_interval}. @@ -35,6 +35,8 @@ class Config # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. + # @option opts [Object] :update_processor Obsolete synonym for `data_source`. + # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. # @option opts [String] :wrapper_name See {#wrapper_name}. @@ -50,7 +52,7 @@ def initialize(opts = {}) @flush_interval = opts[:flush_interval] || Config.default_flush_interval @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout @read_timeout = opts[:read_timeout] || Config.default_read_timeout - @data_store = opts[:data_store] || Config.default_data_store + @feature_store = opts[:feature_store] || Config.default_feature_store @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @@ -61,7 +63,9 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false - @data_source = opts[:data_source] + @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] + @update_processor = opts[:update_processor] + @update_processor_factory = opts[:update_processor_factory] @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval @@ -103,9 +107,9 @@ def stream? # # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not # use polling or streaming to get feature flag updates from the server, but instead reads them - # from the {#data_store data store}, which is assumed to be a database that is populated by + # from the {#feature_store feature store}, which is assumed to be a database that is populated by # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) - # and ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # All other properties related to streaming or polling are ignored if this option is set to true. # @@ -181,13 +185,13 @@ def offline? # # A store for feature flags and related data. The client uses it to store all data received # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to - # {InMemoryDataStore}; for other implementations, see {LaunchDarkly::Integrations}. 
+ # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. # - # For more information, see ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # - # @return [LaunchDarkly::Interfaces::DataStore] + # @return [LaunchDarkly::Interfaces::FeatureStore] # - attr_reader :data_store + attr_reader :feature_store # # True if all user attributes (other than the key) should be considered private. This means @@ -256,6 +260,12 @@ def offline? # attr_reader :data_source + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor + + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor_factory + # # Set to true to opt out of sending diagnostics data. # @@ -399,11 +409,11 @@ def self.default_use_ldd end # - # The default value for {#data_store}. - # @return [LaunchDarkly::Interfaces::DataStore] an {InMemoryDataStore} + # The default value for {#feature_store}. + # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} # - def self.default_data_store - InMemoryDataStore.new + def self.default_feature_store + InMemoryFeatureStore.new end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 76a7c226..f58ddf7c 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -118,14 +118,14 @@ class FileDataSource # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.data_store, config.logger, options) } + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end # @private class FileDataSourceImpl - def initialize(data_store, logger, options={}) - @data_store = data_store + def initialize(feature_store, logger, options={}) + @feature_store = feature_store @logger = logger @paths = options[:paths] || [] if @paths.is_a? String @@ -187,7 +187,7 @@ def load_all return end end - @data_store.init(all_data) + @feature_store.init(all_data) @initialized.make_true end diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 34aea72c..2f186dab 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module Consul # - # Internal implementation of the Consul data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. # - class ConsulDataStoreCore + class ConsulFeatureStoreCore begin require "diplomat" CONSUL_ENABLED = true @@ -17,14 +17,14 @@ class ConsulDataStoreCore def initialize(opts) if !CONSUL_ENABLED - raise RuntimeError.new("can't use Consul data store without the 'diplomat' gem") + raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") end @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? Diplomat.configuration.url = opts[:url] if !opts[:url].nil? 
- @logger.info("ConsulDataStore: using Consul host at #{Diplomat.configuration.url}") + @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) @@ -90,7 +90,7 @@ def upsert_internal(kind, new_item) else old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to - # DataStoreWrapper so it can be cached) + # FeatureStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] return old_item end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index fb3a6bd4..464eb5e4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module DynamoDB # - # Internal implementation of the DynamoDB data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. # - class DynamoDBDataStoreCore + class DynamoDBFeatureStoreCore begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -28,7 +28,7 @@ class DynamoDBDataStoreCore def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB data store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") end @table_name = table_name @@ -41,7 +41,7 @@ def initialize(table_name, opts) @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - @logger.info("DynamoDBDataStore: using DynamoDB table \"#{table_name}\"") + @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") end def init_internal(all_data) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 4602fcd7..a4cb1365 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -6,9 +6,9 @@ module Impl module Integrations module Redis # - # Internal implementation of the Redis data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
# - class RedisDataStoreCore + class RedisFeatureStoreCore begin require "redis" require "connection_pool" @@ -19,7 +19,7 @@ class RedisDataStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis data store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") end @redis_opts = opts[:redis_opts] || Hash.new @@ -42,7 +42,7 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisDataStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ and prefix: #{@prefix}") end end @@ -61,7 +61,7 @@ def init_internal(all_data) multi.set(inited_key, inited_key) end end - @logger.info { "RedisDataStore: initialized with #{count} items" } + @logger.info { "RedisFeatureStore: initialized with #{count} items" } end def get_internal(kind, key) @@ -97,13 +97,13 @@ def upsert_internal(kind, new_item) multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? - @logger.debug { "RedisDataStore: concurrent modification detected, retrying" } + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } try_again = true end else final_item = old_item action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisDataStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb index 8c3160f1..f0948251 100644 --- a/lib/ldclient-rb/impl/store_client_wrapper.rb +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -4,19 +4,19 @@ module LaunchDarkly module Impl # - # Provides additional behavior that the client requires before or after data store operations. + # Provides additional behavior that the client requires before or after feature store operations. # Currently this just means sorting the data set for init(). In the future we may also use this # to provide an update listener capability. # - class DataStoreClientWrapper - include Interfaces::DataStore + class FeatureStoreClientWrapper + include Interfaces::FeatureStore def initialize(store) @store = store end def init(all_data) - @store.init(DataStoreDataSetSorter.sort_all_collections(all_data)) + @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) end def get(kind, key) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 6dad1b36..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -2,10 +2,10 @@ module LaunchDarkly module Impl # - # Implements a dependency graph ordering for data to be stored in a data store. We must use this - # on every data set that will be passed to the data store's init() method. + # Implements a dependency graph ordering for data to be stored in a feature store. We must use this + # on every data set that will be passed to the feature store's init() method. 
# - class DataStoreDataSetSorter + class FeatureStoreDataSetSorter # # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index d3bee07e..576d90c7 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -2,12 +2,12 @@ module LaunchDarkly - # These constants denote the types of data that can be stored in the data store. If + # These constants denote the types of data that can be stored in the feature store. If # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. # - # The :priority and :get_dependency_keys properties are used by DataStoreDataSetSorter + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter # to ensure data consistency during non-atomic updates. # @private @@ -24,12 +24,12 @@ module LaunchDarkly }.freeze # - # Default implementation of the LaunchDarkly client's data store, using an in-memory + # Default implementation of the LaunchDarkly client's feature store, using an in-memory # cache. This object holds feature flags and related data received from LaunchDarkly. # Database-backed implementations are available in {LaunchDarkly::Integrations}. # - class InMemoryDataStore - include LaunchDarkly::Interfaces::DataStore + class InMemoryFeatureStore + include LaunchDarkly::Interfaces::FeatureStore def initialize @items = Hash.new diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 0ecf69f8..4f32d5fd 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -5,7 +5,7 @@ module LaunchDarkly module Integrations module Consul # - # Default value for the `prefix` option for {new_data_store}. + # Default value for the `prefix` option for {new_feature_store}. # # @return [String] the default key prefix # @@ -14,10 +14,10 @@ def self.default_prefix end # - # Creates a Consul-backed persistent data store. + # Creates a Consul-backed persistent feature store. # # To use this method, you must first install the gem `diplomat`. Then, put the object returned by - # this method into the `data_store` property of your client configuration ({LaunchDarkly::Config}). + # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). 
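# [Illustrative sketch; not part of the patch] The DynamoDB and Redis integrations in this series
# carry an @example block but this Consul doc comment does not; a sketch in the same style, with
# an assumed local Consul agent URL, might look like:
#
# @example Configuring the feature store
#   store = LaunchDarkly::Integrations::Consul::new_feature_store(url: "http://localhost:8500")
#   config = LaunchDarkly::Config.new(feature_store: store)
#   client = LaunchDarkly::LDClient.new(my_sdk_key, config)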
# # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default @@ -27,10 +27,10 @@ def self.default_prefix # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(opts, &block) - core = LaunchDarkly::Impl::Integrations::Consul::ConsulDataStoreCore.new(opts) + def self.new_feature_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index dddf38f0..189e118f 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,17 +5,17 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent data store. For more details about how and why you can - # use a persistent data store, see the + # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or - # the full `aws-sdk`. Then, put the object returned by this method into the `data_store` property + # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property # of your client configuration ({LaunchDarkly::Config}). # - # @example Configuring the data store - # store = LaunchDarkly::Integrations::DynamoDB::new_data_store("my-table-name") - # config = LaunchDarkly::Config.new(data_store: store) + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") + # config = LaunchDarkly::Config.new(feature_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # Note that the specified table must already exist in DynamoDB. 
It must have a partition key called @@ -31,15 +31,15 @@ module DynamoDB # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) - # @option opts [Object] :existing_client an already-constructed DynamoDB client for the data store to use + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(table_name, opts) - core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBDataStoreCore.new(table_name, opts) + def self.new_feature_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 89b740a1..22bad6ef 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -4,7 +4,7 @@ module LaunchDarkly module Integrations module Redis # - # Default value for the `redis_url` option for {new_data_store}. This points to an instance of + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of # Redis running at `localhost` with its default port. # # @return [String] the default Redis URL @@ -14,7 +14,7 @@ def self.default_redis_url end # - # Default value for the `prefix` option for {new_data_store}. + # Default value for the `prefix` option for {new_feature_store}. # # @return [String] the default key prefix # @@ -23,17 +23,17 @@ def self.default_prefix end # - # Creates a Redis-backed persistent data store. For more details about how and why you can - # use a persistent data store, see the + # Creates a Redis-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `data_store` property of your + # put the object returned by this method into the `feature_store` property of your # client configuration. 
# - # @example Configuring the data store - # store = LaunchDarkly::Integrations::Redis::new_data_store(redis_url: "redis://my-server") - # config = LaunchDarkly::Config.new(data_store: store) + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(feature_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options @@ -48,10 +48,10 @@ def self.default_prefix # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool; # this is true by default, and should be set to false only if you are managing the pool yourself and want its # lifecycle to be independent of the SDK client - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(opts) - return RedisDataStore.new(opts) + def self.new_feature_store(opts) + return RedisFeatureStore.new(opts) end end end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index c9ff5bcf..26318d67 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -6,22 +6,22 @@ module LaunchDarkly module Integrations module Util # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::DataStore} + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every data store implementation. + # behavior and other logic that would otherwise be repeated in every feature store implementation. # This makes it easier to create new database integrations by implementing only the database-specific # logic. # - # The mixin {DataStoreCore} describes the methods that need to be supported by the inner + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner # implementation object. # class CachingStoreWrapper - include LaunchDarkly::Interfaces::DataStore + include LaunchDarkly::Interfaces::FeatureStore # # Creates a new store wrapper instance. # - # @param core [Object] an object that implements the {DataStoreCore} methods + # @param core [Object] an object that implements the {FeatureStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache @@ -146,9 +146,9 @@ def items_if_not_deleted(items) # This module describes the methods that you must implement on your own object in order to # use {CachingStoreWrapper}. # - module DataStoreCore + module FeatureStoreCore # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::DataStore#init}, + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # # If possible, the store should update the entire data set atomically. If that is not possible, @@ -164,7 +164,7 @@ def init_internal(all_data) end # - # Retrieves a single entity. 
This is the same as {LaunchDarkly::Interfaces::DataStore#get} + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -177,7 +177,7 @@ def get_internal(kind, key) end # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::DataStore#all} + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -190,13 +190,13 @@ def get_all_internal(kind) end # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::DataStore#upsert} + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. # the method is expected to return the final state of the entity (i.e. either the `item` # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # - # Note that DataStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. # # @param kind [Object] the kind of entity to add or update @@ -208,7 +208,7 @@ def upsert_internal(kind, item) # # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::DataStore#initialized?} except that there is less of a concern + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern # for efficiency, because the wrapper will use caching and memoization in order to call the method # as little as possible. # diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 36bdcd94..d2a9f862 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -5,13 +5,13 @@ module LaunchDarkly # module Interfaces # - # Mixin that defines the required methods of a data store implementation. The LaunchDarkly - # client uses the data store to persist feature flags and related objects received from + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from # the LaunchDarkly service. Implementations must support concurrent access and updates. - # For more about how data stores can be used, see: - # [Using a persistent data store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
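# [Illustrative sketch; not part of the patch] To make the FeatureStoreCore contract documented in
# store_wrapper.rb above concrete: a hypothetical core backed by a plain Hash, wrapped with
# CachingStoreWrapper. The class name and storage choice are invented, and a real implementation
# would also need to handle concurrent access, which this sketch does not.
#
#   class HashCore
#     def initialize
#       @data = {}
#     end
#
#     def init_internal(all_data)
#       @data = all_data
#     end
#
#     def get_internal(kind, key)
#       (@data[kind] || {})[key]   # deleted items are returned as-is; the wrapper filters them
#     end
#
#     def get_all_internal(kind)
#       @data[kind] || {}
#     end
#
#     def upsert_internal(kind, item)
#       items = (@data[kind] ||= {})
#       old = items[item[:key]]
#       if old.nil? || old[:version] < item[:version]
#         items[item[:key]] = item
#         item                     # update succeeded: return the new state
#       else
#         old                      # stale update: return the existing item for the wrapper's cache
#       end
#     end
#
#     def initialized_internal?
#       !@data.empty?
#     end
#
#     def stop
#     end
#   end
#
#   store = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(HashCore.new, { expiration: 30 })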
# - # An entity that can be stored in a data store is a hash that can be converted to and from + # An entity that can be stored in a feature store is a hash that can be converted to and from # JSON, and that has at a minimum the following properties: `:key`, a string that is unique # among entities of the same kind; `:version`, an integer that is higher for newer data; # `:deleted`, a boolean (optional, defaults to false) that if true means this is a @@ -22,12 +22,12 @@ module Interfaces # `:namespace`, which is a short string unique to that kind. This string can be used as a # collection name or a key prefix. # - # The default implementation is {LaunchDarkly::InMemoryDataStore}. Several implementations + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # - module DataStore + module FeatureStore # # Initializes (or re-initializes) the store with the specified set of entities. Any # existing entries will be removed. Implementations can assume that this data set is up to @@ -116,7 +116,7 @@ def stop # # Mixin that defines the required methods of a data source implementation. This is the # component that delivers feature flag data from LaunchDarkly to the LDClient by putting - # the data in the {DataStore}. It is expected to run concurrently on its own thread. + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 6b78f0f8..cfa63351 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -48,13 +48,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @event_factory_default = EventFactory.new(false) @event_factory_with_reasons = EventFactory.new(true) - # We need to wrap the data store object with a DataStoreClientWrapper in order to add + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses - # the data store through the Config object, so we need to make a new Config that uses + # the feature store through the Config object, so we need to make a new Config that uses # the wrapped store. - @store = Impl::DataStoreClientWrapper.new(config.data_store) + @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) updated_config = config.clone - updated_config.instance_variable_set(:@data_store, @store) + updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config get_flag = lambda { |key| @store.get(FEATURES, key) } @@ -75,7 +75,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.use_ldd? @config.logger.info { "[LDClient] Started LaunchDarkly Client in LDD mode" } - return # requestor and data processor are not used in this mode + return # requestor and update processor are not used in this mode end data_source_or_factory = @config.data_source || self.method(:create_default_data_source) @@ -150,7 +150,7 @@ def secure_mode_hash(user) # given up permanently (for instance, if your SDK key is invalid). 
In the meantime, # any call to {#variation} or {#variation_detail} will behave as follows: # - # 1. It will check whether the data store already contains data (that is, you + # 1. It will check whether the feature store already contains data (that is, you # are using a database-backed store and it was populated by a previous run of this # application). If so, it will use the last known feature flag data. # @@ -365,7 +365,7 @@ def close def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? - return NullDataSource.new + return NullUpdateProcessor.new end raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key if config.stream? @@ -386,9 +386,9 @@ def evaluate_internal(key, user, default, event_factory) if !initialized? if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from data store" } + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else - @config.logger.error { "[LDClient] Client has not finished initializing; data store unavailable, returning default value" } + @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail @@ -443,7 +443,7 @@ def sanitize_user(user) # Used internally when the client is offline. # @private # - class NullDataSource + class NullUpdateProcessor def start e = Concurrent::Event.new e.set diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 5cbc220a..a9312413 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,7 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.data_store.init(all_data) + @config.feature_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index dc266b79..b94e61f2 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -3,28 +3,28 @@ module LaunchDarkly # - # An implementation of the LaunchDarkly client's data store that uses a Redis + # An implementation of the LaunchDarkly client's feature store that uses a Redis # instance. This object holds feature flags and related data received from the # streaming API. Feature data can also be further cached in memory to reduce overhead # of calls to Redis. # # To use this class, you must first have the `redis` and `connection-pool` gems - # installed. Then, create an instance and store it in the `data_store` property + # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may be changed or removed in the future. # - class RedisDataStore - include LaunchDarkly::Interfaces::DataStore + class RedisFeatureStore + include LaunchDarkly::Interfaces::FeatureStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating - # to RedisDataStoreCore where the actual database logic is. 
This class was retained for historical - # reasons, so that existing code can still call RedisDataStore.new. In the future, we will migrate + # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate # away from exposing these concrete classes and use factory methods instead. # - # Constructor for a RedisDataStore instance. + # Constructor for a RedisFeatureStore instance. # # @param opts [Hash] the configuration options # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) @@ -38,7 +38,7 @@ class RedisDataStore # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. # def initialize(opts = {}) - core = LaunchDarkly::Impl::Integrations::Redis::RedisDataStoreCore.new(opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index bd196488..df50cfd0 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -25,7 +25,7 @@ class StreamProcessor def initialize(sdk_key, config, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config - @data_store = config.data_store + @feature_store = config.feature_store @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @@ -85,7 +85,7 @@ def process_message(message) if method == PUT message = JSON.parse(message.data, symbolize_names: true) all_data = Impl::Model.make_all_store_data(message[:data]) - @data_store.init(all_data) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -96,7 +96,7 @@ def process_message(message) if key data = data[:data] Impl::Model.postprocess_item_after_deserializing!(kind, data) - @data_store.upsert(kind, data) + @feature_store.upsert(kind, data) break end end @@ -105,7 +105,7 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @data_store.delete(kind, key, data[:version]) + @feature_store.delete(kind, key, data[:version]) break end end diff --git a/spec/data_store_spec_base.rb b/spec/feature_store_spec_base.rb similarity index 97% rename from spec/data_store_spec_base.rb rename to spec/feature_store_spec_base.rb index a937d93e..2d06f0ff 100644 --- a/spec/data_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,9 +1,9 @@ require "spec_helper" -shared_examples "data_store" do |create_store_method, clear_data_method| +shared_examples "feature_store" do |create_store_method, clear_data_method| # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that data stores need to be able to handle arbitrary data. + # to make it clear that feature stores need to be able to handle arbitrary data. 
let(:things_kind) { { namespace: "things" } } let(:key1) { "thing1" } diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 07f5b481..212d057b 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -96,7 +96,7 @@ def []=(key, value) before do @config = LaunchDarkly::Config.new(logger: $null_log) - @store = @config.data_store + @store = @config.feature_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/in_memory_data_store_spec.rb b/spec/in_memory_data_store_spec.rb deleted file mode 100644 index e43a2ebb..00000000 --- a/spec/in_memory_data_store_spec.rb +++ /dev/null @@ -1,12 +0,0 @@ -require "data_store_spec_base" -require "spec_helper" - -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryDataStore.new -end - -describe LaunchDarkly::InMemoryDataStore do - subject { LaunchDarkly::InMemoryDataStore } - - include_examples "data_store", method(:create_in_memory_store) -end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb new file mode 100644 index 00000000..c403fc69 --- /dev/null +++ b/spec/in_memory_feature_store_spec.rb @@ -0,0 +1,12 @@ +require "feature_store_spec_base" +require "spec_helper" + +def create_in_memory_store(opts = {}) + LaunchDarkly::InMemoryFeatureStore.new +end + +describe LaunchDarkly::InMemoryFeatureStore do + subject { LaunchDarkly::InMemoryFeatureStore } + + include_examples "feature_store", method(:create_in_memory_store) +end diff --git a/spec/integrations/consul_data_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb similarity index 63% rename from spec/integrations/consul_data_store_spec.rb rename to spec/integrations/consul_feature_store_spec.rb index 1f254bd7..bad1e736 100644 --- a/spec/integrations/consul_data_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "diplomat" require "spec_helper" @@ -11,12 +11,12 @@ } def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_data_store( + LaunchDarkly::Integrations::Consul::new_feature_store( $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_data_store( + LaunchDarkly::Integrations::Consul::new_feature_store( $consul_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -25,16 +25,16 @@ def clear_all_data end -describe "Consul data store" do +describe "Consul feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
context "with local cache" do - include_examples "data_store", method(:create_consul_store), method(:clear_all_data) + include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "data_store", method(:create_consul_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_data_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb similarity index 85% rename from spec/integrations/dynamodb_data_store_spec.rb rename to spec/integrations/dynamodb_feature_store_spec.rb index 7f4e4673..3b95edc8 100644 --- a/spec/integrations/dynamodb_data_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "aws-sdk-dynamodb" require "spec_helper" @@ -20,12 +20,12 @@ } def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -86,7 +86,7 @@ def create_test_client end -describe "DynamoDB data store" do +describe "DynamoDB feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local DynamoDB instance running. @@ -94,10 +94,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "data_store", method(:create_dynamodb_store), method(:clear_all_data) + include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "data_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4ea9522f..76e5b0f7 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,7 +7,7 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:null_data) { LaunchDarkly::NullDataSource.new } + let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } let(:logger) { double().as_null_object } let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do @@ -110,21 +110,21 @@ def event_processor end it "returns the value for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(client.variation("key", user, "default")).to eq "value" end it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) 
expect(client.variation("key", user, "default")).to eq "default" end it "queues a feature request event for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -140,8 +140,8 @@ def event_processor end it "queues a feature event for an existing feature when user is nil" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -156,8 +156,8 @@ def event_processor end it "queues a feature event for an existing feature when user key is nil" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", @@ -187,8 +187,8 @@ def event_processor trackEvents: true ] } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, flag) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -212,8 +212,8 @@ def event_processor rules: [], trackEventsFallthrough: true } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, flag) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -255,8 +255,8 @@ def event_processor end it "returns a value for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -264,8 +264,8 @@ def event_processor it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -273,8 +273,8 @@ def event_processor end it "queues a feature request event for an existing feature" do - 
config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -296,28 +296,28 @@ def event_processor let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } it "returns flag values" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({ key: 'userkey' }) expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end it "returns empty map for nil user" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags(nil) expect(result).to eq({}) end it "returns empty map for nil user key" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({}) expect(result).to eq({}) end it "returns empty map if offline" do - offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = offline_client.all_flags(nil) expect(result).to eq({}) @@ -329,7 +329,7 @@ def event_processor let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } it "returns flags state" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be true @@ -362,7 +362,7 @@ def event_processor flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.data_store.init({ LaunchDarkly::FEATURES => { + config.feature_store.init({ LaunchDarkly::FEATURES => { flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 }}) @@ -379,7 +379,7 @@ def event_processor flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true @@ -412,7 +412,7 @@ def event_processor end it "returns empty state for nil user" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' 
=> flag1, 'key2' => flag2 } }) state = client.all_flags_state(nil) expect(state.valid?).to be false @@ -420,7 +420,7 @@ def event_processor end it "returns empty state for nil user key" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({}) expect(state.valid?).to be false @@ -428,7 +428,7 @@ def event_processor end it "returns empty state if offline" do - offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = offline_client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be false @@ -512,7 +512,7 @@ def event_processor end end - describe "data store data ordering" do + describe "feature store data ordering" do let(:dependency_ordering_test_data) { { LaunchDarkly::FEATURES => { @@ -529,7 +529,7 @@ def event_processor } } - class FakeDataStore + class FakeFeatureStore attr_reader :received_data def init(all_data) @@ -537,7 +537,7 @@ def init(all_data) end end - class FakeDataSource + class FakeUpdateProcessor def initialize(store, data) @store = store @data = data @@ -558,11 +558,11 @@ def initialized? end end - it "passes data set to data store in correct order on init" do - store = FakeDataStore.new - data_source_factory = lambda { |sdk_key, config| FakeDataSource.new(config.data_store, + it "passes data set to feature store in correct order on init" do + store = FakeFeatureStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) + config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) client = subject.new("secret", config) data = store.received_data diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index f91ddc62..ca36364c 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(data_store: store, logger: $null_log) + config = LaunchDarkly::Config.new(feature_store: store, logger: $null_log) processor = subject.new(config, requestor) begin yield processor @@ -29,7 +29,7 @@ def with_processor(store) it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -40,7 +40,7 @@ def with_processor(store) it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -53,7 +53,7 @@ def with_processor(store) describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start finished = ready.wait(0.2) @@ -67,7 +67,7 @@ 
def with_processor(store) describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be true @@ -77,7 +77,7 @@ def verify_unrecoverable_http_error(status) def verify_recoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be false @@ -108,7 +108,7 @@ def verify_recoverable_http_error(status) describe 'stop' do it 'stops promptly rather than continuing to wait for poll interval' do - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| sleep(1) # somewhat arbitrary, but should ensure that it has started polling start_time = Time.now processor.stop diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 6ca3a4f5..6dd5733e 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "connection_pool" require "json" require "redis" @@ -13,11 +13,11 @@ } def create_redis_store(opts = {}) - LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 60 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 0 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) end def clear_all_data @@ -26,19 +26,19 @@ def clear_all_data end -describe LaunchDarkly::RedisDataStore do - subject { LaunchDarkly::RedisDataStore } +describe LaunchDarkly::RedisFeatureStore do + subject { LaunchDarkly::RedisFeatureStore } break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. 
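(Editorial illustration, not part of the patch above.) Outside the test suite, a Redis-backed store such as the one exercised below is plugged into the client through the `feature_store` configuration property, as described in the redis_store.rb documentation earlier in this series. A sketch, assuming the `redis` and `connection_pool` gems are installed and a Redis instance is reachable at the given URL (the SDK key and cache TTL are placeholders):

    require "ldclient-rb"

    store = LaunchDarkly::RedisFeatureStore.new(redis_url: "redis://localhost:6379/0", expiration: 30)
    config = LaunchDarkly::Config.new(feature_store: store)
    client = LaunchDarkly::LDClient.new("your-sdk-key", config)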
context "real Redis with local cache" do - include_examples "data_store", method(:create_redis_store), method(:clear_all_data) + include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "data_store", method(:create_redis_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index de36ae0a..39c678c4 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -15,26 +15,26 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) processor.send(:process_message, delete_flag_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do processor.send(:process_message, patch_seg_message) processor.send(:process_message, delete_seg_message) - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn From 97d2ca0d76d1ef767dab64317f59e43294e0aec7 Mon Sep 17 00:00:00 2001 From: hroederld Date: Fri, 22 Jan 2021 14:15:58 -0800 Subject: [PATCH 167/292] [ch92483] Use http gem and add socket factory support (#142) --- .circleci/config.yml | 1 + Gemfile.lock | 36 +++++++--- launchdarkly-server-sdk.gemspec | 3 +- lib/ldclient-rb/config.rb | 12 ++++ lib/ldclient-rb/events.rb | 5 +- lib/ldclient-rb/impl/event_sender.rb | 96 +++++++++++++++----------- lib/ldclient-rb/impl/unbounded_pool.rb | 34 +++++++++ lib/ldclient-rb/requestor.rb | 29 ++++---- lib/ldclient-rb/stream.rb | 3 +- lib/ldclient-rb/util.rb | 20 +++--- spec/event_sender_spec.rb | 22 +++++- spec/http_util.rb | 12 +++- spec/ldclient_end_to_end_spec.rb | 34 +++++++++ 13 files changed, 228 insertions(+), 79 deletions(-) create mode 100644 lib/ldclient-rb/impl/unbounded_pool.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index f976071f..ef162444 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,6 +16,7 @@ ruby-docker-template: &ruby-docker-template - run: | if [[ $CIRCLE_JOB == 
test-jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI + sudo apt-get update -y && sudo apt-get install -y build-essential fi - run: ruby -v - run: gem install bundler -v 1.17.3 diff --git a/Gemfile.lock b/Gemfile.lock index 81bd5ac1..1b634bf4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -3,13 +3,16 @@ PATH specs: launchdarkly-server-sdk (5.8.2) concurrent-ruby (~> 1.0) + http (~> 4.4.1) json (~> 2.3.1) - ld-eventsource (= 1.0.3) + ld-eventsource (= 2.0.0.pre.beta.1) semantic (~> 1.6) GEM remote: https://rubygems.org/ specs: + addressable (2.7.0) + public_suffix (>= 2.0.2, < 5.0) aws-eventstream (1.1.0) aws-partitions (1.388.0) aws-sdk-core (3.109.1) @@ -22,28 +25,42 @@ GEM aws-sigv4 (~> 1.1) aws-sigv4 (1.2.2) aws-eventstream (~> 1, >= 1.0.2) - concurrent-ruby (1.1.7) + concurrent-ruby (1.1.8) connection_pool (2.2.3) deep_merge (1.2.1) diff-lcs (1.4.4) diplomat (2.4.2) deep_merge (~> 1.0, >= 1.0.1) faraday (>= 0.9, < 1.1.0) + domain_name (0.5.20190701) + unf (>= 0.0.5, < 1.0.0) faraday (0.17.3) multipart-post (>= 1.2, < 3) ffi (1.12.0) - hitimes (1.3.1) - http_tools (0.4.5) + ffi-compiler (1.0.1) + ffi (>= 1.0.0) + rake + http (4.4.1) + addressable (~> 2.3) + http-cookie (~> 1.0) + http-form_data (~> 2.2) + http-parser (~> 1.2.0) + http-cookie (1.0.3) + domain_name (~> 0.5) + http-form_data (2.3.0) + http-parser (1.2.3) + ffi-compiler (>= 1.0, < 2.0) jmespath (1.4.0) json (2.3.1) - ld-eventsource (1.0.3) + ld-eventsource (2.0.0.pre.beta.1) concurrent-ruby (~> 1.0) - http_tools (~> 0.4.5) - socketry (~> 0.5.1) + http (~> 4.4.1) listen (3.2.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) multipart-post (2.1.1) + public_suffix (4.0.6) + rake (13.0.3) rb-fsevent (0.10.4) rb-inotify (0.10.1) ffi (~> 1.0) @@ -64,9 +81,10 @@ GEM rspec_junit_formatter (0.3.0) rspec-core (>= 2, < 4, != 2.12.0) semantic (1.6.1) - socketry (0.5.1) - hitimes (~> 1.2) timecop (0.9.2) + unf (0.1.4) + unf_ext + unf_ext (0.0.7.7) PLATFORMS ruby diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index d2d80678..1726f5af 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -38,9 +38,10 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.3" + spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" # lock json to 2.3.x as ruby libraries often remove # support for older ruby versions in minor releases spec.add_runtime_dependency "json", "~> 2.3.1" + spec.add_runtime_dependency "http", "~> 4.4.1" end diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index df0c73b4..edb21924 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -41,6 +41,7 @@ class Config # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. # @option opts [String] :wrapper_name See {#wrapper_name}. # @option opts [String] :wrapper_version See {#wrapper_version}. + # @option opts [#open] :socket_factory See {#socket_factory}. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -71,6 +72,7 @@ def initialize(opts = {}) opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval @wrapper_name = opts[:wrapper_name] @wrapper_version = opts[:wrapper_version] + @socket_factory = opts[:socket_factory] end # @@ -305,6 +307,16 @@ def diagnostic_opt_out? 
# attr_reader :wrapper_version + # + # The factory used to construct sockets for HTTP operations. The factory must + # provide the method `open(uri, timeout)`. The `open` method must return a + # connected stream that implements the `IO` class, such as a `TCPSocket`. + # + # Defaults to nil. + # @return [#open] + # + attr_reader :socket_factory + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index a5352a0b..2e26e1fa 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -238,10 +238,7 @@ def do_shutdown(flush_workers, diagnostic_event_workers) diagnostic_event_workers.shutdown diagnostic_event_workers.wait_for_termination end - begin - @client.finish - rescue - end + @event_sender.stop if @event_sender.respond_to?(:stop) end def synchronize_for_testing(flush_workers, diagnostic_event_workers) diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index f6da0843..442af033 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -1,4 +1,7 @@ +require "ldclient-rb/impl/unbounded_pool" + require "securerandom" +require "http" module LaunchDarkly module Impl @@ -9,62 +12,75 @@ class EventSender DEFAULT_RETRY_INTERVAL = 1 def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETRY_INTERVAL) - @client = http_client ? http_client : LaunchDarkly::Util.new_http_client(config.events_uri, config) @sdk_key = sdk_key @config = config @events_uri = config.events_uri + "/bulk" @diagnostic_uri = config.events_uri + "/diagnostic" @logger = config.logger @retry_interval = retry_interval + @http_client_pool = UnboundedPool.new( + lambda { LaunchDarkly::Util.new_http_client(@config.events_uri, @config) }, + lambda { |client| client.close }) + end + + def stop + @http_client_pool.dispose_all() end def send_event_data(event_data, description, is_diagnostic) uri = is_diagnostic ? @diagnostic_uri : @events_uri payload_id = is_diagnostic ? nil : SecureRandom.uuid - res = nil - (0..1).each do |attempt| - if attempt > 0 - @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } - sleep(@retry_interval) - end - begin - @client.start if !@client.started? - @logger.debug { "[LDClient] sending #{description}: #{event_data}" } - req = Net::HTTP::Post.new(uri) - req.content_type = "application/json" - req.body = event_data - Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } - if !is_diagnostic - req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req["X-LaunchDarkly-Payload-ID"] = payload_id + begin + http_client = @http_client_pool.acquire() + response = nil + (0..1).each do |attempt| + if attempt > 0 + @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } + sleep(@retry_interval) end - req["Connection"] = "keep-alive" - res = @client.request(req) - rescue StandardError => exn - @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } - next - end - status = res.code.to_i - if status >= 200 && status < 300 - res_time = nil - if !res["date"].nil? 
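(Editorial illustration, not part of the patch above.) The `socket_factory` option documented in the config.rb hunk above accepts any object that responds to `open(uri, timeout)` and returns a connected stream implementing `IO`, such as a `TCPSocket`. A hypothetical factory that routes all SDK connections to a fixed local host and port (for example a test double or forwarding proxy); the class name, host, and port are illustrative only:

    require "socket"
    require "ldclient-rb"

    class FixedPortSocketFactory
      def initialize(host, port)
        @host = host
        @port = port
      end

      # uri identifies the target host; timeout is the client's connect timeout.
      def open(uri, timeout)
        Socket.tcp(@host, @port, connect_timeout: timeout)
      end
    end

    config = LaunchDarkly::Config.new(
      socket_factory: FixedPortSocketFactory.new("localhost", 8080))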
- begin - res_time = Time.httpdate(res["date"]) - rescue ArgumentError + begin + @logger.debug { "[LDClient] sending #{description}: #{event_data}" } + headers = {} + headers["content-type"] = "application/json" + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } + if !is_diagnostic + headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + headers["X-LaunchDarkly-Payload-ID"] = payload_id end + response = http_client.request("POST", uri, { + headers: headers, + body: event_data + }) + rescue StandardError => exn + @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } + next + end + status = response.status.code + # must fully read body for persistent connections + body = response.to_s + if status >= 200 && status < 300 + res_time = nil + if !response.headers["date"].nil? + begin + res_time = Time.httpdate(response.headers["date"]) + rescue ArgumentError + end + end + return EventSenderResult.new(true, false, res_time) + end + must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) + can_retry = !must_shutdown && attempt == 0 + message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? "will retry" : "some events were dropped") + @logger.error { "[LDClient] #{message}" } + if must_shutdown + return EventSenderResult.new(false, true, nil) end - return EventSenderResult.new(true, false, res_time) - end - must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) - can_retry = !must_shutdown && attempt == 0 - message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? "will retry" : "some events were dropped") - @logger.error { "[LDClient] #{message}" } - if must_shutdown - return EventSenderResult.new(false, true, nil) end + # used up our retries + return EventSenderResult.new(false, false, nil) + ensure + @http_client_pool.release(http_client) end - # used up our retries - return EventSenderResult.new(false, false, nil) end end end diff --git a/lib/ldclient-rb/impl/unbounded_pool.rb b/lib/ldclient-rb/impl/unbounded_pool.rb new file mode 100644 index 00000000..55bd515f --- /dev/null +++ b/lib/ldclient-rb/impl/unbounded_pool.rb @@ -0,0 +1,34 @@ +module LaunchDarkly + module Impl + # A simple thread safe generic unbounded resource pool abstraction + class UnboundedPool + def initialize(instance_creator, instance_destructor) + @pool = Array.new + @lock = Mutex.new + @instance_creator = instance_creator + @instance_destructor = instance_destructor + end + + def acquire + @lock.synchronize { + if @pool.length == 0 + @instance_creator.call() + else + @pool.pop() + end + } + end + + def release(instance) + @lock.synchronize { @pool.push(instance) } + end + + def dispose_all + @lock.synchronize { + @pool.map { |instance| @instance_destructor.call(instance) } if !@instance_destructor.nil? 
+ @pool.clear() + } + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 71399bbf..35c5e365 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -3,6 +3,7 @@ require "concurrent/atomics" require "json" require "uri" +require "http" module LaunchDarkly # @private @@ -24,7 +25,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Util.new_http_client(@config.base_uri, @config) + @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config) @cache = @config.cache_store end @@ -35,7 +36,7 @@ def request_all_data() def stop begin - @client.finish + @http_client.close rescue end end @@ -47,19 +48,21 @@ def request_single_item(kind, path) end def make_request(path) - @client.start if !@client.started? uri = URI(@config.base_uri + path) - req = Net::HTTP::Get.new(uri) - Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } - req["Connection"] = "keep-alive" + headers = {} + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } + headers["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? - req["If-None-Match"] = cached.etag + headers["If-None-Match"] = cached.etag end - res = @client.request(req) - status = res.code.to_i - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } - + response = @http_client.request("GET", uri, { + headers: headers + }) + status = response.status.code + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{response.headers}\n\tbody: #{res.to_s}" } + # must fully read body for persistent connections + body = response.to_s if status == 304 && !cached.nil? body = cached.body else @@ -67,8 +70,8 @@ def make_request(path) if status < 200 || status >= 300 raise UnexpectedResponseError.new(status) end - body = fix_encoding(res.body, res["content-type"]) - etag = res["etag"] + body = fix_encoding(body, response.headers["content-type"]) + etag = response.headers["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? 
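(Editorial illustration, not part of the patch above.) The UnboundedPool class added by this patch is what the event sender uses to check HTTP clients in and out around each delivery attempt. Its intended usage pattern, sketched here with the http gem that this patch adopts:

    require "http"
    require "ldclient-rb/impl/unbounded_pool"

    pool = LaunchDarkly::Impl::UnboundedPool.new(
      lambda { HTTP::Client.new },        # build a new resource when the pool is empty
      lambda { |client| client.close })   # tear each resource down in dispose_all

    client = pool.acquire
    begin
      # ... use the client for one request/response cycle ...
    ensure
      pool.release(client)                # return it for reuse instead of closing it
    end

    pool.dispose_all                      # e.g. when the owning component shuts down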
end body diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index df50cfd0..64275b39 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -46,7 +46,8 @@ def start opts = { headers: headers, read_timeout: READ_TIMEOUT_SECONDS, - logger: @config.logger + logger: @config.logger, + socket_factory: @config.socket_factory } log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index e129c279..cfd09d8d 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,5 @@ -require "net/http" require "uri" +require "http" module LaunchDarkly # @private @@ -18,14 +18,18 @@ def self.stringify_attrs(hash, attrs) end ret end - + def self.new_http_client(uri_s, config) - uri = URI(uri_s) - client = Net::HTTP.new(uri.hostname, uri.port) - client.use_ssl = true if uri.scheme == "https" - client.open_timeout = config.connect_timeout - client.read_timeout = config.read_timeout - client + http_client_options = {} + if config.socket_factory + http_client_options["socket_class"] = config.socket_factory + end + return HTTP::Client.new(http_client_options) + .timeout({ + read: config.read_timeout, + connect: config.connect_timeout + }) + .persistent(uri_s) end def self.log_exception(logger, message, exc) diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 0519aebb..5ad3f2f1 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -39,12 +39,29 @@ def with_sender_and_server "authorization" => [ sdk_key ], "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "x-launchdarkly-event-schema" => [ "3" ] + "x-launchdarkly-event-schema" => [ "3" ], + "connection" => [ "Keep-Alive" ] }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] end end - + + it "can use a socket factory" do + with_server do |server| + server.setup_ok_response("/bulk", "") + + config = Config.new(events_uri: "http://events.com/bulk", socket_factory: SocketFactoryFromHash.new({"events.com" => server.port}), logger: $null_log) + es = subject.new(sdk_key, config, nil, 0.1) + + result = es.send_event_data(fake_data, "", false) + + expect(result.success).to be true + req = server.await_request + expect(req.body).to eq fake_data + expect(req.host).to eq "events.com" + end + end + it "generates a new payload ID for each payload" do with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") @@ -78,6 +95,7 @@ def with_sender_and_server "authorization" => [ sdk_key ], "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "connection" => [ "Keep-Alive" ] }) expect(req.header['x-launchdarkly-event-schema']).to eq [] expect(req.header['x-launchdarkly-payload-id']).to eq [] diff --git a/spec/http_util.rb b/spec/http_util.rb index 27032589..1a789772 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -3,7 +3,7 @@ require "webrick/https" class StubHTTPServer - attr_reader :requests + attr_reader :requests, :port @@next_port = 50000 @@ -120,3 +120,13 @@ def with_server(server = nil) server.stop end end + +class SocketFactoryFromHash + def initialize(ports = {}) + @ports = ports + end + + def open(uri, timeout) + TCPSocket.new 'localhost', @ports[uri] + end +end \ No newline at end of file diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index b93a98b4..a820b608 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ 
b/spec/ldclient_end_to_end_spec.rb @@ -80,6 +80,7 @@ module LaunchDarkly req, body = events_server.await_request_with_body expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data.length).to eq 1 expect(data[0]["kind"]).to eq "identify" @@ -111,6 +112,7 @@ module LaunchDarkly req = req0.path == "/diagnostic" ? req0 : req1 body = req0.path == "/diagnostic" ? body0 : body1 expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data["kind"]).to eq "diagnostic-init" end @@ -118,6 +120,38 @@ module LaunchDarkly end end + it "can use socket factory" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: "http://polling.com", + events_uri: "http://events.com", + diagnostic_opt_out: true, + logger: NullLogger.new, + socket_factory: SocketFactoryFromHash.new({ + "polling.com" => poll_server.port, + "events.com" => events_server.port + }) + ) + with_client(config) do |client| + client.identify(USER) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + end + # TODO: TLS tests with self-signed cert end end From c7690118dc432df274f99db5ed2b3a4518022a54 Mon Sep 17 00:00:00 2001 From: Elliot <35050275+Apache-HB@users.noreply.github.com> Date: Mon, 25 Jan 2021 17:15:28 -0500 Subject: [PATCH 168/292] update dependencies and add CI for ruby 3 (#141) --- .circleci/config.yml | 18 ++--- CONTRIBUTING.md | 2 +- Gemfile.lock | 73 +++++++++++-------- README.md | 4 +- azure-pipelines.yml | 2 +- launchdarkly-server-sdk.gemspec | 30 ++++---- .../impl/integrations/redis_impl.rb | 4 +- spec/launchdarkly-server-sdk_spec.rb | 2 +- 8 files changed, 70 insertions(+), 65 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ef162444..6e7dd560 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,10 +4,10 @@ workflows: version: 2 test: jobs: - - test-2.4 - test-2.5 - test-2.6 - test-2.7 + - test-3.0 - test-jruby-9.2 ruby-docker-template: &ruby-docker-template @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template sudo apt-get update -y && sudo apt-get install -y build-essential fi - run: ruby -v - - run: gem install bundler -v 1.17.3 + - run: gem install bundler - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -29,13 +29,6 @@ ruby-docker-template: &ruby-docker-template path: ./rspec jobs: - test-2.4: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.4 - - image: consul - - image: redis - - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: @@ -57,6 +50,13 @@ jobs: - image: consul - image: redis - image: amazon/dynamodb-local + test-3.0: + <<: *ruby-docker-template + docker: + - image: circleci/ruby:3.0 + - image: consul + - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ac126eec..fb244f5c 100644 --- a/CONTRIBUTING.md +++ 
b/CONTRIBUTING.md @@ -18,7 +18,7 @@ Build instructions ### Prerequisites -This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. +This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler`. You might need `sudo` to execute the command successfully. To install the runtime dependencies: diff --git a/Gemfile.lock b/Gemfile.lock index 1b634bf4..f47034a8 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,7 +2,7 @@ PATH remote: . specs: launchdarkly-server-sdk (5.8.2) - concurrent-ruby (~> 1.0) + concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) ld-eventsource (= 2.0.0.pre.beta.1) @@ -13,14 +13,16 @@ GEM specs: addressable (2.7.0) public_suffix (>= 2.0.2, < 5.0) + ansi (1.5.0) + ast (2.4.2) aws-eventstream (1.1.0) - aws-partitions (1.388.0) - aws-sdk-core (3.109.1) + aws-partitions (1.418.0) + aws-sdk-core (3.111.2) aws-eventstream (~> 1, >= 1.0.2) aws-partitions (~> 1, >= 1.239.0) aws-sigv4 (~> 1.1) jmespath (~> 1.0) - aws-sdk-dynamodb (1.55.0) + aws-sdk-dynamodb (1.58.0) aws-sdk-core (~> 3, >= 3.109.0) aws-sigv4 (~> 1.1) aws-sigv4 (1.2.2) @@ -34,9 +36,9 @@ GEM faraday (>= 0.9, < 1.1.0) domain_name (0.5.20190701) unf (>= 0.0.5, < 1.0.0) - faraday (0.17.3) + faraday (1.0.1) multipart-post (>= 1.2, < 3) - ffi (1.12.0) + ffi (1.14.2) ffi-compiler (1.0.1) ffi (>= 1.0.0) rake @@ -55,53 +57,60 @@ GEM ld-eventsource (2.0.0.pre.beta.1) concurrent-ruby (~> 1.0) http (~> 4.4.1) - listen (3.2.1) + listen (3.4.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) multipart-post (2.1.1) + oga (2.15) + ast + ruby-ll (~> 2.1) public_suffix (4.0.6) rake (13.0.3) rb-fsevent (0.10.4) rb-inotify (0.10.1) ffi (~> 1.0) - redis (3.3.5) - rspec (3.9.0) - rspec-core (~> 3.9.0) - rspec-expectations (~> 3.9.0) - rspec-mocks (~> 3.9.0) - rspec-core (3.9.3) - rspec-support (~> 3.9.3) - rspec-expectations (3.9.3) + redis (4.2.5) + rspec (3.10.0) + rspec-core (~> 3.10.0) + rspec-expectations (~> 3.10.0) + rspec-mocks (~> 3.10.0) + rspec-core (3.10.1) + rspec-support (~> 3.10.0) + rspec-expectations (3.10.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.9.0) - rspec-mocks (3.9.1) + rspec-support (~> 3.10.0) + rspec-mocks (3.10.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.9.0) - rspec-support (3.9.4) - rspec_junit_formatter (0.3.0) + rspec-support (~> 3.10.0) + rspec-support (3.10.1) + rspec_junit_formatter (0.4.1) rspec-core (>= 2, < 4, != 2.12.0) + ruby-ll (2.1.2) + ansi + ast semantic (1.6.1) timecop (0.9.2) unf (0.1.4) unf_ext unf_ext (0.0.7.7) + webrick (1.7.0) PLATFORMS ruby DEPENDENCIES - aws-sdk-dynamodb (~> 1.18) - bundler (~> 1.17) - connection_pool (>= 2.1.2) - diplomat (>= 2.0.2) - faraday (~> 0.17) - ffi (<= 1.12) + aws-sdk-dynamodb (~> 1.57) + bundler (~> 2.1) + connection_pool (~> 2.2.3) + diplomat (~> 2.4.2) launchdarkly-server-sdk! 
- listen (~> 3.0) - redis (~> 3.3.5) - rspec (~> 3.2) - rspec_junit_formatter (~> 0.3.0) - timecop (~> 0.9.1) + listen (~> 3.3) + oga (~> 2.2) + redis (~> 4.2) + rspec (~> 3.10) + rspec_junit_formatter (~> 0.4) + timecop (~> 0.9) + webrick (~> 1.7) BUNDLED WITH - 1.17.3 + 2.2.3 diff --git a/README.md b/README.md index 2a61c06c..ef8c0e33 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ LaunchDarkly overview Supported Ruby versions ----------------------- -This version of the LaunchDarkly SDK has a minimum Ruby version of 2.3.0, or 9.2.0 for JRuby. +This version of the LaunchDarkly SDK has a minimum Ruby version of 2.5.0, or 9.2.0 for JRuby. Getting started ----------- @@ -55,4 +55,4 @@ About LaunchDarkly * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies \ No newline at end of file + * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 3d3fd98a..88296f02 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler -v 1.17.3 + gem install bundler bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 1726f5af..411ba4c1 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -19,25 +19,23 @@ Gem::Specification.new do |spec| spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] - spec.required_ruby_version = ">= 2.4.0" + spec.required_ruby_version = ">= 2.5.0" - spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" - spec.add_development_dependency "bundler", "~> 1.17" - spec.add_development_dependency "rspec", "~> 3.2" - spec.add_development_dependency "diplomat", ">= 2.0.2" - spec.add_development_dependency "redis", "~> 3.3.5" - spec.add_development_dependency "connection_pool", ">= 2.1.2" - spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" - spec.add_development_dependency "timecop", "~> 0.9.1" - spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb - # these are transitive dependencies of listen and consul respectively - # we constrain them here to make sure the ruby 2.2, 2.3, and 2.4 CI - # cases all pass - spec.add_development_dependency "ffi", "<= 1.12" # >1.12 doesnt support ruby 2.2 - spec.add_development_dependency "faraday", "~> 0.17" # >=0.18 doesnt support ruby 2.2 + spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" + spec.add_development_dependency "bundler", "~> 2.1" + spec.add_development_dependency "rspec", "~> 3.10" + spec.add_development_dependency "diplomat", "~> 2.4.2" + spec.add_development_dependency "redis", "~> 4.2" + spec.add_development_dependency "connection_pool", "~> 2.2.3" + spec.add_development_dependency 
"rspec_junit_formatter", "~> 0.4" + spec.add_development_dependency "timecop", "~> 0.9" + spec.add_development_dependency "listen", "~> 3.3" # see file_data_source.rb + spec.add_development_dependency "webrick", "~> 1.7" + # required by dynamodb + spec.add_development_dependency "oga", "~> 2.2" spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" + spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" # lock json to 2.3.x as ruby libraries often remove diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index a4cb1365..f948e54a 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -114,9 +114,7 @@ def upsert_internal(kind, new_item) end def initialized_internal? - with_connection do |redis| - redis.respond_to?(:exists?) ? redis.exists?(inited_key) : redis.exists(inited_key) - end + with_connection { |redis| redis.exists?(inited_key) } end def stop diff --git a/spec/launchdarkly-server-sdk_spec.rb b/spec/launchdarkly-server-sdk_spec.rb index b594dac8..6dfa4808 100644 --- a/spec/launchdarkly-server-sdk_spec.rb +++ b/spec/launchdarkly-server-sdk_spec.rb @@ -4,7 +4,7 @@ describe LaunchDarkly do it "can be automatically loaded by Bundler.require" do ldclient_loaded = - Bundler.with_clean_env do + Bundler.with_unbundled_env do Kernel.system("ruby", "./spec/launchdarkly-server-sdk_spec_autoloadtest.rb") end From 91692ca4c33f132b02bd0d991ed4369189f45dec Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Tue, 26 Jan 2021 10:57:17 -0800 Subject: [PATCH 169/292] reference eventsource 2.0 in gemspec --- Gemfile.lock | 4 ++-- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index f47034a8..632f9dcf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -5,7 +5,7 @@ PATH concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) - ld-eventsource (= 2.0.0.pre.beta.1) + ld-eventsource (~> 2.0) semantic (~> 1.6) GEM @@ -54,7 +54,7 @@ GEM ffi-compiler (>= 1.0, < 2.0) jmespath (1.4.0) json (2.3.1) - ld-eventsource (2.0.0.pre.beta.1) + ld-eventsource (2.0.0) concurrent-ruby (~> 1.0) http (~> 4.4.1) listen (3.4.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 411ba4c1..dcf281fe 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,7 +36,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" + spec.add_runtime_dependency "ld-eventsource", "~> 2.0" # lock json to 2.3.x as ruby libraries often remove # support for older ruby versions in minor releases From ad0c4f24a8363a9a08df5aa331629c9651c8d5c4 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Tue, 26 Jan 2021 10:58:51 -0800 Subject: [PATCH 170/292] add 5.x releasable branch for releaser --- .ldrelease/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index f758fcaf..4f3d0b67 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -2,6 +2,10 @@ repo: public: ruby-server-sdk private: ruby-server-sdk-private +releasableBranches: + - name: master + - name: 5.x + publications: - url: https://rubygems.org/gems/launchdarkly-server-sdk description: RubyGems From 
567f54e1e0e3a1f1cb5289e7fd0d51fcf5f99984 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Jan 2021 13:29:03 -0800 Subject: [PATCH 171/292] use Ruby 2.6.6 in releases --- .ldrelease/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index f148c89e..c1fcca80 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -17,7 +17,7 @@ template: circleci: linux: - image: circleci/ruby:2.6.2-stretch + image: circleci/ruby:2.6.6-buster context: org-global env: LD_SKIP_DATABASE_TESTS: "1" # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI From efec41fdea7d145aebb946ff94b9cc1c667b4665 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Wed, 3 Feb 2021 15:07:52 -0800 Subject: [PATCH 172/292] Removed the guides link --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index ef8c0e33..2f7b01c6 100644 --- a/README.md +++ b/README.md @@ -55,4 +55,3 @@ About LaunchDarkly * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies From 7601ec78fcd73292af54831d2f7c8588b38c0e12 Mon Sep 17 00:00:00 2001 From: hroederld Date: Thu, 4 Feb 2021 12:29:37 -0800 Subject: [PATCH 173/292] [ch99757] add alias method (#147) --- Gemfile.lock | 2 +- azure-pipelines.yml | 2 +- lib/ldclient-rb/events.rb | 2 ++ lib/ldclient-rb/impl/event_factory.rb | 22 ++++++++++++ lib/ldclient-rb/ldclient.rb | 17 +++++++++ spec/events_spec.rb | 10 ++++++ spec/ldclient_spec.rb | 50 +++++++++++++++++++++++++++ 7 files changed, 103 insertions(+), 2 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 632f9dcf..ff4cdf63 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - launchdarkly-server-sdk (5.8.2) + launchdarkly-server-sdk (6.0.0) concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 88296f02..cb66e704 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler + gem install bundler:2.2.7 bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 2e26e1fa..c59db7d0 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -439,6 +439,7 @@ def make_output_event(event) out[:variation] = event[:variation] if event.has_key?(:variation) out[:version] = event[:version] if event.has_key?(:version) out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) + out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) if @inline_users || is_debug out[:user] = process_user(event) else @@ -466,6 +467,7 @@ def make_output_event(event) out[:userKey] = event[:user].nil? ? 
nil : event[:user][:key] end out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) + out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) out when "index" { diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 2e7d2697..256eea98 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -28,6 +28,7 @@ def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil? e[:reason] = detail.reason if add_experiment_data || @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -43,6 +44,7 @@ def new_default_event(flag, user, default_value, reason) e[:trackEvents] = true if flag[:trackEvents] e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] e[:reason] = reason if @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -55,6 +57,7 @@ def new_unknown_flag_event(key, user, default_value, reason) default: default_value } e[:reason] = reason if @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -66,6 +69,16 @@ def new_identify_event(user) } end + def new_alias_event(current_context, previous_context) + { + kind: 'alias', + key: current_context[:key], + contextKind: context_to_context_kind(current_context), + previousKey: previous_context[:key], + previousContextKind: context_to_context_kind(previous_context) + } + end + def new_custom_event(event_name, user, data, metric_value) e = { kind: 'custom', @@ -74,11 +87,20 @@ def new_custom_event(event_name, user, data, metric_value) } e[:data] = data if !data.nil? e[:metricValue] = metric_value if !metric_value.nil? + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end private + def context_to_context_kind(user) + if !user.nil? && user[:anonymous] + return "anonymousUser" + else + return "user" + end + end + def is_experiment(flag, reason) return false if !reason case reason[:kind] diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index cfa63351..5d803ef3 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -282,6 +282,23 @@ def track(event_name, user, data = nil, metric_value = nil) @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) end + # + # Associates a new and old user object for analytics purposes via an alias event. + # + # @param current_context [Hash] The current version of a user. + # @param previous_context [Hash] The previous version of a user. + # @return [void] + # + def alias(current_context, previous_context) + if !current_context || current_context[:key].nil? || !previous_context || previous_context[:key].nil? + @config.logger.warn("Alias called with nil user or nil user key!") + return + end + sanitize_user(current_context) + sanitize_user(previous_context) + @event_processor.add_event(@event_factory_default.new_alias_event(current_context, previous_context)) + end + # # Returns all feature flag values for the given user. 
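(Editorial illustration, not part of the patch above.) The alias method added above associates two versions of the same user, and the contextKind values recorded on the event ("user" vs. "anonymousUser") come from the event factory changes in this patch. A sketch of the intended call, assuming `client` is an initialized LDClient and a formerly anonymous visitor has just been identified:

    anonymous_user = { key: "session-abc123", anonymous: true }
    logged_in_user = { key: "user-key-123" }

    # current user first, previous user second
    client.alias(logged_in_user, anonymous_user)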
# diff --git a/spec/events_spec.rb b/spec/events_spec.rb index d7854567..e9a6d6ff 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -408,6 +408,16 @@ def with_processor_and_sender(config) end end + it "queues alias event" do + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "alias", key: "a", contextKind: "user", previousKey: "b", previousContextKind: "user" } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end + end + it "treats nil value for custom the same as an empty hash" do with_processor_and_sender(default_config) do |ep, sender| user_with_nil_custom = { key: "userkey", custom: nil } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 76e5b0f7..f7d215e2 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -25,6 +25,12 @@ } } end + let(:user_anonymous) do + { + key: "anonymous@test.com", + anonymous: true + } + end let(:numeric_key_user) do { key: 33, @@ -155,6 +161,24 @@ def event_processor client.variation("key", nil, "default") end + it "queues a feature event for an existing feature when user is anonymous" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", + key: "key", + version: 100, + contextKind: "anonymousUser", + user: user_anonymous, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000 + )) + client.variation("key", user_anonymous, "default") + end + it "queues a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) @@ -455,6 +479,12 @@ def event_processor client.track("custom_event_name", user, nil, 1.5) end + it "includes contextKind with anonymous user" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: user_anonymous, metricValue: 2.2, contextKind: "anonymousUser")) + client.track("custom_event_name", user_anonymous, nil, 2.2) + end + it "sanitizes the user in the event" do expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) @@ -473,6 +503,26 @@ def event_processor end end + describe '#alias' do + it "queues up an alias event" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "alias", key: user[:key], contextKind: "user", previousKey: user_anonymous[:key], previousContextKind: "anonymousUser")) + client.alias(user, user_anonymous) + end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(nil, nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(user_without_key, user_without_key) + end + end + describe '#identify' do it "queues up an identify event" do expect(event_processor).to receive(:add_event).with(hash_including(kind: "identify", key: user[:key], user: user)) From 162c596aa8ac6d90ca7327387a3adf776ea299ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 5 Feb 2021 18:20:08 -0800 Subject: [PATCH 174/292] don't send event 
for nil user evaluation --- lib/ldclient-rb/ldclient.rb | 13 ++++++------- spec/ldclient_spec.rb | 22 +++++++++++----------- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5d803ef3..37d80e9a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -401,6 +401,12 @@ def evaluate_internal(key, user, default, event_factory) return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end + unless user + @config.logger.error { "[LDClient] Must specify user" } + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) + return detail + end + if !initialized? if @store.initialized? @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } @@ -421,13 +427,6 @@ def evaluate_internal(key, user, default, event_factory) return detail end - unless user - @config.logger.error { "[LDClient] Must specify user" } - detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) - @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) - return detail - end - begin res = @evaluator.evaluate(feature, user, event_factory) if !res.events.nil? diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index f7d215e2..cad4c03c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -145,19 +145,11 @@ def event_processor client.variation("key", user, "default") end - it "queues a feature event for an existing feature when user is nil" do + it "does not send an event if user is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: nil, - value: "default", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:error) client.variation("key", nil, "default") end @@ -313,6 +305,14 @@ def event_processor )) client.variation_detail("key", user, "default") end + + it "does not send an event if user is nil" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation_detail("key", nil, "default") + end end describe '#all_flags' do From 3b557096a8d4c8fcdbc12cb7f35f942df1f7f352 Mon Sep 17 00:00:00 2001 From: hroederld Date: Fri, 5 Feb 2021 18:22:51 -0800 Subject: [PATCH 175/292] remove lockfile (#148) --- .gitignore | 1 + Gemfile.lock | 116 -------------------------------------------- azure-pipelines.yml | 2 +- 3 files changed, 2 insertions(+), 117 deletions(-) delete mode 100644 Gemfile.lock diff --git a/.gitignore b/.gitignore index 9e998e64..d327dbe8 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ mkmf.log *.gem .DS_Store +Gemfile.lock \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock deleted file mode 100644 index ff4cdf63..00000000 --- a/Gemfile.lock +++ /dev/null @@ -1,116 +0,0 @@ -PATH - remote: . 
- specs: - launchdarkly-server-sdk (6.0.0) - concurrent-ruby (~> 1.1) - http (~> 4.4.1) - json (~> 2.3.1) - ld-eventsource (~> 2.0) - semantic (~> 1.6) - -GEM - remote: https://rubygems.org/ - specs: - addressable (2.7.0) - public_suffix (>= 2.0.2, < 5.0) - ansi (1.5.0) - ast (2.4.2) - aws-eventstream (1.1.0) - aws-partitions (1.418.0) - aws-sdk-core (3.111.2) - aws-eventstream (~> 1, >= 1.0.2) - aws-partitions (~> 1, >= 1.239.0) - aws-sigv4 (~> 1.1) - jmespath (~> 1.0) - aws-sdk-dynamodb (1.58.0) - aws-sdk-core (~> 3, >= 3.109.0) - aws-sigv4 (~> 1.1) - aws-sigv4 (1.2.2) - aws-eventstream (~> 1, >= 1.0.2) - concurrent-ruby (1.1.8) - connection_pool (2.2.3) - deep_merge (1.2.1) - diff-lcs (1.4.4) - diplomat (2.4.2) - deep_merge (~> 1.0, >= 1.0.1) - faraday (>= 0.9, < 1.1.0) - domain_name (0.5.20190701) - unf (>= 0.0.5, < 1.0.0) - faraday (1.0.1) - multipart-post (>= 1.2, < 3) - ffi (1.14.2) - ffi-compiler (1.0.1) - ffi (>= 1.0.0) - rake - http (4.4.1) - addressable (~> 2.3) - http-cookie (~> 1.0) - http-form_data (~> 2.2) - http-parser (~> 1.2.0) - http-cookie (1.0.3) - domain_name (~> 0.5) - http-form_data (2.3.0) - http-parser (1.2.3) - ffi-compiler (>= 1.0, < 2.0) - jmespath (1.4.0) - json (2.3.1) - ld-eventsource (2.0.0) - concurrent-ruby (~> 1.0) - http (~> 4.4.1) - listen (3.4.1) - rb-fsevent (~> 0.10, >= 0.10.3) - rb-inotify (~> 0.9, >= 0.9.10) - multipart-post (2.1.1) - oga (2.15) - ast - ruby-ll (~> 2.1) - public_suffix (4.0.6) - rake (13.0.3) - rb-fsevent (0.10.4) - rb-inotify (0.10.1) - ffi (~> 1.0) - redis (4.2.5) - rspec (3.10.0) - rspec-core (~> 3.10.0) - rspec-expectations (~> 3.10.0) - rspec-mocks (~> 3.10.0) - rspec-core (3.10.1) - rspec-support (~> 3.10.0) - rspec-expectations (3.10.1) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.10.0) - rspec-mocks (3.10.1) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.10.0) - rspec-support (3.10.1) - rspec_junit_formatter (0.4.1) - rspec-core (>= 2, < 4, != 2.12.0) - ruby-ll (2.1.2) - ansi - ast - semantic (1.6.1) - timecop (0.9.2) - unf (0.1.4) - unf_ext - unf_ext (0.0.7.7) - webrick (1.7.0) - -PLATFORMS - ruby - -DEPENDENCIES - aws-sdk-dynamodb (~> 1.57) - bundler (~> 2.1) - connection_pool (~> 2.2.3) - diplomat (~> 2.4.2) - launchdarkly-server-sdk! - listen (~> 3.3) - oga (~> 2.2) - redis (~> 4.2) - rspec (~> 3.10) - rspec_junit_formatter (~> 0.4) - timecop (~> 0.9) - webrick (~> 1.7) - -BUNDLED WITH - 2.2.3 diff --git a/azure-pipelines.yml b/azure-pipelines.yml index cb66e704..88296f02 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler:2.2.7 + gem install bundler bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From e7f4aaa4d76470855261397dc955fa2dc20f5227 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 5 Feb 2021 18:30:51 -0800 Subject: [PATCH 176/292] rm redundant nil check --- lib/ldclient-rb/events.rb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c59db7d0..7b77c4db 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -443,7 +443,7 @@ def make_output_event(event) if @inline_users || is_debug out[:user] = process_user(event) else - out[:userKey] = event[:user].nil? ? nil : event[:user][:key] + out[:userKey] = event[:user][:key] end out[:reason] = event[:reason] if !event[:reason].nil? 
out @@ -451,7 +451,7 @@ def make_output_event(event) { kind: "identify", creationDate: event[:creationDate], - key: event[:user].nil? ? nil : event[:user][:key].to_s, + key: event[:user][:key].to_s, user: process_user(event) } when "custom" @@ -464,7 +464,7 @@ def make_output_event(event) if @inline_users out[:user] = process_user(event) else - out[:userKey] = event[:user].nil? ? nil : event[:user][:key] + out[:userKey] = event[:user][:key] end out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) From 0452cd97ac3e78472a4fe0dc3646cff818af673b Mon Sep 17 00:00:00 2001 From: Kerrie Martinez Date: Fri, 14 May 2021 12:32:33 -0700 Subject: [PATCH 177/292] Experiment Allocation Changes (#150) * WIP - from sam's pairing session * starting sdk changes * adding tests and making sure everything works * adding more tests * removing the singleton for fallthrough * Revert "removing the singleton for fallthrough" This reverts commit dff7adbb809ecc63118d0fbff9742a88a039c679. * taking a different approach to keep things immutable * adding tests for untracked * remove unnecessary comment * making sure to return two values in all code paths Co-authored-by: pellyg-ld --- lib/ldclient-rb/evaluation_detail.rb | 45 ++++++-- lib/ldclient-rb/impl/evaluator.rb | 10 +- lib/ldclient-rb/impl/evaluator_bucketing.rb | 29 ++++-- lib/ldclient-rb/impl/event_factory.rb | 6 ++ spec/impl/evaluator_bucketing_spec.rb | 48 +++++++-- spec/impl/evaluator_rule_spec.rb | 32 ++++++ spec/impl/evaluator_spec.rb | 44 ++++++++ spec/impl/event_factory_spec.rb | 108 ++++++++++++++++++++ 8 files changed, 294 insertions(+), 28 deletions(-) create mode 100644 spec/impl/event_factory_spec.rb diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index bccaf133..dc2e6bab 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -120,6 +120,9 @@ class EvaluationReason # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. attr_reader :rule_id + # A boolean or nil value representing if the rule or fallthrough has an experiment rollout. + attr_reader :in_experiment + # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not # {#PREREQUISITE_FAILED}, this will be `nil`. attr_reader :prerequisite_key @@ -136,8 +139,12 @@ def self.off # Returns an instance whose {#kind} is {#FALLTHROUGH}. # @return [EvaluationReason] - def self.fallthrough - @@fallthrough + def self.fallthrough(in_experiment=false) + if in_experiment + @@fallthrough_with_experiment + else + @@fallthrough + end end # Returns an instance whose {#kind} is {#TARGET_MATCH}. @@ -153,10 +160,16 @@ def self.target_match # @param rule_id [String] unique string identifier for the matched rule # @return [EvaluationReason] # @raise [ArgumentError] if `rule_index` is not a number or `rule_id` is not a string - def self.rule_match(rule_index, rule_id) + def self.rule_match(rule_index, rule_id, in_experiment=false) raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? 
String) # in test data, ID could be nil - new(:RULE_MATCH, rule_index, rule_id, nil, nil) + + if in_experiment + er = new(:RULE_MATCH, rule_index, rule_id, nil, nil, true) + else + er = new(:RULE_MATCH, rule_index, rule_id, nil, nil) + end + er end # Returns an instance whose {#kind} is {#PREREQUISITE_FAILED}. @@ -204,11 +217,17 @@ def to_s def inspect case @kind when :RULE_MATCH - "RULE_MATCH(#{@rule_index},#{@rule_id})" + if @in_experiment + "RULE_MATCH(#{@rule_index},#{@rule_id},#{@in_experiment})" + else + "RULE_MATCH(#{@rule_index},#{@rule_id})" + end when :PREREQUISITE_FAILED "PREREQUISITE_FAILED(#{@prerequisite_key})" when :ERROR "ERROR(#{@error_kind})" + when :FALLTHROUGH + @in_experiment ? "FALLTHROUGH(#{@in_experiment})" : @kind.to_s else @kind.to_s end @@ -225,11 +244,21 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json # as_json and then modify the result. case @kind when :RULE_MATCH - { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + if @in_experiment + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, in_experiment: @in_experiment } + else + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + end when :PREREQUISITE_FAILED { kind: @kind, prerequisiteKey: @prerequisite_key } when :ERROR { kind: @kind, errorKind: @error_kind } + when :FALLTHROUGH + if @in_experiment + { kind: @kind, in_experiment: @in_experiment } + else + { kind: @kind } + end else { kind: @kind } end @@ -263,7 +292,7 @@ def [](key) private - def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil) @kind = kind.to_sym @rule_index = rule_index @rule_id = rule_id @@ -271,6 +300,7 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) @prerequisite_key = prerequisite_key @prerequisite_key.freeze if !prerequisite_key.nil? @error_kind = error_kind + @in_experiment = in_experiment end private_class_method :new @@ -279,6 +309,7 @@ def self.make_error(error_kind) new(:ERROR, nil, nil, nil, error_kind) end + @@fallthrough_with_experiment = new(:FALLTHROUGH, nil, nil, nil, nil, true) @@fallthrough = new(:FALLTHROUGH, nil, nil, nil, nil) @@off = new(:OFF, nil, nil, nil, nil) @@target_match = new(:TARGET_MATCH, nil, nil, nil, nil) diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index d441eb42..00898cd9 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -190,7 +190,7 @@ def segment_rule_match_user(rule, user, segment_key, salt) return true if !rule[:weight] # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) + bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? 
"key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 return bucket < weight end @@ -213,7 +213,13 @@ def get_off_value(flag, reason) end def get_value_for_variation_or_rollout(flag, vr, user, reason) - index = EvaluatorBucketing.variation_index_for_user(flag, vr, user) + index, in_experiment = EvaluatorBucketing.variation_index_for_user(flag, vr, user) + #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set + if in_experiment && reason.kind == :FALLTHROUGH + reason = EvaluationReason::fallthrough(in_experiment) + elsif in_experiment && reason.kind == :RULE_MATCH + reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) + end if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index b3d14ed1..f2f2075f 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -10,20 +10,29 @@ module EvaluatorBucketing # @param user [Object] the user properties # @return [Number] the variation index, or nil if there is an error def self.variation_index_for_user(flag, rule, user) + in_experiment = nil + variation = rule[:variation] - return variation if !variation.nil? # fixed variation + return variation, in_experiment if !variation.nil? # fixed variation rollout = rule[:rollout] - return nil if rollout.nil? + return nil, in_experiment if rollout.nil? variations = rollout[:variations] if !variations.nil? && variations.length > 0 # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] - bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) + + seed = rollout[:seed] + bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt], seed) # may not be present sum = 0; variations.each do |variate| + if rule[:rollout][:kind] == "experiment" && !variate[:untracked] + in_experiment = true + end + sum += variate[:weight].to_f / 100000.0 + if bucket < sum - return variate[:variation] + return variate[:variation], in_experiment end end # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -31,9 +40,9 @@ def self.variation_index_for_user(flag, rule, user) # data could contain buckets that don't actually add up to 100000. Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. - variations[-1][:variation] + [ variations[-1][:variation], in_experiment ] else # the rule isn't well-formed - nil + [ nil, in_experiment ] end end @@ -44,7 +53,7 @@ def self.variation_index_for_user(flag, rule, user) # @param bucket_by [String|Symbol] the name of the user attribute to be used for bucketing # @param salt [String] the feature flag's or segment's salt value # @return [Number] the bucket value, from 0 inclusive to 1 exclusive - def self.bucket_user(user, key, bucket_by, salt) + def self.bucket_user(user, key, bucket_by, salt, seed) return nil unless user[:key] id_hash = bucketable_string_value(EvaluatorOperators.user_value(user, bucket_by)) @@ -56,7 +65,11 @@ def self.bucket_user(user, key, bucket_by, salt) id_hash += "." 
+ user[:secondary].to_s end - hash_key = "%s.%s.%s" % [key, salt, id_hash] + if seed + hash_key = "%d.%s" % [seed, id_hash] + else + hash_key = "%s.%s.%s" % [key, salt, id_hash] + end hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 256eea98..691339d7 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -103,6 +103,11 @@ def context_to_context_kind(user) def is_experiment(flag, reason) return false if !reason + + if reason.in_experiment + return true + end + case reason[:kind] when 'RULE_MATCH' index = reason[:ruleIndex] @@ -115,6 +120,7 @@ def is_experiment(flag, reason) end false end + end end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index a9c79b5c..35775838 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -4,17 +4,43 @@ subject { LaunchDarkly::Impl::EvaluatorBucketing } describe "bucket_user" do + describe "seed exists" do + let(:seed) { 61 } + it "gets the expected bucket values for seed" do + user = { key: "userKeyA" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.09801207); + + user = { key: "userKeyB" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.14483777); + + user = { key: "userKeyC" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.9242641); + end + + it "should return the same bucket if the seed and user is the same" do + user = { key: "userKeyA" } + bucket1 = subject.bucket_user(user, "hashKey", "bucket_by", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "bucket_by", "saltyB", seed) + bucket3 = subject.bucket_user(user, "hashKey2", "bucket_by", "saltyC", seed) + expect(bucket1).to eq(bucket2) + expect(bucket2).to eq(bucket3) + end + end + it "gets expected bucket values for specific keys" do user = { key: "userKeyA" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.42157587); user = { key: "userKeyB" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.6708485); user = { key: "userKeyC" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.10343106); end @@ -26,8 +52,8 @@ intAttr: 33333 } } - stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA") - intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA") + stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA", nil) + intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA", nil) expect(intResult).to be_within(0.0000001).of(0.54771423) expect(intResult).to eq(stringResult) @@ -40,7 +66,7 @@ floatAttr: 33.5 } } - result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA") + result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) end @@ -52,7 +78,7 @@ boolAttr: true } } - result = subject.bucket_user(user, "hashKey", 
"boolAttr", "saltyA") + result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) end end @@ -65,7 +91,7 @@ # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_user(user, flag_key, "key", salt) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() expect(bucket_value).to be > 0 expect(bucket_value).to be < 100000 @@ -83,7 +109,7 @@ } flag = { key: flag_key, salt: salt } - result_variation = subject.variation_index_for_user(flag, rule, user) + result_variation, _ = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be matched_variation end @@ -92,7 +118,7 @@ flag_key = "flagkey" salt = "salt" - bucket_value = (subject.bucket_user(user, flag_key, "key", salt) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { @@ -104,7 +130,7 @@ } flag = { key: flag_key, salt: salt } - result_variation = subject.variation_index_for_user(flag, rule, user) + result_variation, _ = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be 0 end end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index a1ae5d66..8f6c207f 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -91,6 +91,38 @@ module Impl result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end + + describe "experiment rollout behavior" do + it "sets the in_experiment value if rollout kind is experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(true) + end + + it "does not set the in_experiment value if rollout kind is not experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + + it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + end end end end diff --git 
a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index dcf8928b..4b0f3741 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -299,6 +299,50 @@ module Impl expect(result.detail).to eq(detail) expect(result.events).to eq(nil) end + + describe "experiment rollout behavior" do + it "sets the in_experiment value if rollout kind is experiment and untracked false" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(true) + end + + it "does not set the in_experiment value if rollout kind is not experiment" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + + it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + end end end end diff --git a/spec/impl/event_factory_spec.rb b/spec/impl/event_factory_spec.rb new file mode 100644 index 00000000..9da19de0 --- /dev/null +++ b/spec/impl/event_factory_spec.rb @@ -0,0 +1,108 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EventFactory do + subject { LaunchDarkly::Impl::EventFactory } + + describe "#new_eval_event" do + let(:event_factory_without_reason) { subject.new(false) } + let(:user) { { 'key': 'userA' } } + let(:rule_with_experiment_rollout) { + { id: 'ruleid', + clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + trackEvents: false, + rollout: { kind: 'experiment', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } + } + } + + let(:rule_with_rollout) { + { id: 'ruleid', + trackEvents: false, + clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } + } + } + + let(:fallthrough_with_rollout) { + { rollout: { kind: 'rollout', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ], trackEventsFallthrough: false } } + } + + let(:rule_reason) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid') } + let(:rule_reason_with_experiment) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid', true) } + let(:fallthrough_reason) { LaunchDarkly::EvaluationReason::fallthrough } + let(:fallthrough_reason_with_experiment) { LaunchDarkly::EvaluationReason::fallthrough(true) } + + context "in_experiment is true" do + it "sets the reason and trackevents: true for rules" do + flag = createFlag('rule', rule_with_experiment_rollout) + 
detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason_with_experiment) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid,true)") + end + + it "sets the reason and trackevents: true for the fallthrough" do + fallthrough_with_rollout[:kind] = 'experiment' + flag = createFlag('fallthrough', fallthrough_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason_with_experiment) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("FALLTHROUGH(true)") + end + end + + context "in_experiment is false" do + it "sets the reason & trackEvents: true if rule has trackEvents set to true" do + rule_with_rollout[:trackEvents] = true + flag = createFlag('rule', rule_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid)") + end + + it "sets the reason & trackEvents: true if fallthrough has trackEventsFallthrough set to true" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + flag[:trackEventsFallthrough] = true + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("FALLTHROUGH") + end + + it "doesn't set the reason & trackEvents if rule has trackEvents set to false" do + flag = createFlag('rule', rule_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to be_nil + expect(r[:reason]).to be_nil + end + + it "doesn't set the reason & trackEvents if fallthrough has trackEventsFallthrough set to false" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to be_nil + expect(r[:reason]).to be_nil + end + + it "sets trackEvents true and doesn't set the reason if flag[:trackEvents] = true" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + flag[:trackEvents] = true + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason]).to be_nil + end + end + end + + def createFlag(kind, rule) + if kind == 'rule' + { key: 'feature', on: true, rules: [rule], fallthrough: { variation: 0 }, variations: [ false, true ] } + elsif kind == 'fallthrough' + { key: 'feature', on: true, fallthrough: rule, variations: [ false, true ] } + else + { key: 'feature', on: true, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + end +end \ No newline at end of file From 4418ccef9cfca5d91380466838a47ff33e6501c4 Mon Sep 17 00:00:00 2001 From: Sam Stokes Date: Tue, 15 Jun 2021 10:45:13 -0700 Subject: [PATCH 178/292] Use camelCase for JSON property names (#151) The in_experiment attribute was added to reasons as part of #150 but it doesn't appear to be received in events. 
I think that's because it's sending it in JSON as "in_experiment" rather than "inExperiment" as we expect to parse it. --- lib/ldclient-rb/evaluation_detail.rb | 4 ++-- spec/impl/evaluator_rule_spec.rb | 6 +++--- spec/impl/evaluator_spec.rb | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index dc2e6bab..4eae67bc 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -245,7 +245,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json case @kind when :RULE_MATCH if @in_experiment - { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, in_experiment: @in_experiment } + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, inExperiment: @in_experiment } else { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } end @@ -255,7 +255,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json { kind: @kind, errorKind: @error_kind } when :FALLTHROUGH if @in_experiment - { kind: @kind, in_experiment: @in_experiment } + { kind: @kind, inExperiment: @in_experiment } else { kind: @kind } end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 8f6c207f..7299decb 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -99,7 +99,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -109,7 +109,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -119,7 +119,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 4b0f3741..543b524d 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -311,7 +311,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -325,7 +325,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -339,7 +339,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + 
expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end end From 4c2fd31e1915192ddd318b010b7534486ef1b8b4 Mon Sep 17 00:00:00 2001 From: Kerrie Martinez Date: Wed, 16 Jun 2021 16:59:34 -0700 Subject: [PATCH 179/292] fixing ruby logic causing ih failures (#152) * fixing ruby logic * adding missing spec * Apply suggestions from code review Co-authored-by: Sam Stokes * pr tweaks * making spec language consistent Co-authored-by: Sam Stokes --- lib/ldclient-rb/impl/evaluator_bucketing.rb | 18 +-- spec/impl/evaluator_bucketing_spec.rb | 127 ++++++++++++++++---- 2 files changed, 112 insertions(+), 33 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index f2f2075f..11842f74 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -10,29 +10,26 @@ module EvaluatorBucketing # @param user [Object] the user properties # @return [Number] the variation index, or nil if there is an error def self.variation_index_for_user(flag, rule, user) - in_experiment = nil variation = rule[:variation] - return variation, in_experiment if !variation.nil? # fixed variation + return variation, false if !variation.nil? # fixed variation rollout = rule[:rollout] - return nil, in_experiment if rollout.nil? + return nil, false if rollout.nil? variations = rollout[:variations] if !variations.nil? && variations.length > 0 # percentage rollout - rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] seed = rollout[:seed] bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt], seed) # may not be present sum = 0; variations.each do |variate| - if rule[:rollout][:kind] == "experiment" && !variate[:untracked] + if rollout[:kind] == "experiment" && !variate[:untracked] in_experiment = true end sum += variate[:weight].to_f / 100000.0 - if bucket < sum - return variate[:variation], in_experiment + return variate[:variation], !!in_experiment end end # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -40,9 +37,12 @@ def self.variation_index_for_user(flag, rule, user) # data could contain buckets that don't actually add up to 100000. Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. 
- [ variations[-1][:variation], in_experiment ] + last_variation = variations[-1] + in_experiment = rollout[:kind] == "experiment" && !last_variation[:untracked] + + [last_variation[:variation], in_experiment] else # the rule isn't well-formed - [ nil, in_experiment ] + [nil, false] end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 35775838..98dbd13d 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -6,7 +6,7 @@ describe "bucket_user" do describe "seed exists" do let(:seed) { 61 } - it "gets the expected bucket values for seed" do + it "returns the expected bucket values for seed" do user = { key: "userKeyA" } bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.09801207); @@ -20,14 +20,29 @@ expect(bucket).to be_within(0.0000001).of(0.9242641); end - it "should return the same bucket if the seed and user is the same" do + it "returns the same bucket regardless of hashKey and salt" do user = { key: "userKeyA" } - bucket1 = subject.bucket_user(user, "hashKey", "bucket_by", "saltyA", seed) - bucket2 = subject.bucket_user(user, "hashKey1", "bucket_by", "saltyB", seed) - bucket3 = subject.bucket_user(user, "hashKey2", "bucket_by", "saltyC", seed) + bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed) + bucket3 = subject.bucket_user(user, "hashKey2", "key", "saltyC", seed) expect(bucket1).to eq(bucket2) expect(bucket2).to eq(bucket3) end + + it "returns a different bucket if the seed is not the same" do + user = { key: "userKeyA" } + bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed+1) + expect(bucket1).to_not eq(bucket2) + end + + it "returns a different bucket if the user is not the same" do + user1 = { key: "userKeyA" } + user2 = { key: "userKeyB" } + bucket1 = subject.bucket_user(user1, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user2, "hashKey1", "key", "saltyB", seed) + expect(bucket1).to_not eq(bucket2) + end end it "gets expected bucket values for specific keys" do @@ -84,54 +99,118 @@ end describe "variation_index_for_user" do - it "matches bucket" do - user = { key: "userkey" } + context "rollout is not an experiment" do + it "matches bucket" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + expect(bucket_value).to be > 0 + expect(bucket_value).to be < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + rollout: { + variations: [ + { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + expect(result_variation).to be matched_variation + expect(inExperiment).to be(false) + end + + it 
"uses last bucket if bucket value is equal to total weight" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + + # We'll construct a list of variations that stops right at the target bucket value + rule = { + rollout: { + variations: [ + { variation: 0, weight: bucket_value } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + expect(result_variation).to be 0 + expect(inExperiment).to be(false) + end + end + end + + context "rollout is an experiment" do + it "returns whether user is in the experiment or not" do + user1 = { key: "userKeyA" } + user2 = { key: "userKeyB" } + user3 = { key: "userKeyC" } flag_key = "flagkey" salt = "salt" + seed = 61 - # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, - # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() - expect(bucket_value).to be > 0 - expect(bucket_value).to be < 100000 - - bad_variation_a = 0 - matched_variation = 1 - bad_variation_b = 2 + rule = { rollout: { + seed: seed, + kind: 'experiment', variations: [ - { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value - { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value - { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + { variation: 0, weight: 10000, untracked: false }, + { variation: 2, weight: 20000, untracked: false }, + { variation: 0, weight: 70000 , untracked: true } ] } } flag = { key: flag_key, salt: salt } - result_variation, _ = subject.variation_index_for_user(flag, rule, user) - expect(result_variation).to be matched_variation + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user1) + expect(result_variation).to be(0) + expect(inExperiment).to be(true) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user2) + expect(result_variation).to be(2) + expect(inExperiment).to be(true) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user3) + expect(result_variation).to be(0) + expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do user = { key: "userkey" } flag_key = "flagkey" salt = "salt" + seed = 61 - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { rollout: { + seed: seed, + kind: 'experiment', variations: [ - { variation: 0, weight: bucket_value } + { variation: 0, weight: bucket_value, untracked: false } ] } } flag = { key: flag_key, salt: salt } - result_variation, _ = subject.variation_index_for_user(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be 0 + expect(inExperiment).to be(true) end end end From 03dd676ffba5d8cee38f353d9b4ff5f1c774f449 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 14 Jul 2021 16:45:23 -0700 Subject: [PATCH 180/292] add log warning for missing user key (#153) * add log 
warnings for nil/empty user key * rm warning for empty string key * fix test --- lib/ldclient-rb/ldclient.rb | 6 ++++++ spec/ldclient_spec.rb | 14 +++----------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 37d80e9a..d96dd1f7 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -407,6 +407,12 @@ def evaluate_internal(key, user, default, event_factory) return detail end + if user[:key].nil? + @config.logger.warn { "[LDClient] Variation called with nil user key; returning default value" } + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) + return detail + end + if !initialized? if @store.initialized? @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index cad4c03c..8e2ef650 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -171,20 +171,12 @@ def event_processor client.variation("key", user_anonymous, "default") end - it "queues a feature event for an existing feature when user key is nil" do + it "does not queue a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: bad_user, - value: "default", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) client.variation("key", bad_user, "default") end From 0de2cab7f244d105b80617581cd01127a146a113 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Thu, 5 Aug 2021 17:39:31 -0700 Subject: [PATCH 181/292] diagnostic events should respect HTTPS_PROXY (#154) --- lib/ldclient-rb/impl/diagnostic_events.rb | 2 +- spec/diagnostic_events_spec.rb | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index 4c61a905..13a55756 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -79,7 +79,7 @@ def self.make_config_data(config) streamingDisabled: !config.stream?, userKeysCapacity: config.user_keys_capacity, userKeysFlushIntervalMillis: self.seconds_to_millis(config.user_keys_flush_interval), - usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY'), + usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY') || ENV.has_key?('HTTPS_PROXY'), usingRelayDaemon: config.use_ldd?, } ret diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index 0c4ef058..cc55e8f1 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -79,13 +79,15 @@ def expected_default_config end end - it "detects proxy" do - begin - ENV["http_proxy"] = 'http://my-proxy' - event = default_acc.create_init_event(Config.new) - expect(event[:configuration][:usingProxy]).to be true - ensure - ENV["http_proxy"] = nil + ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY'].each do |name| + it "detects proxy #{name}" do + begin + ENV["#{name}"] = 'http://my-proxy' + event = 
default_acc.create_init_event(Config.new) + expect(event[:configuration][:usingProxy]).to be true + ensure + ENV["#{name}"] = nil + end end end From 6bfb0301ab94817c3b94852fb68ab42563369070 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Fri, 6 Aug 2021 09:51:36 -0700 Subject: [PATCH 182/292] minor test simplification (#155) --- spec/diagnostic_events_spec.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index cc55e8f1..7e1bce7f 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -82,11 +82,11 @@ def expected_default_config ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY'].each do |name| it "detects proxy #{name}" do begin - ENV["#{name}"] = 'http://my-proxy' + ENV[name] = 'http://my-proxy' event = default_acc.create_init_event(Config.new) expect(event[:configuration][:usingProxy]).to be true ensure - ENV["#{name}"] = nil + ENV[name] = nil end end end From 57d8eaeaec0a31d463e28b642a34ced38acf3814 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 9 Aug 2021 17:50:44 -0700 Subject: [PATCH 183/292] allow higher minor versions of json and http gems --- launchdarkly-server-sdk.gemspec | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index dcf281fe..731c6947 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -38,8 +38,6 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" spec.add_runtime_dependency "ld-eventsource", "~> 2.0" - # lock json to 2.3.x as ruby libraries often remove - # support for older ruby versions in minor releases - spec.add_runtime_dependency "json", "~> 2.3.1" - spec.add_runtime_dependency "http", "~> 4.4.1" + spec.add_runtime_dependency "json", "~> 2.3" + spec.add_runtime_dependency "http", "~> 4.4" end From e1d7e83789d971a5a574a13a81eb78d5a9fccc51 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 10 Aug 2021 15:27:10 -0700 Subject: [PATCH 184/292] allow v5.x of http gem (#157) --- launchdarkly-server-sdk.gemspec | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 731c6947..d74ae3cf 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,8 +36,8 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "~> 2.0" + spec.add_runtime_dependency "ld-eventsource", "2.0.1" spec.add_runtime_dependency "json", "~> 2.3" - spec.add_runtime_dependency "http", "~> 4.4" + spec.add_runtime_dependency "http", ">= 4.4.0", "< 6.0.0" end From d6b7c4301fc2ce198fa42c7d0ee685c81555f6e5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 11 Aug 2021 10:04:42 -0700 Subject: [PATCH 185/292] use Bundler 2.2.10 + modernize CI config (#158) --- .circleci/config.yml | 98 +++++++++++++++------------------ launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 44 insertions(+), 56 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6e7dd560..90ae4c57 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,66 +1,54 @@ -version: 2 +version: 2.1 workflows: version: 2 test: jobs: - - test-2.5 - - test-2.6 - - test-2.7 - - test-3.0 - - test-jruby-9.2 - -ruby-docker-template: 
&ruby-docker-template - steps: - - checkout - - run: | - if [[ $CIRCLE_JOB == test-jruby* ]]; then - gem install jruby-openssl; # required by bundler, no effect on Ruby MRI - sudo apt-get update -y && sudo apt-get install -y build-essential - fi - - run: ruby -v - - run: gem install bundler - - run: bundle install - - run: mkdir ./rspec - - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - - store_test_results: - path: ./rspec - - store_artifacts: - path: ./rspec + - build-test-linux: + name: Ruby 2.5 + docker-image: cimg/ruby:2.5 + - build-test-linux: + name: Ruby 2.6 + docker-image: cimg/ruby:2.6 + - build-test-linux: + name: Ruby 2.7 + docker-image: cimg/ruby:2.7 + - build-test-linux: + name: Ruby 3.0 + docker-image: cimg/ruby:3.0 + - build-test-linux: + name: JRuby 9.2 + docker-image: circleci/jruby:9.2-jdk + jruby: true jobs: - test-2.5: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.5 - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-2.6: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.6 - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-2.7: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.7 - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-3.0: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:3.0 - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-jruby-9.2: - <<: *ruby-docker-template + build-test-linux: + parameters: + docker-image: + type: string + jruby: + type: boolean + default: false + docker: - - image: circleci/jruby:9.2-jdk + - image: <> - image: consul - image: redis - image: amazon/dynamodb-local + + steps: + - checkout + - when: + condition: <> + steps: + - run: gem install jruby-openssl # required by bundler, no effect on Ruby MRI + - run: sudo apt-get update -y && sudo apt-get install -y build-essential + - run: ruby -v + - run: gem install bundler -v 2.2.10 + - run: bundle _2.2.10_ install + - run: mkdir ./rspec + - run: bundle _2.2.10_ exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - store_test_results: + path: ./rspec + - store_artifacts: + path: ./rspec diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index d74ae3cf..4321c874 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -22,7 +22,7 @@ Gem::Specification.new do |spec| spec.required_ruby_version = ">= 2.5.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" - spec.add_development_dependency "bundler", "~> 2.1" + spec.add_development_dependency "bundler", "2.2.10" spec.add_development_dependency "rspec", "~> 3.10" spec.add_development_dependency "diplomat", "~> 2.4.2" spec.add_development_dependency "redis", "~> 4.2" From 3891054820c714f7fb1c29597f63fb6a001801ff Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Sep 2021 09:58:54 -0700 Subject: [PATCH 186/292] enable verbose rspec output --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 90ae4c57..6f0b95b4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -47,7 +47,7 @@ jobs: - run: gem install bundler -v 2.2.10 - run: bundle _2.2.10_ install - run: mkdir ./rspec - - run: bundle _2.2.10_ exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: bundle _2.2.10_ exec rspec --format documentation 
--format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: From 483799fb122b91534051427f8f100f34f1aac7db Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Sep 2021 10:14:30 -0700 Subject: [PATCH 187/292] fix socket factory tests --- spec/event_sender_spec.rb | 5 +++-- spec/ldclient_end_to_end_spec.rb | 8 ++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 5ad3f2f1..9be4073c 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -50,7 +50,8 @@ def with_sender_and_server with_server do |server| server.setup_ok_response("/bulk", "") - config = Config.new(events_uri: "http://events.com/bulk", socket_factory: SocketFactoryFromHash.new({"events.com" => server.port}), logger: $null_log) + config = Config.new(events_uri: "http://fake-event-server/bulk", + socket_factory: SocketFactoryFromHash.new({"fake-event-server" => server.port})) es = subject.new(sdk_key, config, nil, 0.1) result = es.send_event_data(fake_data, "", false) @@ -58,7 +59,7 @@ def with_sender_and_server expect(result.success).to be true req = server.await_request expect(req.body).to eq fake_data - expect(req.host).to eq "events.com" + expect(req.host).to eq "fake-event-server" end end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index a820b608..6366a6b7 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -128,13 +128,13 @@ module LaunchDarkly config = Config.new( stream: false, - base_uri: "http://polling.com", - events_uri: "http://events.com", + base_uri: "http://fake-polling-server", + events_uri: "http://fake-events-server", diagnostic_opt_out: true, logger: NullLogger.new, socket_factory: SocketFactoryFromHash.new({ - "polling.com" => poll_server.port, - "events.com" => events_server.port + "fake-polling-server" => poll_server.port, + "fake-events-server" => events_server.port }) ) with_client(config) do |client| From b57c6f4152330a71311155880446339cbd50293a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Sep 2021 10:16:19 -0700 Subject: [PATCH 188/292] restore log suppression --- spec/event_sender_spec.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 9be4073c..31bfb6ae 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -51,7 +51,8 @@ def with_sender_and_server server.setup_ok_response("/bulk", "") config = Config.new(events_uri: "http://fake-event-server/bulk", - socket_factory: SocketFactoryFromHash.new({"fake-event-server" => server.port})) + socket_factory: SocketFactoryFromHash.new({"fake-event-server" => server.port}), + logger: $null_log) es = subject.new(sdk_key, config, nil, 0.1) result = es.send_event_data(fake_data, "", false) From dc13eeea58b7a8f577b3fcf1bc7d071297a577c5 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Tue, 21 Sep 2021 15:31:12 -0700 Subject: [PATCH 189/292] Replacing deprecated circleci image usage (#159) --- .circleci/config.yml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6f0b95b4..8ddba394 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ workflows: docker-image: cimg/ruby:3.0 - build-test-linux: name: JRuby 9.2 - docker-image: circleci/jruby:9.2-jdk + docker-image: jruby:9.2-jdk jruby: true jobs: @@ -42,7 
+42,12 @@ jobs: condition: <> steps: - run: gem install jruby-openssl # required by bundler, no effect on Ruby MRI - - run: sudo apt-get update -y && sudo apt-get install -y build-essential + - run: apt-get update -y && apt-get install -y build-essential + - when: + condition: + not: <> + steps: + - run: sudo apt-get update -y && sudo apt-get install -y build-essential - run: ruby -v - run: gem install bundler -v 2.2.10 - run: bundle _2.2.10_ install From 4ff45fce8502d49d8b3eb145d2012a69c535a29c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 23 Sep 2021 13:27:18 -0700 Subject: [PATCH 190/292] use Releaser v2 config (#161) --- .ldrelease/build-docs.sh | 17 +++++++---------- .ldrelease/config.yml | 19 ++++++++++--------- 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/.ldrelease/build-docs.sh b/.ldrelease/build-docs.sh index 3b581297..8f41a5b6 100755 --- a/.ldrelease/build-docs.sh +++ b/.ldrelease/build-docs.sh @@ -2,17 +2,14 @@ # doc generation is not part of Releaser's standard Ruby project template -mkdir -p ./artifacts/ - cd ./docs make -cd .. +cd build/html -# Releaser will pick up docs generated in CI if we put an archive of them in the -# artifacts directory and name it docs.tar.gz or docs.zip. They will be uploaded -# to GitHub Pages and also attached as release artifacts. There's no separate -# "publish-docs" step because the external service that also hosts them doesn't -# require an upload, it just picks up gems automatically. +# Releaser will pick up generated docs if we put them in the designated +# directory. They will be uploaded to GitHub Pages and also attached as +# release artifacts. There's no separate "publish-docs" step because the +# external service that also hosts them doesn't require an upload, it just +# picks up gems automatically. -cd ./docs/build/html -tar cfz ../../../artifacts/docs.tar.gz * +cp -r * "${LD_RELEASE_DOCS_DIR}" diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index c1fcca80..59f2e577 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -1,8 +1,10 @@ +version: 2 + repo: public: ruby-server-sdk private: ruby-server-sdk-private -releasableBranches: +branches: - name: master - name: 5.x @@ -12,18 +14,17 @@ publications: - url: https://www.rubydoc.info/gems/launchdarkly-server-sdk description: documentation -template: - name: ruby - -circleci: - linux: - image: circleci/ruby:2.6.6-buster - context: org-global +jobs: + - docker: + image: ruby:2.5-buster + copyGitHistory: true # building gem requires git metadata due to use of "git ls-files" in the gemspec + template: + name: ruby env: LD_SKIP_DATABASE_TESTS: "1" # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI documentation: - githubPages: true + gitHubPages: true sdk: displayName: "Ruby" From 635b7f87e131644c47bf59f92365db8dd2e89d35 Mon Sep 17 00:00:00 2001 From: Ember Stevens Date: Fri, 24 Sep 2021 15:03:41 -0700 Subject: [PATCH 191/292] Updates docs URLs --- CHANGELOG.md | 4 ++-- CONTRIBUTING.md | 2 +- README.md | 6 +++--- lib/ldclient-rb/config.rb | 6 +++--- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- lib/ldclient-rb/interfaces.rb | 2 +- lib/ldclient-rb/ldclient.rb | 10 +++++----- 8 files changed, 17 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5b8d1bd7..c875e3e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -167,7 +167,7 @@ The gem name will also change. 
In the 5.5.6 release, it is still `ldclient-rb`; ## [5.5.0] - 2019-01-17 ### Added: -- It is now possible to use Consul or DynamoDB as a persistent feature store, similar to the existing Redis integration. See the `LaunchDarkly::Integrations::Consul` and `LaunchDarkly::Integrations::DynamoDB` modules, and the reference guide [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). +- It is now possible to use Consul or DynamoDB as a persistent feature store, similar to the existing Redis integration. See the `LaunchDarkly::Integrations::Consul` and `LaunchDarkly::Integrations::DynamoDB` modules, and the reference guide [Persistent data stores](https://docs.launchdarkly.com/sdk/concepts/data-stores). - There is now a `LaunchDarkly::Integrations::Redis` module, which is the preferred method for creating a Redis feature store. - All of the database feature stores now support local caching not only for individual feature flag queries, but also for `all_flags_state`. - The `Config` property `data_source` is the new name for `update_processor` and `update_processor_factory`. @@ -249,7 +249,7 @@ Fixed a regression in version 5.0.0 that could prevent the client from reconnect ## [4.0.0] - 2018-05-10 ### Changed: -- To reduce the network bandwidth used for analytics events, feature request events are now sent as counters rather than individual events, and user details are now sent only at intervals rather than in each event. These behaviors can be modified through the LaunchDarkly UI and with the new configuration option `inline_users_in_events`. For more details, see [Analytics Data Stream Reference](https://docs.launchdarkly.com/v2.0/docs/analytics-data-stream-reference). +- To reduce the network bandwidth used for analytics events, feature request events are now sent as counters rather than individual events, and user details are now sent only at intervals rather than in each event. These behaviors can be modified through the LaunchDarkly UI and with the new configuration option `inline_users_in_events`. ### Removed: - JRuby 1.7 is no longer supported. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fb244f5c..49c6df85 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,7 +1,7 @@ Contributing to the LaunchDarkly Server-side SDK for Ruby ================================================ -LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. Submitting bug reports and feature requests ------------------ diff --git a/README.md b/README.md index 2f7b01c6..8125c068 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ LaunchDarkly Server-side SDK for Ruby LaunchDarkly overview ------------------------- -[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/docs/getting-started) using LaunchDarkly today! 
+[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today! [![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) @@ -22,7 +22,7 @@ This version of the LaunchDarkly SDK has a minimum Ruby version of 2.5.0, or 9.2 Getting started ----------- -Refer to the [SDK documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-getting-started) for instructions on getting started with using the SDK. +Refer to the [SDK documentation](https://docs.launchdarkly.com/sdk/server-side/ruby#getting-started) for instructions on getting started with using the SDK. Learn more ----------- @@ -49,7 +49,7 @@ About LaunchDarkly * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. -* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/docs) for a complete list. +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. * Explore LaunchDarkly * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index edb21924..95cda71e 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -110,8 +110,8 @@ def stream? # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not # use polling or streaming to get feature flag updates from the server, but instead reads them # from the {#feature_store feature store}, which is assumed to be a database that is populated by - # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) - # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/home/relay-proxy) + # and ["Using a persistent data stores"](https://docs.launchdarkly.com/sdk/concepts/data-stores). # # All other properties related to streaming or polling are ignored if this option is set to true. # @@ -189,7 +189,7 @@ def offline? # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. 
# - # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more information, see ["Persistent data stores"](https://docs.launchdarkly.com/sdk/concepts/data-stores). # # @return [LaunchDarkly::Interfaces::FeatureStore] # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 189e118f..c3af07d5 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -7,7 +7,7 @@ module DynamoDB # # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can # use a persistent feature store, see the - # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # [SDK reference guide](https://docs.launchdarkly.com/sdk/features/storing-data#ruby). # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 22bad6ef..5792d554 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -25,7 +25,7 @@ def self.default_prefix # # Creates a Redis-backed persistent feature store. For more details about how and why you can # use a persistent feature store, see the - # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # [SDK reference guide](https://docs.launchdarkly.com/sdk/features/storing-data#rubys). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, # put the object returned by this method into the `feature_store` property of your diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index d2a9f862..9ea0932b 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -9,7 +9,7 @@ module Interfaces # client uses the feature store to persist feature flags and related objects received from # the LaunchDarkly service. Implementations must support concurrent access and updates. # For more about how feature stores can be used, see: - # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # [Using a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data#ruby). # # An entity that can be stored in a feature store is a hash that can be converted to and from # JSON, and that has at a minimum the following properties: `:key`, a string that is unique diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index d96dd1f7..1ba2e244 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -132,7 +132,7 @@ def toggle?(key, user, default = false) # # Creates a hash string that can be used by the JavaScript SDK to identify a user. - # For more information, see [Secure mode](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # For more information, see [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#configuring-secure-mode-in-the-javascript-client-side-sdk). # # @param user [Hash] the user properties # @return [String] a hash string @@ -172,11 +172,11 @@ def initialized? # # Other supported user attributes include IP address, country code, and an arbitrary hash of # custom attributes. 
For more about the supported user properties and how they work in - # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/docs/targeting-users). + # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/home/flags/targeting-users). # # The optional `:privateAttributeNames` user property allows you to specify a list of # attribute names that should not be sent back to LaunchDarkly. - # [Private attributes](https://docs.launchdarkly.com/docs/private-user-attributes) + # [Private attributes](https://docs.launchdarkly.com/home/users/attributes#creating-private-user-attributes) # can also be configured globally in {Config}. # # @example Basic user hash @@ -213,7 +213,7 @@ def variation(key, user, default) # be included in analytics events, if you are capturing detailed event data for this flag. # # For more information, see the reference guide on - # [Evaluation reasons](https://docs.launchdarkly.com/v2.0/docs/evaluation-reasons). + # [Evaluation reasons](https://docs.launchdarkly.com/sdk/concepts/evaluation-reasons). # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard @@ -260,7 +260,7 @@ def identify(user) # # As of this version’s release date, the LaunchDarkly service does not support the `metricValue` # parameter. As a result, specifying `metricValue` will not yet produce any different behavior - # from omitting it. Refer to the [SDK reference guide](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-track) + # from omitting it. Refer to the [SDK reference guide](https://docs.launchdarkly.com/sdk/features/events#ruby) # for the latest status. # # @param event_name [String] The name of the event From fbbe4e6f0c9a3591c72e8bdd5956b7876f496cfd Mon Sep 17 00:00:00 2001 From: ember-stevens <79482775+ember-stevens@users.noreply.github.com> Date: Sun, 26 Sep 2021 11:02:13 -0700 Subject: [PATCH 192/292] Update lib/ldclient-rb/ldclient.rb Co-authored-by: Louis Chan <91093020+louis-launchdarkly@users.noreply.github.com> --- lib/ldclient-rb/ldclient.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1ba2e244..ba2a7675 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -132,7 +132,7 @@ def toggle?(key, user, default = false) # # Creates a hash string that can be used by the JavaScript SDK to identify a user. - # For more information, see [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#configuring-secure-mode-in-the-javascript-client-side-sdk). + # For more information, see [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#ruby). # # @param user [Hash] the user properties # @return [String] a hash string From 7e2efc89246457079261ce94489f1960cb37c727 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Fri, 8 Oct 2021 14:55:30 -0400 Subject: [PATCH 193/292] remove reliance on git in gemspec (#163) --- .ldrelease/config.yml | 1 - launchdarkly-server-sdk.gemspec | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index 59f2e577..7c44d0e5 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -17,7 +17,6 @@ publications: jobs: - docker: image: ruby:2.5-buster - copyGitHistory: true # building gem requires git metadata due to use of "git ls-files" in the gemspec template: name: ruby env: diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 4321c874..4be199b3 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -3,6 +3,7 @@ lib = File.expand_path("../lib", __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require "ldclient-rb/version" +require "rake" # rubocop:disable Metrics/BlockLength Gem::Specification.new do |spec| @@ -15,9 +16,8 @@ Gem::Specification.new do |spec| spec.homepage = "https://github.com/launchdarkly/ruby-server-sdk" spec.license = "Apache-2.0" - spec.files = `git ls-files -z`.split("\x0") + spec.files = FileList["lib/**/*", "README.md", "LICENSE.txt"] spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } - spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.required_ruby_version = ">= 2.5.0" From 833105824643922df85a42bf066bb45ad0cfa8c9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 12 Oct 2021 16:00:19 -0700 Subject: [PATCH 194/292] use ruby-eventsource 2.1.1 for fix of sc-123850 and sc-125504 (#164) * use ruby-eventsource 2.1.1 for fix of sc-123850 and sc-125504 * comment phrasing --- launchdarkly-server-sdk.gemspec | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 4be199b3..d2be98d1 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,7 +36,9 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "2.0.1" + spec.add_runtime_dependency "ld-eventsource", "2.1.1" + # Please keep ld-eventsource dependency as an exact version so that bugfixes to + # that LD library are always associated with a new SDK version. spec.add_runtime_dependency "json", "~> 2.3" spec.add_runtime_dependency "http", ">= 4.4.0", "< 6.0.0" From 951df1af62020020ff9228346f74d5de5e7aacca Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Sun, 21 Nov 2021 21:21:39 +0000 Subject: [PATCH 195/292] Start work on flag builder. 
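A rough sketch of how the new builder is meant to be used, based on the specs
added in this patch (the flag key below is a placeholder, and the data source
that will consume these builders arrives in a later commit):

    require "ldclient-rb/impl/integrations/test_data_impl"

    builder = LaunchDarkly::Impl::Integrations::TestData::FlagBuilder.new("my-flag")
    flag_hash = builder.boolean_flag.build(1)

    flag_hash[:variations]    # => [true, false]
    flag_hash[:fallthrough]   # => { variation: 0 }
    flag_hash[:off_variation] # => 1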
--- .../impl/integrations/test_data_impl.rb | 114 ++++++++++++++++++ spec/impl/integrations/test_data_impl_spec.rb | 76 ++++++++++++ 2 files changed, 190 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/test_data_impl.rb create mode 100644 spec/impl/integrations/test_data_impl_spec.rb diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb new file mode 100644 index 00000000..e1770bca --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -0,0 +1,114 @@ + +module LaunchDarkly + module Impl + module Integrations + class TestData + class FlagBuilder + private + TRUE_VARIATION_INDEX = 0 + FALSE_VARIATION_INDEX = 1 + + def is_boolean_flag + @variations.size == 2 && + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false + end + + def variation_for_boolean(variation) + variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX + end + + def set_rules(rules) + self + end + + def set_targets(rules) + self + end + + public + def initialize(key, **args) + @key = key + @on = args[:on] || true + @variations = args[:variations] || [] + @off_variation = args[:off_variation] + @fallthrough_variation = args[:fallthrough_variation] + @rules = args[:rules] + @targets = args[:targets] + end + + def copy + FlagBuilder.new @key, + on: @on, + variations: @variations.clone, + off_variation: @off_variation, + fallthrough_variation: @fallthrough_variation, + rules: @rules, + targets: @targets + end + + def on(aBool) + @on = aBool + self + end + + def fallthrough_variation(variation) + if [true,false].include? variation then + boolean_flag.fallthrough_variation(variation_for_boolean(variation)) + else + @fallthrough_variation = variation + self + end + end + + def off_variation(variation) + if [true,false].include? variation then + boolean_flag.off_variation(variation_for_boolean(variation)) + else + @off_variation = variation + self + end + end + + def variations(*variations) + @variations = variations + self + end + + def variation_for_all_users(variation) + if [true,false].include? 
variation then + boolean_flag.variation_for_all_users(variation_for_boolean(variation)) + else + on(true).fallthrough_variation(variation) + end + end + + def value_for_all_users(value) + variations(value).variation_for_all_users(0) + end + + def boolean_flag + if is_boolean_flag then + self + else + variations(true, false) + .fallthrough_variation(TRUE_VARIATION_INDEX) + .off_variation(FALSE_VARIATION_INDEX) + end + end + + def build(version) + { + key: @key, + version: version, + on: @on, + off_variation: @off_variation, + fallthrough: { variation: @fallthrough_variation }, + variations: @variations + } + end + end + end + end + end +end diff --git a/spec/impl/integrations/test_data_impl_spec.rb b/spec/impl/integrations/test_data_impl_spec.rb new file mode 100644 index 00000000..5f048fe7 --- /dev/null +++ b/spec/impl/integrations/test_data_impl_spec.rb @@ -0,0 +1,76 @@ +require "ldclient-rb/impl/integrations/test_data_impl" + +module LaunchDarkly + module Impl + module Integrations + describe 'TestData' do + describe 'FlagBuilder' do + + it 'defaults to targeting on and sets the flag key' do + f = TestData::FlagBuilder.new('flag').build(1) + expect(f[:key]).to eq('flag') + expect(f[:version]).to eq(1) + expect(f[:on]).to eq(true) + expect(f[:variations]).to be_empty + end + + it 'can set targeting off' do + f = TestData::FlagBuilder.new('flag').on(false).build(1) + expect(f[:on]).to eq(false) + end + + it 'can set fallthrough variation' do + f = TestData::FlagBuilder.new('flag').fallthrough_variation(0).build(1) + expect(f[:fallthrough][:variation]).to eq(0) + end + + it 'can set variation for when targeting is off' do + f = TestData::FlagBuilder.new('flag').off_variation(0).build(1) + expect(f[:off_variation]).to eq(0) + end + + it 'can set a list of variations' do + f = TestData::FlagBuilder.new('flag').variations(true, false).build(1) + expect(f[:variations]).to eq([true, false]) + end + + it 'has the boolean_flag shortcut method' do + f = TestData::FlagBuilder.new('flag').boolean_flag.build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:off_variation]).to eq(1) + end + + it 'can handle boolean or index variation' do + f = TestData::FlagBuilder.new('flag').off_variation(true).build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:off_variation]).to eq(0) + + f2 = TestData::FlagBuilder.new('flag').fallthrough_variation(true).build(1) + expect(f2[:variations]).to eq([true, false]) + expect(f2[:off_variation]).to eq(1) + end + + it 'can set variation for all users' do + f = TestData::FlagBuilder.new('flag').variation_for_all_users(true).build(1) + expect(f[:rules]).to be_nil + expect(f[:targets]).to be_nil + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'can make an immutable copy of its self' do + fb = TestData::FlagBuilder.new('flag').variation_for_all_users(true) + fbcopy = fb.copy.variation_for_all_users(false) + f = fb.build(0) + fcopy = fbcopy.build(0) + + expect(f[:key]).to eql(fcopy[:key]) + expect(f[:variations]).to eql(fcopy[:variations]) + expect(f[:fallthrough][:variation]).to be(0) + expect(fcopy[:fallthrough][:variation]).to be(1) + end + end + end + end + end +end From 7f9091d176736af09b0e0ef4d0c91a37a2ed820f Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Mon, 22 Nov 2021 16:40:30 +0000 Subject: [PATCH 196/292] Add user targeting and rule builder --- .../impl/integrations/test_data_impl.rb | 256 +++++++++++++----- spec/impl/integrations/test_data_impl_spec.rb | 50 +++- 2 files changed, 236 
insertions(+), 70 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index e1770bca..9fc33ac6 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -3,110 +3,230 @@ module LaunchDarkly module Impl module Integrations class TestData - class FlagBuilder - private - TRUE_VARIATION_INDEX = 0 - FALSE_VARIATION_INDEX = 1 - def is_boolean_flag - @variations.size == 2 && - @variations[TRUE_VARIATION_INDEX] == true && - @variations[FALSE_VARIATION_INDEX] == false + class DeepCopyHash < Hash + def initialize_copy(other) + other.each do | key, value | + self[key] = value.clone end + end + end - def variation_for_boolean(variation) - variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX + class DeepCopyArray < Array + def initialize_copy(other) + other.each do | value | + self.push(value.clone) end + end + end + + class FlagBuilder + def initialize(key) + @key = key + @on = true + @variations = [] + end + + def initialize_copy(other) + super(other) + @variations = @variations.clone + @rules = @rules.nil? ? nil : @rules.clone + @targets = @targets.nil? ? nil : @targets.clone + end + + def on(aBool) + @on = aBool + self + end - def set_rules(rules) + def fallthrough_variation(variation) + if [true,false].include? variation then + boolean_flag.fallthrough_variation(variation_for_boolean(variation)) + else + @fallthrough_variation = variation self end + end - def set_targets(rules) + def off_variation(variation) + if [true,false].include? variation then + boolean_flag.off_variation(variation_for_boolean(variation)) + else + @off_variation = variation self end + end - public - def initialize(key, **args) - @key = key - @on = args[:on] || true - @variations = args[:variations] || [] - @off_variation = args[:off_variation] - @fallthrough_variation = args[:fallthrough_variation] - @rules = args[:rules] - @targets = args[:targets] + def variations(*variations) + @variations = variations + self + end + + def variation_for_all_users(variation) + if [true,false].include? variation then + boolean_flag.variation_for_all_users(variation_for_boolean(variation)) + else + on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) end + end + + def value_for_all_users(value) + variations(value).variation_for_all_users(0) + end - def copy - FlagBuilder.new @key, - on: @on, - variations: @variations.clone, - off_variation: @off_variation, - fallthrough_variation: @fallthrough_variation, - rules: @rules, - targets: @targets + def variation_for_user(user_key, variation) + if [true,false].include? variation then + boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) + else + if @targets.nil? then + @targets = DeepCopyHash.new + end + @variations.count.times do | i | + if i == variation then + if @targets[i].nil? then + @targets[i] = [user_key] + else + @targets[i].push(user_key) + end + elsif not @targets[i].nil? then + @targets[i].delete(user_key) + end + end + self end + end + + def if_match(attribute, *values) + FlagRuleBuilder.new(self).and_match(attribute, *values) + end + def if_not_match(attribute, *values) + FlagRuleBuilder.new(self).and_not_match(attribute, *values) + end + + def clear_user_targets + @targets = nil + self + end - def on(aBool) - @on = aBool + def clear_rules + @rules = nil + self + end + + def add_rule(rule) + if @rules.nil? 
then + @rules = DeepCopyArray.new + end + @rules.push(rule) + self + end + + def boolean_flag + if is_boolean_flag then self + else + variations(true, false) + .fallthrough_variation(TRUE_VARIATION_INDEX) + .off_variation(FALSE_VARIATION_INDEX) end + end - def fallthrough_variation(variation) - if [true,false].include? variation then - boolean_flag.fallthrough_variation(variation_for_boolean(variation)) - else - @fallthrough_variation = variation - self - end + def build(version) + res = { key: @key, + version: version, + on: @on, + } + + unless @off_variation.nil? then + res[:off_variation] = @off_variation end - def off_variation(variation) - if [true,false].include? variation then - boolean_flag.off_variation(variation_for_boolean(variation)) - else - @off_variation = variation - self - end + unless @fallthrough_variation.nil? then + res[:fallthrough] = { variation: @fallthrough_variation } end - def variations(*variations) - @variations = variations - self + unless @variations.nil? then + res[:variations] = @variations end - def variation_for_all_users(variation) - if [true,false].include? variation then - boolean_flag.variation_for_all_users(variation_for_boolean(variation)) - else - on(true).fallthrough_variation(variation) + unless @targets.nil? then + targets = Array.new + @targets.each do | variation, values | + targets.push({ variation: variation, values: values }) end + res[:targets] = targets + end + + unless @rules.nil? then + res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } + end + res + end + + class FlagRuleBuilder + def initialize(flag_builder) + @flag_builder = flag_builder + @clauses = DeepCopyArray.new + end + + def intialize_copy(other) + super(other) + @clauses = @clauses.clone + end + + def and_match(attribute, *values) + @clauses.push({ + attribute: attribute, + op: 'in', + values: values, + negate: false + }) + self end - def value_for_all_users(value) - variations(value).variation_for_all_users(0) + def and_not_match(attribute, *values) + @clauses.push({ + attribute: attribute, + op: 'in', + values: values, + negate: true + }) + self end - def boolean_flag - if is_boolean_flag then - self + def then_return(variation) + if [true, false].include? variation then + @variation = @flag_builder.variation_for_boolean(variation) + @flag_builder.boolean_flag.add_rule(self) else - variations(true, false) - .fallthrough_variation(TRUE_VARIATION_INDEX) - .off_variation(FALSE_VARIATION_INDEX) + @variation = variation + @flag_builder.add_rule(self) end end - def build(version) + def build(ri) { - key: @key, - version: version, - on: @on, - off_variation: @off_variation, - fallthrough: { variation: @fallthrough_variation }, - variations: @variations + id: 'rule' + ri.to_s, + variation: @variation, + clauses: @clauses } end + end + + def variation_for_boolean(variation) + variation ? 
TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX + end + + private + TRUE_VARIATION_INDEX = 0 + FALSE_VARIATION_INDEX = 1 + + def is_boolean_flag + @variations.size == 2 && + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false + end + end end end diff --git a/spec/impl/integrations/test_data_impl_spec.rb b/spec/impl/integrations/test_data_impl_spec.rb index 5f048fe7..92087b3b 100644 --- a/spec/impl/integrations/test_data_impl_spec.rb +++ b/spec/impl/integrations/test_data_impl_spec.rb @@ -58,17 +58,63 @@ module Integrations expect(f[:fallthrough][:variation]).to be(0) end + it 'clears existing rules when setting variation for all users' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .then_return(false) + .variation_for_user('ben', false) + .variation_for_all_users(true).build(1) + expect(f[:rules]).to be_nil + expect(f[:targets]).to be_nil + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'can set a variation for a specific user' do + f = TestData::FlagBuilder.new('flag') + .variation_for_user('ben', false) + f2 = f.clone.variation_for_user('ben', true) + expect(f.build(0)[:targets]).to eql([ { variation: 1, values: ['ben'] } ]) + expect(f2.build(1)[:targets]).to_not include({ variation: 1, values: ['ben'] }) + expect(f2.build(1)[:targets]).to include({ variation: 0, values: ['ben'] }) + end + it 'can make an immutable copy of its self' do fb = TestData::FlagBuilder.new('flag').variation_for_all_users(true) - fbcopy = fb.copy.variation_for_all_users(false) + expect(fb.build(0)).to eql(fb.clone.build(0)) + + fcopy = fb.clone.variation_for_all_users(false).build(0) f = fb.build(0) - fcopy = fbcopy.build(0) expect(f[:key]).to eql(fcopy[:key]) expect(f[:variations]).to eql(fcopy[:variations]) expect(f[:fallthrough][:variation]).to be(0) expect(fcopy[:fallthrough][:variation]).to be(1) end + + it 'can build rules based on attributes' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .and_not_match('country', 'fr') + .then_return(true) + .build(1) + expect(f[:rules]).to eql([{ + id: "rule0", + variation: 0, + clauses: [{ + attribute: 'name', + op: 'in', + values: ['ben'], + negate: false, + }, + { + attribute: 'country', + op: 'in', + values: ['fr'], + negate: true, + } + ] + }]) + end end end end From 4fdc7e814edf7270480174b64b842948cedb66d7 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Mon, 22 Nov 2021 21:44:00 +0000 Subject: [PATCH 197/292] Add datasource implementation --- .../impl/integrations/test_data_impl.rb | 94 +++++++++++++++++- lib/ldclient-rb/impl/store_data_set_sorter.rb | 1 + spec/impl/integrations/test_data_impl_spec.rb | 97 +++++++++++++++++++ 3 files changed, 190 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 9fc33ac6..2cd74c8f 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -1,8 +1,95 @@ +require 'concurrent/atomics' +require 'ldclient-rb/interfaces' module LaunchDarkly module Impl module Integrations class TestData + def self.factory + TestData.new + end + + def arity + 2 + end + + def call(_, config) + impl = DataSourceImpl.new(config.feature_store, self) + @instances_lock.with_write_lock { @instances.push(impl) } + impl + end + + def make_init_data + { FEATURES => @current_flags } + end + + def closed_instance(instance) + @instances_lock.with_write_lock { @instances.delete(instance) } + end + + def 
initialize + @flag_builders = Hash.new + @current_flags = Hash.new + @instances = Array.new + @instances_lock = Concurrent::ReadWriteLock.new + @lock = Concurrent::ReadWriteLock.new + end + + def flag(flag_name) + existing_builder = @lock.with_read_lock { @flag_builders[flag_name] } + if existing_builder.nil? then + FlagBuilder.new(flag_name).boolean_flag + else + existing_builder.clone + end + end + + def update(flag_builder) + new_flag = nil + @lock.with_write_lock do + @flag_builders[flag_builder.key] = flag_builder + version = 0 + if @current_flags[flag_builder.key] then + version = @current_flags[flag_builder.key][:version] + end + new_flag = flag_builder.build(version+1) + @current_flags[flag_builder.key] = new_flag + end + @instances_lock.with_read_lock do + @instances.each do | instance | + instance.upsert(new_flag) + end + end + end + + class DataSourceImpl + include LaunchDarkly::Interfaces::DataSource + + def initialize(feature_store, test_data) + @feature_store = feature_store + @test_data = test_data + end + + def initialized? + true + end + + def start + ready = Concurrent::Event.new + ready.set + init_data = @test_data.make_init_data + @feature_store.init(init_data) + ready + end + + def stop + @test_data.closed_instance(self) + end + + def upsert(new_flag) + @feature_store.upsert(FEATURES, new_flag) + end + end class DeepCopyHash < Hash def initialize_copy(other) @@ -21,6 +108,8 @@ def initialize_copy(other) end class FlagBuilder + attr_reader :key + def initialize(key) @key = key @on = true @@ -160,13 +249,14 @@ def build(version) unless @rules.nil? then res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } end + res end class FlagRuleBuilder def initialize(flag_builder) @flag_builder = flag_builder - @clauses = DeepCopyArray.new + @clauses = Array.new end def intialize_copy(other) @@ -208,7 +298,7 @@ def build(ri) { id: 'rule' + ri.to_s, variation: @variation, - clauses: @clauses + clauses: @clauses.clone } end end diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4454fe75..c13795ef 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -37,6 +37,7 @@ def self.sort_collection(kind, input) # pick a random item that hasn't been updated yet key, item = remaining_items.first self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + remaining_items.delete(key) # we won't need to visit this item again end items_out end diff --git a/spec/impl/integrations/test_data_impl_spec.rb b/spec/impl/integrations/test_data_impl_spec.rb index 92087b3b..1d94b8f6 100644 --- a/spec/impl/integrations/test_data_impl_spec.rb +++ b/spec/impl/integrations/test_data_impl_spec.rb @@ -1,9 +1,106 @@ require "ldclient-rb/impl/integrations/test_data_impl" +require "ldclient-rb/cache_store" +require "ldclient-rb/interfaces" +require "ldclient-rb/in_memory_store" +require "ldclient-rb/config" +require "ldclient-rb/events" +require "ldclient-rb/ldclient" module LaunchDarkly module Impl module Integrations describe 'TestData' do + it 'is a valid datasource' do + td = TestData.factory + config = LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + expect(config.feature_store.all(LaunchDarkly::FEATURES)).to eql({}) + client.close + end + + it 'initializes the feature store with existing flags' do + td = TestData.factory + td.update(td.flag('flag')) + config = 
LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + client.close + end + + it 'updates the feature store with new flags' do + td = TestData.factory + td.update(td.flag('flag')) + config = LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + config2 = LaunchDarkly::Config.new(send_events: false, data_source: td) + client2 = LaunchDarkly::LDClient.new('sdkKey', config2) + + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + + td.update(td.flag('flag').variation_for_all_users(false)) + + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + off_variation: 1, + on: true, + version: 2 + }) + expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + off_variation: 1, + on: true, + version: 2 + }) + + client.close + client2.close + end + + it 'TestData.flag defaults to a boolean flag' do + td = TestData.new + f = td.flag('flag').build(0) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:off_variation]).to eq(1) + end + + it 'TestData.flag returns a copy of the existing flag if it exists' do + td = TestData.new + td.update(td.flag('flag').variation_for_all_users(true)) + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + + #modify the flag but dont call update + td.flag('flag').variation_for_all_users(false).build(0) + + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + end + describe 'FlagBuilder' do it 'defaults to targeting on and sets the flag key' do From 58c60d72bc15f3806a27504a9cbb3fbcd3340f13 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Tue, 23 Nov 2021 00:45:01 +0000 Subject: [PATCH 198/292] Convert the current_flags hash to use symbols instead of strings as keys --- lib/ldclient-rb/impl/integrations/test_data_impl.rb | 7 ++++--- lib/ldclient-rb/impl/store_data_set_sorter.rb | 1 - 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 2cd74c8f..505ef037 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -49,11 +49,12 @@ def update(flag_builder) @lock.with_write_lock do @flag_builders[flag_builder.key] = flag_builder version = 0 - if @current_flags[flag_builder.key] then - version = @current_flags[flag_builder.key][:version] + flag_key = flag_builder.key.to_sym + if @current_flags[flag_key] then + version = @current_flags[flag_key][:version] end new_flag = flag_builder.build(version+1) - @current_flags[flag_builder.key] = new_flag + @current_flags[flag_key] = new_flag end @instances_lock.with_read_lock do @instances.each do | instance 
| diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index c13795ef..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -37,7 +37,6 @@ def self.sort_collection(kind, input) # pick a random item that hasn't been updated yet key, item = remaining_items.first self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) - remaining_items.delete(key) # we won't need to visit this item again end items_out end From 42e642ea61c5e4e84456f1055800ef18627fdff9 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Mon, 22 Nov 2021 18:52:21 -0600 Subject: [PATCH 199/292] Fix typo on FlagRuleBuilder copy constructor --- lib/ldclient-rb/impl/integrations/test_data_impl.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 9fc33ac6..fa5eef8e 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -169,7 +169,7 @@ def initialize(flag_builder) @clauses = DeepCopyArray.new end - def intialize_copy(other) + def initialize_copy(other) super(other) @clauses = @clauses.clone end From 2b75a67328b7b326812feb0e113bca6df3b37a7d Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Tue, 23 Nov 2021 15:24:50 +0000 Subject: [PATCH 200/292] minor refactoring of impl; Added use of new Clause struct instead of Hash in FlagRuleBuilder; Moved TestData.factory out of Impl namespace and renamed Impl to TestDataImpl --- .../impl/integrations/test_data_impl.rb | 28 +++++----- lib/ldclient-rb/integrations.rb | 8 ++- lib/ldclient-rb/integrations/test_data.rb | 11 ++++ spec/impl/integrations/test_data_impl_spec.rb | 52 +++++++++---------- 4 files changed, 56 insertions(+), 43 deletions(-) create mode 100644 lib/ldclient-rb/integrations/test_data.rb diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 505ef037..138fb1bf 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -4,17 +4,13 @@ module LaunchDarkly module Impl module Integrations - class TestData - def self.factory - TestData.new - end - + class TestDataImpl def arity 2 end def call(_, config) - impl = DataSourceImpl.new(config.feature_store, self) + impl = TestDataSource.new(config.feature_store, self) @instances_lock.with_write_lock { @instances.push(impl) } impl end @@ -63,7 +59,7 @@ def update(flag_builder) end end - class DataSourceImpl + class TestDataSource include LaunchDarkly::Interfaces::DataSource def initialize(feature_store, test_data) @@ -240,11 +236,9 @@ def build(version) end unless @targets.nil? then - targets = Array.new - @targets.each do | variation, values | - targets.push({ variation: variation, values: values }) + res[:targets] = @targets.collect do | variation, values | + { variation: variation, values: values } end - res[:targets] = targets end unless @rules.nil? 
then @@ -255,6 +249,8 @@ def build(version) end class FlagRuleBuilder + FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) + def initialize(flag_builder) @flag_builder = flag_builder @clauses = Array.new @@ -266,22 +262,22 @@ def intialize_copy(other) end def and_match(attribute, *values) - @clauses.push({ + @clauses.push(FlagRuleClause.new( attribute: attribute, op: 'in', values: values, negate: false - }) + )) self end def and_not_match(attribute, *values) - @clauses.push({ + @clauses.push(FlagRuleClause.new( attribute: attribute, op: 'in', values: values, negate: true - }) + )) self end @@ -299,7 +295,7 @@ def build(ri) { id: 'rule' + ri.to_s, variation: @variation, - clauses: @clauses.clone + clauses: @clauses.collect(&:to_h) } end end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 8c9f6249..e161e18b 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,6 +1,7 @@ require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" +require "ldclient-rb/integrations/test_data" require "ldclient-rb/integrations/util/store_wrapper" module LaunchDarkly @@ -18,7 +19,7 @@ module Integrations module Consul # code is in ldclient-rb/impl/integrations/consul_impl end - + # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # @@ -51,5 +52,10 @@ module Redis module Util # code is in ldclient-rb/integrations/util/ end + + + module TestData + # code is in ldclient-rb/impl/integrations/test_data_impl + end end end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb new file mode 100644 index 00000000..04af121c --- /dev/null +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -0,0 +1,11 @@ +require 'ldclient-rb/impl/integrations/test_data_impl' + +module LaunchDarkly + module Integrations + module TestData + def self.factory + LaunchDarkly::Impl::Integrations::TestDataImpl.new + end + end + end +end diff --git a/spec/impl/integrations/test_data_impl_spec.rb b/spec/impl/integrations/test_data_impl_spec.rb index 1d94b8f6..6d89b31a 100644 --- a/spec/impl/integrations/test_data_impl_spec.rb +++ b/spec/impl/integrations/test_data_impl_spec.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/impl/integrations/test_data_impl" +require "ldclient-rb/integrations/test_data" require "ldclient-rb/cache_store" require "ldclient-rb/interfaces" require "ldclient-rb/in_memory_store" @@ -11,7 +11,7 @@ module Impl module Integrations describe 'TestData' do it 'is a valid datasource' do - td = TestData.factory + td = LaunchDarkly::Integrations::TestData.factory config = LaunchDarkly::Config.new(send_events: false, data_source: td) client = LaunchDarkly::LDClient.new('sdkKey', config) expect(config.feature_store.all(LaunchDarkly::FEATURES)).to eql({}) @@ -19,7 +19,7 @@ module Integrations end it 'initializes the feature store with existing flags' do - td = TestData.factory + td = LaunchDarkly::Integrations::TestData.factory td.update(td.flag('flag')) config = LaunchDarkly::Config.new(send_events: false, data_source: td) client = LaunchDarkly::LDClient.new('sdkKey', config) @@ -35,7 +35,7 @@ module Integrations end it 'updates the feature store with new flags' do - td = TestData.factory + td = LaunchDarkly::Integrations::TestData.factory td.update(td.flag('flag')) config = LaunchDarkly::Config.new(send_events: false, data_source: td) client = LaunchDarkly::LDClient.new('sdkKey', config) @@ -83,7 +83,7 @@ module 
Integrations end it 'TestData.flag defaults to a boolean flag' do - td = TestData.new + td = TestDataImpl.new f = td.flag('flag').build(0) expect(f[:variations]).to eq([true, false]) expect(f[:fallthrough][:variation]).to eq(0) @@ -91,7 +91,7 @@ module Integrations end it 'TestData.flag returns a copy of the existing flag if it exists' do - td = TestData.new + td = TestDataImpl.new td.update(td.flag('flag').variation_for_all_users(true)) expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) @@ -104,7 +104,7 @@ module Integrations describe 'FlagBuilder' do it 'defaults to targeting on and sets the flag key' do - f = TestData::FlagBuilder.new('flag').build(1) + f = TestDataImpl::FlagBuilder.new('flag').build(1) expect(f[:key]).to eq('flag') expect(f[:version]).to eq(1) expect(f[:on]).to eq(true) @@ -112,63 +112,63 @@ module Integrations end it 'can set targeting off' do - f = TestData::FlagBuilder.new('flag').on(false).build(1) + f = TestDataImpl::FlagBuilder.new('flag').on(false).build(1) expect(f[:on]).to eq(false) end it 'can set fallthrough variation' do - f = TestData::FlagBuilder.new('flag').fallthrough_variation(0).build(1) + f = TestDataImpl::FlagBuilder.new('flag').fallthrough_variation(0).build(1) expect(f[:fallthrough][:variation]).to eq(0) end it 'can set variation for when targeting is off' do - f = TestData::FlagBuilder.new('flag').off_variation(0).build(1) + f = TestDataImpl::FlagBuilder.new('flag').off_variation(0).build(1) expect(f[:off_variation]).to eq(0) end it 'can set a list of variations' do - f = TestData::FlagBuilder.new('flag').variations(true, false).build(1) + f = TestDataImpl::FlagBuilder.new('flag').variations(true, false).build(1) expect(f[:variations]).to eq([true, false]) end it 'has the boolean_flag shortcut method' do - f = TestData::FlagBuilder.new('flag').boolean_flag.build(1) + f = TestDataImpl::FlagBuilder.new('flag').boolean_flag.build(1) expect(f[:variations]).to eq([true, false]) expect(f[:fallthrough][:variation]).to eq(0) expect(f[:off_variation]).to eq(1) end it 'can handle boolean or index variation' do - f = TestData::FlagBuilder.new('flag').off_variation(true).build(1) + f = TestDataImpl::FlagBuilder.new('flag').off_variation(true).build(1) expect(f[:variations]).to eq([true, false]) expect(f[:off_variation]).to eq(0) - f2 = TestData::FlagBuilder.new('flag').fallthrough_variation(true).build(1) + f2 = TestDataImpl::FlagBuilder.new('flag').fallthrough_variation(true).build(1) expect(f2[:variations]).to eq([true, false]) expect(f2[:off_variation]).to eq(1) end it 'can set variation for all users' do - f = TestData::FlagBuilder.new('flag').variation_for_all_users(true).build(1) + f = TestDataImpl::FlagBuilder.new('flag').variation_for_all_users(true).build(1) expect(f[:rules]).to be_nil expect(f[:targets]).to be_nil expect(f[:fallthrough][:variation]).to be(0) end it 'clears existing rules when setting variation for all users' do - f = TestData::FlagBuilder.new('flag') + f = TestDataImpl::FlagBuilder.new('flag') .if_match('name', 'ben') .then_return(false) .variation_for_user('ben', false) .variation_for_all_users(true).build(1) - expect(f[:rules]).to be_nil - expect(f[:targets]).to be_nil + expect(f.keys).to_not include(:rules) + expect(f.keys).to_not include(:targets) expect(f[:fallthrough][:variation]).to be(0) end it 'can set a variation for a specific user' do - f = TestData::FlagBuilder.new('flag') - .variation_for_user('ben', false) + f = TestDataImpl::FlagBuilder.new('flag') + .variation_for_user('ben', false) f2 = 
f.clone.variation_for_user('ben', true) expect(f.build(0)[:targets]).to eql([ { variation: 1, values: ['ben'] } ]) expect(f2.build(1)[:targets]).to_not include({ variation: 1, values: ['ben'] }) @@ -176,7 +176,7 @@ module Integrations end it 'can make an immutable copy of its self' do - fb = TestData::FlagBuilder.new('flag').variation_for_all_users(true) + fb = TestDataImpl::FlagBuilder.new('flag').variation_for_all_users(true) expect(fb.build(0)).to eql(fb.clone.build(0)) fcopy = fb.clone.variation_for_all_users(false).build(0) @@ -189,11 +189,11 @@ module Integrations end it 'can build rules based on attributes' do - f = TestData::FlagBuilder.new('flag') - .if_match('name', 'ben') - .and_not_match('country', 'fr') - .then_return(true) - .build(1) + f = TestDataImpl::FlagBuilder.new('flag') + .if_match('name', 'ben') + .and_not_match('country', 'fr') + .then_return(true) + .build(1) expect(f[:rules]).to eql([{ id: "rule0", variation: 0, From 8a408b200dcc3fbc90e12fcb41580de07a9385ce Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Tue, 23 Nov 2021 20:25:20 +0000 Subject: [PATCH 201/292] Add the doc comments --- .../impl/integrations/test_data_impl.rb | 286 ++++++++++++++++-- lib/ldclient-rb/integrations/test_data.rb | 32 ++ 2 files changed, 297 insertions(+), 21 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 138fb1bf..016329a4 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -5,41 +5,74 @@ module LaunchDarkly module Impl module Integrations class TestDataImpl + def initialize + @flag_builders = Hash.new + @current_flags = Hash.new + @instances = Array.new + @instances_lock = Concurrent::ReadWriteLock.new + @lock = Concurrent::ReadWriteLock.new + end + + # + # Called internally by the SDK to determine what arguments to pass to call + # You do not need to call this method. + # def arity 2 end + # + # Called internally by the SDK to associate this test data source with an {@code LDClient} instance. + # You do not need to call this method. + # def call(_, config) impl = TestDataSource.new(config.feature_store, self) @instances_lock.with_write_lock { @instances.push(impl) } impl end - def make_init_data - { FEATURES => @current_flags } - end - - def closed_instance(instance) - @instances_lock.with_write_lock { @instances.delete(instance) } - end - - def initialize - @flag_builders = Hash.new - @current_flags = Hash.new - @instances = Array.new - @instances_lock = Concurrent::ReadWriteLock.new - @lock = Concurrent::ReadWriteLock.new - end - - def flag(flag_name) - existing_builder = @lock.with_read_lock { @flag_builders[flag_name] } + # + # Creates or copies a {@link FlagBuilder} for building a test flag configuration. + #

+ # If this flag key has already been defined in this {@code TestDataImpl} instance, then the builder + # starts with the same configuration that was last provided for this flag. + #

+ # Otherwise, it starts with a new default configuration in which the flag has {@code true} and + # {@code false} variations, is {@code true} for all users when targeting is turned on and + # {@code false} otherwise, and currently has targeting turned on. You can change any of those + # properties, and provide more complex behavior, using the {@link FlagBuilder} methods. + #

+ # Once you have set the desired configuration, pass the builder to {@link #update(FlagBuilder)}. + # + # @param key the flag key + # @return a flag configuration builder + # @see #update(FlagBuilder) + # + def flag(key) + existing_builder = @lock.with_read_lock { @flag_builders[key] } if existing_builder.nil? then - FlagBuilder.new(flag_name).boolean_flag + FlagBuilder.new(key).boolean_flag else existing_builder.clone end end + # + # Updates the test data with the specified flag configuration. + #

+ # This has the same effect as if a flag were added or modified on the LaunchDarkly dashboard. + # It immediately propagates the flag change to any {@code LDClient} instance(s) that you have + # already configured to use this {@code TestDataImpl}. If no {@code LDClient} has been started yet, + # it simply adds this flag to the test data which will be provided to any {@code LDClient} that + # you subsequently configure. + #

+ # Any subsequent changes to this {@link FlagBuilder} instance do not affect the test data, + # unless you call {@link #update(FlagBuilder)} again. + # + # @param flag_builder a flag configuration builder + # @return the same {@code TestDataImpl} instance + # @see #flag(String) + # def update(flag_builder) new_flag = nil @lock.with_write_lock do @@ -59,6 +92,14 @@ def update(flag_builder) end end + def make_init_data + { FEATURES => @current_flags } + end + + def closed_instance(instance) + @instances_lock.with_write_lock { @instances.delete(instance) } + end + class TestDataSource include LaunchDarkly::Interfaces::DataSource @@ -104,6 +145,12 @@ def initialize_copy(other) end end + # + # A builder for feature flag configurations to be used with {@link TestDataImpl}. + # + # @see TestDataImpl#flag(String) + # @see TestDataImpl#update(FlagBuilder) + # class FlagBuilder attr_reader :key @@ -120,11 +167,34 @@ def initialize_copy(other) @targets = @targets.nil? ? nil : @targets.clone end - def on(aBool) - @on = aBool + # + # Sets targeting to be on or off for this flag. + #

+ # The effect of this depends on the rest of the flag configuration, just as it does on the + # real LaunchDarkly dashboard. In the default configuration that you get from calling + # {@link TestDataImpl#flag(String)} with a new flag key, the flag will return {@code false} + # whenever targeting is off, and {@code true} when targeting is on. + # + # @param on true if targeting should be on + # @return the builder + # + def on(on) + @on = on self end + # + # Specifies the fallthrough variation. The fallthrough is the value + # that is returned if targeting is on and the user was not matched by a more specific + # target or rule. + #

+ # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation true or false or the desired fallthrough variation index: + # 0 for the first, 1 for the second, etc. + # @return the builder + # def fallthrough_variation(variation) if [true,false].include? variation then boolean_flag.fallthrough_variation(variation_for_boolean(variation)) @@ -134,6 +204,17 @@ def fallthrough_variation(variation) end end + # + # Specifies the off variation for a flag. This is the variation that is returned + # whenever targeting is off. + #

+ # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation true or false or the desired off variation index: + # 0 for the first, 1 for the second, etc. + # @return the builder + # def off_variation(variation) if [true,false].include? variation then boolean_flag.off_variation(variation_for_boolean(variation)) @@ -143,11 +224,34 @@ def off_variation(variation) end end + # + # Changes the allowable variation values for the flag. + #

+ # The value may be of any valid JSON type. For instance, a boolean flag + # normally has {@code true, false}; a string-valued flag might have + # {@code 'red', 'green'}; etc. + # + # @param variations the desired variations + # @return the builder + # def variations(*variations) @variations = variations self end + # + # Sets the flag to always return the specified variation for all users. + #

+ # The variation is specified, Targeting is switched on, and any existing targets or rules are removed. + # The fallthrough variation is set to the specified value. The off variation is left unchanged. + #

+ # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return the builder + # def variation_for_all_users(variation) if [true,false].include? variation then boolean_flag.variation_for_all_users(variation_for_boolean(variation)) @@ -156,10 +260,35 @@ def variation_for_all_users(variation) end end + # + # Sets the flag to always return the specified variation value for all users. + #

+ # The value may be of any valid JSON type. This method changes the + # flag to have only a single variation, which is this value, and to return the same + # variation regardless of whether targeting is on or off. Any existing targets or rules + # are removed. + # + # @param value the desired value to be returned for all users + # @return the builder + # def value_for_all_users(value) variations(value).variation_for_all_users(0) end + # + # Sets the flag to return the specified variation for a specific user key when targeting + # is on. + #
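A minimal sketch of value_for_all_users as described above, assuming a client has already been configured with this test data source; the flag key and value here are invented:

    td.update(td.flag("service-config").value_for_all_users({ "retries" => 3, "timeout" => 2.5 }))
    client.variation("service-config", { key: "some-user" }, {})
    # => { "retries" => 3, "timeout" => 2.5 }   # the single variation, returned for every user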

+ # This has no effect when targeting is turned off for the flag. + #

+ # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param user_key a user key + # @param variation true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return the builder + # def variation_for_user(user_key, variation) if [true,false].include? variation then boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) @@ -182,18 +311,65 @@ def variation_for_user(user_key, variation) end end + # + # Starts defining a flag rule, using the "is one of" operator. + #

+ # For example, this creates a rule that returns {@code true} if the name is "Patsy" or "Edina": + # + #


+          #     testData.flag("flag")
+          #         .if_match(:name, 'Patsy', 'Edina')
+          #         .then_return(true);
+          #
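A minimal end-to-end sketch of how a rule like the one above could behave once the flag reaches a client through this test data source; the flag key, user hashes, and SDK key are invented, and `variation` is the client's standard evaluation call:

    td = LaunchDarkly::Integrations::TestData.factory
    td.update(td.flag("greeting-enabled")
                .fallthrough_variation(false)
                .if_match(:name, 'Patsy', 'Edina')
                .then_return(true))
    config = LaunchDarkly::Config.new(data_source: td)
    client = LaunchDarkly::LDClient.new('fake-sdk-key', config)
    client.variation("greeting-enabled", { key: "u1", name: "Patsy" }, false)  # => true (rule matches)
    client.variation("greeting-enabled", { key: "u2", name: "Basil" }, false)  # => false (fallthrough)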
+ # + # @param attribute the user attribute to match against + # @param values values to compare to + # @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#then_return(boolean|int)} + # to finish the rule, or add more tests with another method like + # {@link FlagRuleBuilder#and_match(UserAttribute, LDValue...)} + # def if_match(attribute, *values) FlagRuleBuilder.new(self).and_match(attribute, *values) end + + # + # Starts defining a flag rule, using the "is not one of" operator. + #

+ # For example, this creates a rule that returns {@code true} if the name is neither "Saffron" nor "Bubble": + # + #


+          #     testData.flag("flag")
+          #         .if_not_match(:name, 'Saffron', 'Bubble')
+          #         .then_return(true)
+          #
+ # + # @param attribute the user attribute to match against + # @param values values to compare to + # @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#then_return(boolean|int)} + # to finish the rule, or add more tests with another method like + # {@link FlagRuleBuilder#and_match(UserAttribute, value...)} + # def if_not_match(attribute, *values) FlagRuleBuilder.new(self).and_not_match(attribute, *values) end + # + # Removes any existing user targets from the flag. This undoes the effect of methods like + # {@link #variation_for_user} + # + # @return the same builder + # def clear_user_targets @targets = nil self end + # + # Removes any existing rules from the flag. This undoes the effect of methods like + # {@link #if_match} + # + # @return the same builder + # def clear_rules @rules = nil self @@ -207,6 +383,16 @@ def add_rule(rule) self end + # + # A shortcut for setting the flag to use the standard boolean configuration. + #

+ # This is the default for all new flags created with {@link TestDataImpl#flag(String)}. The flag + # will have two variations, {@code true} and {@code false} (in that order); it will return + # {@code false} whenever targeting is off, and {@code true} when targeting is on if no other + # settings specify otherwise. + # + # @return the builder + # def boolean_flag if is_boolean_flag then self @@ -248,6 +434,19 @@ def build(version) res end + # + # A builder for feature flag rules to be used with {@link FlagBuilder}. + #

+ # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of + # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the + # rule's clauses match the user. + #
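For orientation, the spec changes earlier in this section show the approximate shape such a rule takes once built into the flag data; the attribute and values below mirror that spec:

    { id: 'rule0', variation: 0,
      clauses: [{ attribute: 'name', op: 'in', values: ['ben'], negate: false }] }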

+ # To start defining a rule, use one of the flag builder's matching methods such as + # {@link FlagBuilder#if_match}. This defines the first clause for the rule. + # Optionally, you may add more clauses with the rule builder's methods such as + # {@link #and_match} or {@link #and_not_match}. + # Finally, call {@link #then_return} to finish defining the rule. + # class FlagRuleBuilder FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) @@ -261,6 +460,23 @@ def intialize_copy(other) @clauses = @clauses.clone end + # + # Adds another clause, using the "is one of" operator. + #

+ # For example, this creates a rule that returns {@code true} if the name is "Patsy" and the + # country is "gb": + # + #


+            #     testData.flag("flag")
+            #         .if_match(:name, 'Patsy')
+            #         .and_match(:country, 'gb')
+            #         .then_return(true)
+            #
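A further sketch, with invented names and values: the same clause-combining pattern also works for flags with non-boolean variations, where then_return takes a variation index instead of a boolean:

    td.update(td.flag("greeting")
                .variations('hello', 'bonjour')
                .fallthrough_variation(0)
                .if_match(:country, 'fr')
                .and_match(:name, 'Patsy')
                .then_return(1))   # matching users get 'bonjour'; everyone else falls through to 'hello'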
+ # + # @param attribute the user attribute to match against + # @param values values to compare to + # @return the rule builder + # def and_match(attribute, *values) @clauses.push(FlagRuleClause.new( attribute: attribute, @@ -271,6 +487,23 @@ def and_match(attribute, *values) self end + # + # Adds another clause, using the "is not one of" operator. + #

+ # For example, this creates a rule that returns {@code true} if the name is "Patsy" and the + # country is not "gb": + # + #


+            #     testData.flag("flag")
+            #         .if_match(:name, 'Patsy')
+            #         .and_not_match(:country, 'gb')
+            #         .then_return(true)
+            #
+ # + # @param attribute the user attribute to match against + # @param values values to compare to + # @return the rule builder + # def and_not_match(attribute, *values) @clauses.push(FlagRuleClause.new( attribute: attribute, @@ -281,6 +514,17 @@ def and_not_match(attribute, *values) self end + # + # Finishes defining the rule, specifying the result as either a boolean + # or a variation index. + #

+ # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation true or false or the desired variation index: + # 0 for the first, 1 for the second, etc. + # @result the flag builder with this rule added + # def then_return(variation) if [true, false].include? variation then @variation = @flag_builder.variation_for_boolean(variation) diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 04af121c..03938bea 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -1,8 +1,40 @@ require 'ldclient-rb/impl/integrations/test_data_impl' +# +# A mechanism for providing dynamically updatable feature flag state in a simplified form to an SDK +# client in test scenarios. +#

+# Unlike {@link FileDataSource}, this mechanism does not use any external resources. It provides only +# the data that the application has put into it using the {@link #update(FlagBuilder)} method. +# +#


+#     td = LaunchDarkly::Integrations::TestData.factory
+#     td.update(td.flag("flag-key-1").variation_for_all_users(true))
+#     config = LaunchDarkly::Config.new(data_source: td)
+#     client = LaunchDarkly::LDClient.new('sdkKey', config)
+#     # flags can be updated at any time:
+#     td.update(td.flag("flag-key-2")
+#                 .variation_for_user("some-user-key", true)
+#                 .fallthrough_variation(false))
+#
+# +# The above example uses a simple boolean flag, but more complex configurations are possible using +# the methods of the {@link FlagBuilder} that is returned by {@link #flag(String)}. {@link FlagBuilder} +# supports many of the ways a flag can be configured on the LaunchDarkly dashboard, but does not +# currently support 1. rule operators other than "in" and "not in", or 2. percentage rollouts. +#

+# If the same {@code TestData} instance is used to configure multiple {@code LDClient} instances, +# any changes made to the data will propagate to all of the {@code LDClient}s. +# module LaunchDarkly module Integrations module TestData + # Creates a new instance of the test data source. + #

+ # See {@link TestDataImpl} for details. + # + # @return a new configurable test data source + # def self.factory LaunchDarkly::Impl::Integrations::TestDataImpl.new end From 7c0541d20da3e7005d3b3f3c9aa88604ac36685c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 23 Nov 2021 13:52:33 -0800 Subject: [PATCH 202/292] (big segments 1) add public config/interface/reason types (#167) --- .gitignore | 3 +- lib/ldclient-rb/config.rb | 75 ++++++++++++++ lib/ldclient-rb/evaluation_detail.rb | 75 ++++++++++++-- lib/ldclient-rb/interfaces.rb | 145 +++++++++++++++++++++++++++ spec/evaluation_detail_spec.rb | 10 +- 5 files changed, 294 insertions(+), 14 deletions(-) diff --git a/.gitignore b/.gitignore index d327dbe8..d7b37d2f 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ mkmf.log *.gem .DS_Store -Gemfile.lock \ No newline at end of file +Gemfile.lock +.ruby-version diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 95cda71e..e84654ae 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -42,6 +42,7 @@ class Config # @option opts [String] :wrapper_name See {#wrapper_name}. # @option opts [String] :wrapper_version See {#wrapper_version}. # @option opts [#open] :socket_factory See {#socket_factory}. + # @option opts [BigSegmentsConfig] :big_segments See {#big_segments}. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -262,6 +263,16 @@ def offline? # attr_reader :data_source + # + # Configuration options related to Big Segments. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # @return [BigSegmentsConfig] + # + attr_reader :big_segments + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor @@ -484,4 +495,68 @@ def self.minimum_diagnostic_recording_interval 60 end end + + # + # Configuration options related to Big Segments. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # If your application uses Big Segments, you will need to create a `BigSegmentsConfig` that at a + # minimum specifies what database integration to use, and then pass the `BigSegmentsConfig` + # object as the `big_segments` parameter when creating a {Config}. + # + # @example Configuring Big Segments with Redis + # store = LaunchDarkly::Integrations::Redis::new_big_segments_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(big_segments: + # LaunchDarkly::BigSegmentsConfig.new(store: store)) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + class BigSegmentsConfig + DEFAULT_USER_CACHE_SIZE = 1000 + DEFAULT_USER_CACHE_TIME = 5 + DEFAULT_STATUS_POLL_INTERVAL = 5 + DEFAULT_STALE_AFTER = 2 * 60 + + # + # Constructor for setting Big Segments options. + # + # @param store [LaunchDarkly::Interfaces::BigSegmentStore] the data store implementation + # @param user_cache_size [Integer] See {#user_cache_size}. + # @param user_cache_time [Float] See {#user_cache_time}. + # @param status_poll_interval [Float] See {#status_poll_interval}. + # @param stale_after [Float] See {#stale_after}. + # + def initialize(store:, user_cache_size: nil, user_cache_time: nil, status_poll_interval: nil, stale_after: nil) + @store = store + @user_cache_size = user_cache_size.nil? ? 
DEFAULT_USER_CACHE_SIZE : user_cache_size + @user_cache_time = user_cache_time.nil? ? DEFAULT_USER_CACHE_TIME : user_cache_time + @status_poll_interval = status_poll_interval.nil? ? DEFAULT_STATUS_POLL_INTERVAL : status_poll_interval + @stale_after = stale_after.nil? ? DEFAULT_STALE_AFTER : stale_after + end + + # The implementation of {LaunchDarkly::Interfaces::BigSegmentStore} that will be used to + # query the Big Segments database. + # @return [LaunchDarkly::Interfaces::BigSegmentStore] + attr_reader :store + + # The maximum number of users whose Big Segment state will be cached by the SDK at any given time. + # @return [Integer] + attr_reader :user_cache_size + + # The maximum length of time (in seconds) that the Big Segment state for a user will be cached + # by the SDK. + # @return [Float] + attr_reader :user_cache_time + + # The interval (in seconds) at which the SDK will poll the Big Segment store to make sure it is + # available and to determine how long ago it was updated. + # @return [Float] + attr_reader :status_poll_interval + + # The maximum length of time between updates of the Big Segments data before the data is + # considered out of date. + # @return [Float] + attr_reader :stale_after + end end diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 4eae67bc..676da7a3 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -110,27 +110,42 @@ class EvaluationReason # Indicates the general category of the reason. Will always be one of the class constants such # as {#OFF}. + # @return [Symbol] attr_reader :kind # The index of the rule that was matched (0 for the first rule in the feature flag). If # {#kind} is not {#RULE_MATCH}, this will be `nil`. + # @return [Integer|nil] attr_reader :rule_index # A unique string identifier for the matched rule, which will not change if other rules are added # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. + # @return [String] attr_reader :rule_id # A boolean or nil value representing if the rule or fallthrough has an experiment rollout. + # @return [Boolean|nil] attr_reader :in_experiment # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not # {#PREREQUISITE_FAILED}, this will be `nil`. + # @return [String] attr_reader :prerequisite_key # A value indicating the general category of error. This should be one of the class constants such # as {#ERROR_FLAG_NOT_FOUND}. If {#kind} is not {#ERROR}, it will be `nil`. + # @return [Symbol] attr_reader :error_kind + # Describes the validity of Big Segment information, if and only if the flag evaluation required + # querying at least one Big Segment. Otherwise it returns `nil`. Possible values are defined by + # {BigSegmentsStatus}. + # + # Big Segments are a specific kind of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # @return [Symbol] + attr_reader :big_segments_status + # Returns an instance whose {#kind} is {#OFF}. # @return [EvaluationReason] def self.off @@ -196,11 +211,13 @@ def self.error(error_kind) def ==(other) if other.is_a? EvaluationReason @kind == other.kind && @rule_index == other.rule_index && @rule_id == other.rule_id && - @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind + @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind && + @big_segments_status == other.big_segments_status elsif other.is_a? 
Hash @kind.to_s == other[:kind] && @rule_index == other[:ruleIndex] && @rule_id == other[:ruleId] && @prerequisite_key == other[:prerequisiteKey] && - (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) + (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) && + (other[:bigSegmentsStatus] == @big_segments_status.nil? ? nil : @big_segments_status.to_s) end end @@ -242,7 +259,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json # enabled for a flag and the application called variation_detail, or 2. experimentation is # enabled for an evaluation. We can't reuse these hashes because an application could call # as_json and then modify the result. - case @kind + ret = case @kind when :RULE_MATCH if @in_experiment { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, inExperiment: @in_experiment } @@ -262,6 +279,10 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json else { kind: @kind } end + if !@big_segments_status.nil? + ret[:bigSegmentsStatus] = @big_segments_status + end + ret end # Same as {#as_json}, but converts the JSON structure into a string. @@ -285,14 +306,24 @@ def [](key) @prerequisite_key when :errorKind @error_kind.nil? ? nil : @error_kind.to_s + when :bigSegmentsStatus + @big_segments_status.nil? ? nil : @big_segments_status.to_s else nil end end - private + def with_big_segments_status(big_segments_status) + return self if @big_segments_status == big_segments_status + EvaluationReason.new(@kind, @rule_index, @rule_id, @prerequisite_key, @error_kind, @in_experiment, big_segments_status) + end - def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil) + # + # Constructor that sets all properties. Applications should not normally use this constructor, + # but should use class methods like {#off} to avoid creating unnecessary instances. + # + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil, + big_segments_status = nil) @kind = kind.to_sym @rule_index = rule_index @rule_id = rule_id @@ -301,11 +332,10 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_exper @prerequisite_key.freeze if !prerequisite_key.nil? @error_kind = error_kind @in_experiment = in_experiment + @big_segments_status = big_segments_status end - private_class_method :new - - def self.make_error(error_kind) + private_class_method def self.make_error(error_kind) new(:ERROR, nil, nil, nil, error_kind) end @@ -321,4 +351,33 @@ def self.make_error(error_kind) ERROR_EXCEPTION => make_error(ERROR_EXCEPTION) } end + + # + # Defines the possible values of {EvaluationReason#big_segments_status}. + # + module BigSegmentsStatus + # + # Indicates that the Big Segment query involved in the flag evaluation was successful, and + # that the segment state is considered up to date. + # + HEALTHY = :HEALTHY + + # + # Indicates that the Big Segment query involved in the flag evaluation was successful, but + # that the segment state may not be up to date. + # + STALE = :STALE + + # + # Indicates that Big Segments could not be queried for the flag evaluation because the SDK + # configuration did not include a Big Segment store. + # + NOT_CONFIGURED = :NOT_CONFIGURED + + # + # Indicates that the Big Segment query involved in the flag evaluation failed, for instance + # due to a database error. 
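A hedged sketch of how application code might consume the new reason field; the flag key and logger are invented, and variation_detail is the detail-returning evaluation call mentioned above:

    detail = client.variation_detail("my-flag", user, false)
    case detail.reason.big_segments_status
    when LaunchDarkly::BigSegmentsStatus::STALE, LaunchDarkly::BigSegmentsStatus::STORE_ERROR
      logger.warn("Big Segment data may be missing or out of date for this evaluation")
    end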
+ # + STORE_ERROR = :STORE_ERROR + end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 9ea0932b..05b54d51 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,3 +1,4 @@ +require "observer" module LaunchDarkly # @@ -149,5 +150,149 @@ def start def stop end end + + module BigSegmentStore + # + # Returns information about the overall state of the store. This method will be called only + # when the SDK needs the latest state, so it should not be cached. + # + # @return [BigSegmentStoreMetadata] + # + def get_metadata + end + + # + # Queries the store for a snapshot of the current segment state for a specific user. + # + # The user_hash is a base64-encoded string produced by hashing the user key as defined by + # the Big Segments specification; the store implementation does not need to know the details + # of how this is done, because it deals only with already-hashed keys, but the string can be + # assumed to only contain characters that are valid in base64. + # + # The return value should be either a Hash, or nil if the user is not referenced in any big + # segments. Each key in the Hash is a "segment reference", which is how segments are + # identified in Big Segment data. This string is not identical to the segment key-- the SDK + # will add other information. The store implementation should not be concerned with the + # format of the string. Each value in the Hash is true if the user is explicitly included in + # the segment, false if the user is explicitly excluded from the segment-- and is not also + # explicitly included (that is, if both an include and an exclude existed in the data, the + # include would take precedence). If the user's status in a particular segment is undefined, + # there should be no key or value for that segment. + # + # This Hash may be cached by the SDK, so it should not be modified after it is created. It + # is a snapshot of the segment membership state at one point in time. + # + # @param user_hash [String] + # @return [Hash] true/false values for Big Segments that reference this user + # + def get_membership(user_hash) + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + # @return [void] + # + def stop + end + end + + # + # Values returned by {BigSegmentStore#get_metadata}. + # + class BigSegmentStoreMetadata + def initialize(last_up_to_date) + @last_up_to_date = last_up_to_date + end + + # The Unix epoch millisecond timestamp of the last update to the {BigSegmentStore}. It is + # nil if the store has never been updated. + # + # @return [Integer|nil] + attr_reader :last_up_to_date + end + + # + # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + class BigSegmentStoreStatus + def initialize(available, stale) + @available = available + @stale = stale + end + + # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate + # whether a user is in a segment or not. + # + # If this property is false, the store is not able to make queries (for instance, it may not have + # a valid database connection). In this case, the SDK will treat any reference to a Big Segment + # as if no users are included in that segment. 
Also, the {EvaluationReason} associated with + # with any flag evaluation that references a Big Segment when the store is not available will + # have a `big_segments_status` of `STORE_ERROR`. + # + # @return [Boolean] + attr_reader :available + + # True if the Big Segment store is available, but has not been updated within the amount of time + # specified by {BigSegmentsConfig#stale_after}. + # + # This may indicate that the LaunchDarkly Relay Proxy, which populates the store, has stopped + # running or has become unable to receive fresh data from LaunchDarkly. Any feature flag + # evaluations that reference a Big Segment will be using the last known data, which may be out + # of date. Also, the {EvaluationReason} associated with those evaluations will have a + # `big_segments_status` of `STALE`. + # + # @return [Boolean] + attr_reader :stale + end + + # + # An interface for querying the status of a Big Segment store. + # + # The Big Segment store is the component that receives information about Big Segments, normally + # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type + # of user segments. For more information, read the LaunchDarkly documentation: + # https://docs.launchdarkly.com/home/users/big-segments + # + # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}. + # Application code never needs to implement this interface. + # + # There are two ways to interact with the status. One is to simply get the current status; if its + # `available` property is true, then the SDK is able to evaluate user membership in Big Segments, + # and the `stale`` property indicates whether the data might be out of date. + # + # The other way is to subscribe to status change notifications. Applications may wish to know if + # there is an outage in the Big Segment store, or if it has become stale (the Relay Proxy has + # stopped updating it with new data), since then flag evaluations that reference a Big Segment + # might return incorrect values. To allow finding out about status changes as soon as possible, + # `BigSegmentStoreStatusProvider` mixes in Ruby's + # [Observable](https://docs.ruby-lang.org/en/2.5.0/Observable.html) module to provide standard + # methods such as `add_observer`. Observers will be called with a new {BigSegmentStoreStatus} + # value whenever the status changes. + # + # @example Getting the current status + # status = client.big_segment_store_status_provider.status + # + # @example Subscribing to status notifications + # client.big_segment_store_status_provider.add_observer(self, :big_segments_status_changed) + # + # def big_segments_status_changed(new_status) + # puts "Big segment store status is now: #{new_status}" + # end + # + module BigSegmentStoreStatusProvider + include Observable + # + # Gets the current status of the store, if known. 
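A minimal sketch of how these pieces are meant to fit together, assuming MyBigSegmentStore is a hypothetical implementation of LaunchDarkly::Interfaces::BigSegmentStore and that the status-provider accessor named in these docs is wired up in a later part of this series:

    store = MyBigSegmentStore.new   # hypothetical BigSegmentStore implementation
    config = LaunchDarkly::Config.new(
      big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store, stale_after: 120))
    client = LaunchDarkly::LDClient.new('sdk-key', config)

    status = client.big_segment_store_status_provider.status
    puts "Big Segments available=#{status.available}, stale=#{status.stale}" unless status.nil?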
+ # + # @return [BigSegmentStoreStatus] the status, or nil if the SDK has not yet queried the Big + # Segment store status + # + def status + end + end end end diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb index 3d7418ed..7b1b6856 100644 --- a/spec/evaluation_detail_spec.rb +++ b/spec/evaluation_detail_spec.rb @@ -41,7 +41,10 @@ module LaunchDarkly [ EvaluationReason::prerequisite_failed("x"), EvaluationReason::PREREQUISITE_FAILED, { "kind" => "PREREQUISITE_FAILED", "prerequisiteKey" => "x" }, "PREREQUISITE_FAILED(x)" ], [ EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND), EvaluationReason::ERROR, - { "kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ] + { "kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ], + [ EvaluationReason::fallthrough().with_big_segments_status(BigSegmentsStatus::HEALTHY), EvaluationReason::FALLTHROUGH, + { "kind" => "FALLTHROUGH", "bigSegmentsStatus" => "HEALTHY" }, "FALLTHROUGH", + [ EvaluationReason::fallthrough ] ], ] values.each_index do |i| params = values[i] @@ -108,6 +111,7 @@ module LaunchDarkly expect(EvaluationReason::rule_match(1, "x")[:ruleId]).to eq "x" expect(EvaluationReason::prerequisite_failed("x")[:prerequisiteKey]).to eq "x" expect(EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)[:errorKind]).to eq "FLAG_NOT_FOUND" + expect(EvaluationReason::fallthrough().with_big_segments_status(BigSegmentsStatus::HEALTHY)[:bigSegmentsStatus]).to eq "HEALTHY" end it "freezes string properties" do @@ -127,9 +131,5 @@ module LaunchDarkly expect { EvaluationReason::error(nil) }.to raise_error(ArgumentError) expect { EvaluationReason::error(9) }.to raise_error(ArgumentError) end - - it "does not allow direct access to constructor" do - expect { EvaluationReason.new(:off, nil, nil, nil, nil) }.to raise_error(NoMethodError) - end end end From 28776f3c44131c8d5be8beded7853285e1df870a Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Wed, 24 Nov 2021 14:35:39 +0000 Subject: [PATCH 203/292] Cleanup docstrings to be YARD docs --- .../impl/integrations/test_data_impl.rb | 214 +++++++++--------- 1 file changed, 106 insertions(+), 108 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index 016329a4..c2531b7d 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -5,6 +5,7 @@ module LaunchDarkly module Impl module Integrations class TestDataImpl + # @private def initialize @flag_builders = Hash.new @current_flags = Hash.new @@ -32,21 +33,20 @@ def call(_, config) end # - # Creates or copies a {@link FlagBuilder} for building a test flag configuration. - #

- # If this flag key has already been defined in this {@code TestDataImpl} instance, then the builder + # Creates or copies a {FlagBuilder} for building a test flag configuration. + # + # If this flag key has already been defined in this `TestDataImpl` instance, then the builder # starts with the same configuration that was last provided for this flag. - #

- # Otherwise, it starts with a new default configuration in which the flag has {@code true} and - # {@code false} variations, is {@code true} for all users when targeting is turned on and - # {@code false} otherwise, and currently has targeting turned on. You can change any of those - # properties, and provide more complex behavior, using the {@link FlagBuilder} methods. - #

- # Once you have set the desired configuration, pass the builder to {@link #update(FlagBuilder)}. # - # @param key the flag key - # @return a flag configuration builder - # @see #update(FlagBuilder) + # Otherwise, it starts with a new default configuration in which the flag has `true` and + # `false variations, is `true` for all users when targeting is turned on and + # `false` otherwise, and currently has targeting turned on. You can change any of those + # properties, and provide more complex behavior, using the {FlagBuilder} methods. + # + # Once you have set the desired configuration, pass the builder to {#update}. + # + # @param key [String] the flag key + # @return [FlagBuilder] a flag configuration builder # def flag(key) existing_builder = @lock.with_read_lock { @flag_builders[key] } @@ -59,19 +59,18 @@ def flag(key) # # Updates the test data with the specified flag configuration. - #

+ # # This has the same effect as if a flag were added or modified on the LaunchDarkly dashboard. - # It immediately propagates the flag change to any {@code LDClient} instance(s) that you have - # already configured to use this {@code TestDataImpl}. If no {@code LDClient} has been started yet, - # it simply adds this flag to the test data which will be provided to any {@code LDClient} that + # It immediately propagates the flag change to any `LDClient` instance(s) that you have + # already configured to use this `TestDataImpl`. If no `LDClient` has been started yet, + # it simply adds this flag to the test data which will be provided to any `LDClient` that # you subsequently configure. - #

- # Any subsequent changes to this {@link FlagBuilder} instance do not affect the test data, - # unless you call {@link #update(FlagBuilder)} again. # - # @param flag_builder a flag configuration builder - # @return the same {@code TestDataImpl} instance - # @see #flag(String) + # Any subsequent changes to this {FlagBuilder} instance do not affect the test data, + # unless you call {#update} again. + # + # @param flag_builder [FlagBuilder] a flag configuration builder + # @return [TestDataImpl] the same `TestDataImpl` instance # def update(flag_builder) new_flag = nil @@ -100,6 +99,7 @@ def closed_instance(instance) @instances_lock.with_write_lock { @instances.delete(instance) } end + # @private class TestDataSource include LaunchDarkly::Interfaces::DataSource @@ -146,20 +146,22 @@ def initialize_copy(other) end # - # A builder for feature flag configurations to be used with {@link TestDataImpl}. + # A builder for feature flag configurations to be used with {TestDataImpl}. # - # @see TestDataImpl#flag(String) - # @see TestDataImpl#update(FlagBuilder) + # @see TestDataImpl#flag + # @see TestDataImpl#update # class FlagBuilder attr_reader :key + # @private def initialize(key) @key = key @on = true @variations = [] end + # @private def initialize_copy(other) super(other) @variations = @variations.clone @@ -169,14 +171,14 @@ def initialize_copy(other) # # Sets targeting to be on or off for this flag. - #

+ # # The effect of this depends on the rest of the flag configuration, just as it does on the # real LaunchDarkly dashboard. In the default configuration that you get from calling - # {@link TestDataImpl#flag(String)} with a new flag key, the flag will return {@code false} - # whenever targeting is off, and {@code true} when targeting is on. + # {TestDataImpl#flag} with a new flag key, the flag will return `false` + # whenever targeting is off, and `true` when targeting is on. # - # @param on true if targeting should be on - # @return the builder + # @param on [Boolean] true if targeting should be on + # @return [FlagBuilder] the builder # def on(on) @on = on @@ -187,11 +189,11 @@ def on(on) # Specifies the fallthrough variation. The fallthrough is the value # that is returned if targeting is on and the user was not matched by a more specific # target or rule. - #

+ # # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param variation true or false or the desired fallthrough variation index: + # @param variation [Boolean, Integer] true or false or the desired fallthrough variation index: # 0 for the first, 1 for the second, etc. # @return the builder # @@ -207,13 +209,13 @@ def fallthrough_variation(variation) # # Specifies the off variation for a flag. This is the variation that is returned # whenever targeting is off. - #

+ # # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param variation true or false or the desired off variation index: + # @param variation [Boolean, Integer] true or false or the desired off variation index: # 0 for the first, 1 for the second, etc. - # @return the builder + # @return [FlagBuilder] the builder # def off_variation(variation) if [true,false].include? variation then @@ -226,13 +228,13 @@ def off_variation(variation) # # Changes the allowable variation values for the flag. - #

+ # # The value may be of any valid JSON type. For instance, a boolean flag - # normally has {@code true, false}; a string-valued flag might have - # {@code 'red', 'green'}; etc. + # normally has `true, false`; a string-valued flag might have + # `'red', 'green'`; etc. # - # @param variations the desired variations - # @return the builder + # @param *variations [Array] the desired variations + # @return [FlagBuilder] the builder # def variations(*variations) @variations = variations @@ -241,16 +243,16 @@ def variations(*variations) # # Sets the flag to always return the specified variation for all users. - #

+ # # The variation is specified, Targeting is switched on, and any existing targets or rules are removed. # The fallthrough variation is set to the specified value. The off variation is left unchanged. - #

+ # # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param variation true or false or the desired variation index to return: + # @param variation [Boolean, Integer] true or false or the desired variation index to return: # 0 for the first, 1 for the second, etc. - # @return the builder + # @return [FlagBuilder] the builder # def variation_for_all_users(variation) if [true,false].include? variation then @@ -262,14 +264,14 @@ def variation_for_all_users(variation) # # Sets the flag to always return the specified variation value for all users. - #

+ # # The value may be of any valid JSON type. This method changes the # flag to have only a single variation, which is this value, and to return the same # variation regardless of whether targeting is on or off. Any existing targets or rules # are removed. # - # @param value the desired value to be returned for all users - # @return the builder + # @param value [Object] the desired value to be returned for all users + # @return [FlagBuilder] the builder # def value_for_all_users(value) variations(value).variation_for_all_users(0) @@ -278,16 +280,16 @@ def value_for_all_users(value) # # Sets the flag to return the specified variation for a specific user key when targeting # is on. - #

+ # # This has no effect when targeting is turned off for the flag. - #

+ # # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param user_key a user key - # @param variation true or false or the desired variation index to return: + # @param user_key [String] a user key + # @param variation [Boolean, Integer] true or false or the desired variation index to return: # 0 for the first, 1 for the second, etc. - # @return the builder + # @return [FlagBuilder] the builder # def variation_for_user(user_key, variation) if [true,false].include? variation then @@ -313,20 +315,19 @@ def variation_for_user(user_key, variation) # # Starts defining a flag rule, using the "is one of" operator. - #

- # For example, this creates a rule that returns {@code true} if the name is "Patsy" or "Edina": # - #


+          # @example create a rule that returns `true` if the name is "Patsy" or "Edina"
           #     testData.flag("flag")
           #         .if_match(:name, 'Patsy', 'Edina')
           #         .then_return(true);
-          # 
# - # @param attribute the user attribute to match against - # @param values values to compare to - # @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#then_return(boolean|int)} - # to finish the rule, or add more tests with another method like - # {@link FlagRuleBuilder#and_match(UserAttribute, LDValue...)} + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see {FlagRuleBuilder#then_return} call to finish the rule + # @see {FlagRuleBuilder#and_match} add more tests + # @see {FlagRuleBuilder#and_not_match} add more tests # def if_match(attribute, *values) FlagRuleBuilder.new(self).and_match(attribute, *values) @@ -334,30 +335,29 @@ def if_match(attribute, *values) # # Starts defining a flag rule, using the "is not one of" operator. - #

- # For example, this creates a rule that returns {@code true} if the name is neither "Saffron" nor "Bubble": # - #


+          # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble"
           #     testData.flag("flag")
           #         .if_not_match(:name, 'Saffron', 'Bubble')
           #         .then_return(true)
-          # 
# - # @param attribute the user attribute to match against - # @param values values to compare to - # @return a {@link FlagRuleBuilder}; call {@link FlagRuleBuilder#then_return(boolean|int)} - # to finish the rule, or add more tests with another method like - # {@link FlagRuleBuilder#and_match(UserAttribute, value...)} + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see {FlagRuleBuilder#then_return} call to finish the rule + # @see {FlagRuleBuilder#and_match} add more tests + # @see {FlagRuleBuilder#and_not_match} add more tests # def if_not_match(attribute, *values) FlagRuleBuilder.new(self).and_not_match(attribute, *values) end # - # Removes any existing user targets from the flag. This undoes the effect of methods like - # {@link #variation_for_user} + # Removes any existing user targets from the flag. + # This undoes the effect of methods like {#variation_for_user} # - # @return the same builder + # @return [FlagBuilder] the same builder # def clear_user_targets @targets = nil @@ -365,16 +365,17 @@ def clear_user_targets end # - # Removes any existing rules from the flag. This undoes the effect of methods like - # {@link #if_match} + # Removes any existing rules from the flag. + # This undoes the effect of methods like {#if_match} # - # @return the same builder + # @return [FlagBuilder] the same builder # def clear_rules @rules = nil self end + # @private def add_rule(rule) if @rules.nil? then @rules = DeepCopyArray.new @@ -385,13 +386,13 @@ def add_rule(rule) # # A shortcut for setting the flag to use the standard boolean configuration. - #

- # This is the default for all new flags created with {@link TestDataImpl#flag(String)}. The flag - # will have two variations, {@code true} and {@code false} (in that order); it will return - # {@code false} whenever targeting is off, and {@code true} when targeting is on if no other - # settings specify otherwise. # - # @return the builder + # This is the default for all new flags created with {TestDataImpl#flag}. + # The flag will have two variations, `true` and `false` (in that order); + # it will return `false` whenever targeting is off, and `true` when targeting is on + # if no other settings specify otherwise. + # + # @return [FlagBuilder] the builder # def boolean_flag if is_boolean_flag then @@ -403,6 +404,7 @@ def boolean_flag end end + # @private def build(version) res = { key: @key, version: version, @@ -435,26 +437,28 @@ def build(version) end # - # A builder for feature flag rules to be used with {@link FlagBuilder}. - #

+ # A builder for feature flag rules to be used with {FlagBuilder}. + # # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the # rule's clauses match the user. - #

+ # # To start defining a rule, use one of the flag builder's matching methods such as - # {@link FlagBuilder#if_match}. This defines the first clause for the rule. + # {FlagBuilder#if_match}. This defines the first clause for the rule. # Optionally, you may add more clauses with the rule builder's methods such as - # {@link #and_match} or {@link #and_not_match}. - # Finally, call {@link #then_return} to finish defining the rule. + # {#and_match} or {#and_not_match}. + # Finally, call {#then_return} to finish defining the rule. # class FlagRuleBuilder FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) + # @private def initialize(flag_builder) @flag_builder = flag_builder @clauses = Array.new end + # @private def intialize_copy(other) super(other) @clauses = @clauses.clone @@ -462,20 +466,16 @@ def intialize_copy(other) # # Adds another clause, using the "is one of" operator. - #

- # For example, this creates a rule that returns {@code true} if the name is "Patsy" and the - # country is "gb": # - #


+            # @example create a rule that returns `true` if the name is "Patsy" and the country is "gb"
             #     testData.flag("flag")
             #         .if_match(:name, 'Patsy')
             #         .and_match(:country, 'gb')
             #         .then_return(true)
-            # 
# - # @param attribute the user attribute to match against - # @param values values to compare to - # @return the rule builder + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder # def and_match(attribute, *values) @clauses.push(FlagRuleClause.new( @@ -489,20 +489,16 @@ def and_match(attribute, *values) # # Adds another clause, using the "is not one of" operator. - #

- # For example, this creates a rule that returns {@code true} if the name is "Patsy" and the - # country is not "gb": # - #


+            # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb"
             #     testData.flag("flag")
             #         .if_match(:name, 'Patsy')
             #         .and_not_match(:country, 'gb')
             #         .then_return(true)
-            # 
# - # @param attribute the user attribute to match against - # @param values values to compare to - # @return the rule builder + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder # def and_not_match(attribute, *values) @clauses.push(FlagRuleClause.new( @@ -517,13 +513,13 @@ def and_not_match(attribute, *values) # # Finishes defining the rule, specifying the result as either a boolean # or a variation index. - #

+ # # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param variation true or false or the desired variation index: + # @param variation [Boolean, Integer] true or false or the desired variation index: # 0 for the first, 1 for the second, etc. - # @result the flag builder with this rule added + # @result [FlagBuilder] the flag builder with this rule added # def then_return(variation) if [true, false].include? variation then @@ -535,6 +531,7 @@ def then_return(variation) end end + # @private def build(ri) { id: 'rule' + ri.to_s, @@ -544,6 +541,7 @@ def build(ri) end end + # @private def variation_for_boolean(variation) variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX end From ce5de8915f65948b719a38e9bf628f773dccc6c3 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Wed, 24 Nov 2021 14:53:37 +0000 Subject: [PATCH 204/292] Added Util.is_bool helper function to clean up the check for whether an object is a boolean; Removed the DeepCopyHash/DeepCopyArray objects in favor of deep_copy_hash and deep_copy_array functions --- .../impl/integrations/test_data_impl.rb | 65 +++++++++---------- lib/ldclient-rb/impl/util.rb | 5 +- 2 files changed, 34 insertions(+), 36 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb index fa5eef8e..544c1fef 100644 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ b/lib/ldclient-rb/impl/integrations/test_data_impl.rb @@ -1,25 +1,7 @@ - module LaunchDarkly module Impl module Integrations class TestData - - class DeepCopyHash < Hash - def initialize_copy(other) - other.each do | key, value | - self[key] = value.clone - end - end - end - - class DeepCopyArray < Array - def initialize_copy(other) - other.each do | value | - self.push(value.clone) - end - end - end - class FlagBuilder def initialize(key) @key = key @@ -30,8 +12,8 @@ def initialize(key) def initialize_copy(other) super(other) @variations = @variations.clone - @rules = @rules.nil? ? nil : @rules.clone - @targets = @targets.nil? ? nil : @targets.clone + @rules = @rules.nil? ? nil : deep_copy_array(@rules) + @targets = @targets.nil? ? nil : deep_copy_hash(@targets) end def on(aBool) @@ -40,7 +22,7 @@ def on(aBool) end def fallthrough_variation(variation) - if [true,false].include? variation then + if Util.is_bool variation then boolean_flag.fallthrough_variation(variation_for_boolean(variation)) else @fallthrough_variation = variation @@ -49,7 +31,7 @@ def fallthrough_variation(variation) end def off_variation(variation) - if [true,false].include? variation then + if Util.is_bool variation then boolean_flag.off_variation(variation_for_boolean(variation)) else @off_variation = variation @@ -63,7 +45,7 @@ def variations(*variations) end def variation_for_all_users(variation) - if [true,false].include? variation then + if Util.is_bool variation then boolean_flag.variation_for_all_users(variation_for_boolean(variation)) else on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) @@ -75,11 +57,11 @@ def value_for_all_users(value) end def variation_for_user(user_key, variation) - if [true,false].include? variation then + if Util.is_bool variation then boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) else if @targets.nil? 
then - @targets = DeepCopyHash.new + @targets = Hash.new end @variations.count.times do | i | if i == variation then @@ -99,6 +81,7 @@ def variation_for_user(user_key, variation) def if_match(attribute, *values) FlagRuleBuilder.new(self).and_match(attribute, *values) end + def if_not_match(attribute, *values) FlagRuleBuilder.new(self).and_not_match(attribute, *values) end @@ -115,7 +98,7 @@ def clear_rules def add_rule(rule) if @rules.nil? then - @rules = DeepCopyArray.new + @rules = Array.new end @rules.push(rule) self @@ -166,7 +149,7 @@ def build(version) class FlagRuleBuilder def initialize(flag_builder) @flag_builder = flag_builder - @clauses = DeepCopyArray.new + @clauses = Array.new end def initialize_copy(other) @@ -195,7 +178,7 @@ def and_not_match(attribute, *values) end def then_return(variation) - if [true, false].include? variation then + if Util.is_bool variation then @variation = @flag_builder.variation_for_boolean(variation) @flag_builder.boolean_flag.add_rule(self) else @@ -217,16 +200,28 @@ def variation_for_boolean(variation) variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX end + private - TRUE_VARIATION_INDEX = 0 - FALSE_VARIATION_INDEX = 1 + TRUE_VARIATION_INDEX = 0 + FALSE_VARIATION_INDEX = 1 - def is_boolean_flag - @variations.size == 2 && - @variations[TRUE_VARIATION_INDEX] == true && - @variations[FALSE_VARIATION_INDEX] == false - end + def is_boolean_flag + @variations.size == 2 && + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false + end + + def deep_copy_hash(from) + to = Hash.new + from.each { |k, v| to[k] = v.clone } + to + end + def deep_copy_array(from) + to = Array.new + from.each { |v| to.push(v.clone) } + to + end end end end diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index d1197afe..5fe93a2b 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,7 +1,10 @@ - module LaunchDarkly module Impl module Util + def self.is_bool(aObject) + [true,false].include? aObject + end + def self.current_time_millis (Time.now.to_f * 1000).to_i end From d9f9af57a0b41b7dafb44cae13029c219763dba4 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Wed, 24 Nov 2021 15:16:13 +0000 Subject: [PATCH 205/292] Move public classes out of Impl namespace. Most of it is in public namespace except for the data source now. 
--- .../test_data/test_data_source.rb | 40 ++ .../impl/integrations/test_data_impl.rb | 559 ------------------ lib/ldclient-rb/impl/util.rb | 4 - lib/ldclient-rb/integrations.rb | 4 +- lib/ldclient-rb/integrations/test_data.rb | 161 +++-- .../integrations/test_data/flag_builder.rb | 432 ++++++++++++++ lib/ldclient-rb/util.rb | 6 +- spec/impl/integrations/test_data_impl_spec.rb | 219 ------- spec/integrations/test_data_spec.rb | 211 +++++++ 9 files changed, 818 insertions(+), 818 deletions(-) create mode 100644 lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb delete mode 100644 lib/ldclient-rb/impl/integrations/test_data_impl.rb create mode 100644 lib/ldclient-rb/integrations/test_data/flag_builder.rb delete mode 100644 spec/impl/integrations/test_data_impl_spec.rb create mode 100644 spec/integrations/test_data_spec.rb diff --git a/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb new file mode 100644 index 00000000..b201c26e --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb @@ -0,0 +1,40 @@ +require 'concurrent/atomics' +require 'ldclient-rb/interfaces' + +module LaunchDarkly + module Impl + module Integrations + module TestData + # @private + class TestDataSource + include LaunchDarkly::Interfaces::DataSource + + def initialize(feature_store, test_data) + @feature_store = feature_store + @test_data = test_data + end + + def initialized? + true + end + + def start + ready = Concurrent::Event.new + ready.set + init_data = @test_data.make_init_data + @feature_store.init(init_data) + ready + end + + def stop + @test_data.closed_instance(self) + end + + def upsert(new_flag) + @feature_store.upsert(FEATURES, new_flag) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/integrations/test_data_impl.rb b/lib/ldclient-rb/impl/integrations/test_data_impl.rb deleted file mode 100644 index acb8dbbb..00000000 --- a/lib/ldclient-rb/impl/integrations/test_data_impl.rb +++ /dev/null @@ -1,559 +0,0 @@ -require 'concurrent/atomics' -require 'ldclient-rb/interfaces' - -module LaunchDarkly - module Impl - module Integrations - class TestDataImpl - # @private - def initialize - @flag_builders = Hash.new - @current_flags = Hash.new - @instances = Array.new - @instances_lock = Concurrent::ReadWriteLock.new - @lock = Concurrent::ReadWriteLock.new - end - - # - # Called internally by the SDK to determine what arguments to pass to call - # You do not need to call this method. - # - def arity - 2 - end - - # - # Called internally by the SDK to associate this test data source with an {@code LDClient} instance. - # You do not need to call this method. - # - def call(_, config) - impl = TestDataSource.new(config.feature_store, self) - @instances_lock.with_write_lock { @instances.push(impl) } - impl - end - - # - # Creates or copies a {FlagBuilder} for building a test flag configuration. - # - # If this flag key has already been defined in this `TestDataImpl` instance, then the builder - # starts with the same configuration that was last provided for this flag. - # - # Otherwise, it starts with a new default configuration in which the flag has `true` and - # `false variations, is `true` for all users when targeting is turned on and - # `false` otherwise, and currently has targeting turned on. You can change any of those - # properties, and provide more complex behavior, using the {FlagBuilder} methods. 
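The intent of this refactor is a namespace move; a minimal sketch of the behavior it preserves, with invented key names and assuming the factory entry point introduced earlier in this series: updates made through the test data object are still upserted into any already-started client's feature store.

    td = LaunchDarkly::Integrations::TestData.factory
    config = LaunchDarkly::Config.new(data_source: td)
    client = LaunchDarkly::LDClient.new('fake-sdk-key', config)

    td.update(td.flag('banner').variation_for_all_users(true))
    client.variation('banner', { key: 'some-user' }, false)   # => true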
- # - # Once you have set the desired configuration, pass the builder to {#update}. - # - # @param key [String] the flag key - # @return [FlagBuilder] a flag configuration builder - # - def flag(key) - existing_builder = @lock.with_read_lock { @flag_builders[key] } - if existing_builder.nil? then - FlagBuilder.new(key).boolean_flag - else - existing_builder.clone - end - end - - # - # Updates the test data with the specified flag configuration. - # - # This has the same effect as if a flag were added or modified on the LaunchDarkly dashboard. - # It immediately propagates the flag change to any `LDClient` instance(s) that you have - # already configured to use this `TestDataImpl`. If no `LDClient` has been started yet, - # it simply adds this flag to the test data which will be provided to any `LDClient` that - # you subsequently configure. - # - # Any subsequent changes to this {FlagBuilder} instance do not affect the test data, - # unless you call {#update} again. - # - # @param flag_builder [FlagBuilder] a flag configuration builder - # @return [TestDataImpl] the same `TestDataImpl` instance - # - def update(flag_builder) - new_flag = nil - @lock.with_write_lock do - @flag_builders[flag_builder.key] = flag_builder - version = 0 - flag_key = flag_builder.key.to_sym - if @current_flags[flag_key] then - version = @current_flags[flag_key][:version] - end - new_flag = flag_builder.build(version+1) - @current_flags[flag_key] = new_flag - end - @instances_lock.with_read_lock do - @instances.each do | instance | - instance.upsert(new_flag) - end - end - end - - def make_init_data - { FEATURES => @current_flags } - end - - def closed_instance(instance) - @instances_lock.with_write_lock { @instances.delete(instance) } - end - - # @private - class TestDataSource - include LaunchDarkly::Interfaces::DataSource - - def initialize(feature_store, test_data) - @feature_store = feature_store - @test_data = test_data - end - - def initialized? - true - end - - def start - ready = Concurrent::Event.new - ready.set - init_data = @test_data.make_init_data - @feature_store.init(init_data) - ready - end - - def stop - @test_data.closed_instance(self) - end - - def upsert(new_flag) - @feature_store.upsert(FEATURES, new_flag) - end - end - - # - # A builder for feature flag configurations to be used with {TestDataImpl}. - # - # @see TestDataImpl#flag - # @see TestDataImpl#update - # - class FlagBuilder - attr_reader :key - - # @private - def initialize(key) - @key = key - @on = true - @variations = [] - end - - # @private - def initialize_copy(other) - super(other) - @variations = @variations.clone - @rules = @rules.nil? ? nil : deep_copy_array(@rules) - @targets = @targets.nil? ? nil : deep_copy_hash(@targets) - end - - # - # Sets targeting to be on or off for this flag. - # - # The effect of this depends on the rest of the flag configuration, just as it does on the - # real LaunchDarkly dashboard. In the default configuration that you get from calling - # {TestDataImpl#flag} with a new flag key, the flag will return `false` - # whenever targeting is off, and `true` when targeting is on. - # - # @param on [Boolean] true if targeting should be on - # @return [FlagBuilder] the builder - # - def on(on) - @on = on - self - end - - # - # Specifies the fallthrough variation. The fallthrough is the value - # that is returned if targeting is on and the user was not matched by a more specific - # target or rule. 
- # - # If the flag was previously configured with other variations and the variation specified is a boolean, - # this also changes it to a boolean flag. - # - # @param variation [Boolean, Integer] true or false or the desired fallthrough variation index: - # 0 for the first, 1 for the second, etc. - # @return the builder - # - def fallthrough_variation(variation) - if Util.is_bool variation then - boolean_flag.fallthrough_variation(variation_for_boolean(variation)) - else - @fallthrough_variation = variation - self - end - end - - # - # Specifies the off variation for a flag. This is the variation that is returned - # whenever targeting is off. - # - # If the flag was previously configured with other variations and the variation specified is a boolean, - # this also changes it to a boolean flag. - # - # @param variation [Boolean, Integer] true or false or the desired off variation index: - # 0 for the first, 1 for the second, etc. - # @return [FlagBuilder] the builder - # - def off_variation(variation) - if Util.is_bool variation then - boolean_flag.off_variation(variation_for_boolean(variation)) - else - @off_variation = variation - self - end - end - - # - # Changes the allowable variation values for the flag. - # - # The value may be of any valid JSON type. For instance, a boolean flag - # normally has `true, false`; a string-valued flag might have - # `'red', 'green'`; etc. - # - # @param *variations [Array] the desired variations - # @return [FlagBuilder] the builder - # - def variations(*variations) - @variations = variations - self - end - - # - # Sets the flag to always return the specified variation for all users. - # - # The variation is specified, Targeting is switched on, and any existing targets or rules are removed. - # The fallthrough variation is set to the specified value. The off variation is left unchanged. - # - # If the flag was previously configured with other variations and the variation specified is a boolean, - # this also changes it to a boolean flag. - # - # @param variation [Boolean, Integer] true or false or the desired variation index to return: - # 0 for the first, 1 for the second, etc. - # @return [FlagBuilder] the builder - # - def variation_for_all_users(variation) - if Util.is_bool variation then - boolean_flag.variation_for_all_users(variation_for_boolean(variation)) - else - on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) - end - end - - # - # Sets the flag to always return the specified variation value for all users. - # - # The value may be of any valid JSON type. This method changes the - # flag to have only a single variation, which is this value, and to return the same - # variation regardless of whether targeting is on or off. Any existing targets or rules - # are removed. - # - # @param value [Object] the desired value to be returned for all users - # @return [FlagBuilder] the builder - # - def value_for_all_users(value) - variations(value).variation_for_all_users(0) - end - - # - # Sets the flag to return the specified variation for a specific user key when targeting - # is on. - # - # This has no effect when targeting is turned off for the flag. - # - # If the flag was previously configured with other variations and the variation specified is a boolean, - # this also changes it to a boolean flag. - # - # @param user_key [String] a user key - # @param variation [Boolean, Integer] true or false or the desired variation index to return: - # 0 for the first, 1 for the second, etc. 
- # @return [FlagBuilder] the builder - # - def variation_for_user(user_key, variation) - if Util.is_bool variation then - boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) - else - if @targets.nil? then - @targets = Hash.new - end - @variations.count.times do | i | - if i == variation then - if @targets[i].nil? then - @targets[i] = [user_key] - else - @targets[i].push(user_key) - end - elsif not @targets[i].nil? then - @targets[i].delete(user_key) - end - end - self - end - end - - # - # Starts defining a flag rule, using the "is one of" operator. - # - # @example create a rule that returns `true` if the name is "Patsy" or "Edina" - # testData.flag("flag") - # .if_match(:name, 'Patsy', 'Edina') - # .then_return(true); - # - # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to - # @return [FlagRuleBuilder] a flag rule builder - # - # @see {FlagRuleBuilder#then_return} call to finish the rule - # @see {FlagRuleBuilder#and_match} add more tests - # @see {FlagRuleBuilder#and_not_match} add more tests - # - def if_match(attribute, *values) - FlagRuleBuilder.new(self).and_match(attribute, *values) - end - - # - # Starts defining a flag rule, using the "is not one of" operator. - # - # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble" - # testData.flag("flag") - # .if_not_match(:name, 'Saffron', 'Bubble') - # .then_return(true) - # - # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to - # @return [FlagRuleBuilder] a flag rule builder - # - # @see {FlagRuleBuilder#then_return} call to finish the rule - # @see {FlagRuleBuilder#and_match} add more tests - # @see {FlagRuleBuilder#and_not_match} add more tests - # - def if_not_match(attribute, *values) - FlagRuleBuilder.new(self).and_not_match(attribute, *values) - end - - # - # Removes any existing user targets from the flag. - # This undoes the effect of methods like {#variation_for_user} - # - # @return [FlagBuilder] the same builder - # - def clear_user_targets - @targets = nil - self - end - - # - # Removes any existing rules from the flag. - # This undoes the effect of methods like {#if_match} - # - # @return [FlagBuilder] the same builder - # - def clear_rules - @rules = nil - self - end - - # @private - def add_rule(rule) - if @rules.nil? then - @rules = Array.new - end - @rules.push(rule) - self - end - - # - # A shortcut for setting the flag to use the standard boolean configuration. - # - # This is the default for all new flags created with {TestDataImpl#flag}. - # The flag will have two variations, `true` and `false` (in that order); - # it will return `false` whenever targeting is off, and `true` when targeting is on - # if no other settings specify otherwise. - # - # @return [FlagBuilder] the builder - # - def boolean_flag - if is_boolean_flag then - self - else - variations(true, false) - .fallthrough_variation(TRUE_VARIATION_INDEX) - .off_variation(FALSE_VARIATION_INDEX) - end - end - - # @private - def build(version) - res = { key: @key, - version: version, - on: @on, - } - - unless @off_variation.nil? then - res[:off_variation] = @off_variation - end - - unless @fallthrough_variation.nil? then - res[:fallthrough] = { variation: @fallthrough_variation } - end - - unless @variations.nil? then - res[:variations] = @variations - end - - unless @targets.nil? 
then - res[:targets] = @targets.collect do | variation, values | - { variation: variation, values: values } - end - end - - unless @rules.nil? then - res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } - end - - res - end - - # - # A builder for feature flag rules to be used with {FlagBuilder}. - # - # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of - # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the - # rule's clauses match the user. - # - # To start defining a rule, use one of the flag builder's matching methods such as - # {FlagBuilder#if_match}. This defines the first clause for the rule. - # Optionally, you may add more clauses with the rule builder's methods such as - # {#and_match} or {#and_not_match}. - # Finally, call {#then_return} to finish defining the rule. - # - class FlagRuleBuilder - FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) - - # @private - def initialize(flag_builder) - @flag_builder = flag_builder - @clauses = Array.new - end - - # @private - def intialize_copy(other) - super(other) - @clauses = @clauses.clone - end - - # - # Adds another clause, using the "is one of" operator. - # - # @example create a rule that returns `true` if the name is "Patsy" and the country is "gb" - # testData.flag("flag") - # .if_match(:name, 'Patsy') - # .and_match(:country, 'gb') - # .then_return(true) - # - # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to - # @return [FlagRuleBuilder] the rule builder - # - def and_match(attribute, *values) - @clauses.push(FlagRuleClause.new( - attribute: attribute, - op: 'in', - values: values, - negate: false - )) - self - end - - # - # Adds another clause, using the "is not one of" operator. - # - # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb" - # testData.flag("flag") - # .if_match(:name, 'Patsy') - # .and_not_match(:country, 'gb') - # .then_return(true) - # - # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to - # @return [FlagRuleBuilder] the rule builder - # - def and_not_match(attribute, *values) - @clauses.push(FlagRuleClause.new( - attribute: attribute, - op: 'in', - values: values, - negate: true - )) - self - end - - # - # Finishes defining the rule, specifying the result as either a boolean - # or a variation index. - # - # If the flag was previously configured with other variations and the variation specified is a boolean, - # this also changes it to a boolean flag. - # - # @param variation [Boolean, Integer] true or false or the desired variation index: - # 0 for the first, 1 for the second, etc. - # @result [FlagBuilder] the flag builder with this rule added - # - def then_return(variation) - if Util.is_bool variation then - @variation = @flag_builder.variation_for_boolean(variation) - @flag_builder.boolean_flag.add_rule(self) - else - @variation = variation - @flag_builder.add_rule(self) - end - end - - # @private - def build(ri) - { - id: 'rule' + ri.to_s, - variation: @variation, - clauses: @clauses.collect(&:to_h) - } - end - end - - # @private - def variation_for_boolean(variation) - variation ? 
TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX - end - - - private - TRUE_VARIATION_INDEX = 0 - FALSE_VARIATION_INDEX = 1 - - def is_boolean_flag - @variations.size == 2 && - @variations[TRUE_VARIATION_INDEX] == true && - @variations[FALSE_VARIATION_INDEX] == false - end - - def deep_copy_hash(from) - to = Hash.new - from.each { |k, v| to[k] = v.clone } - to - end - - def deep_copy_array(from) - to = Array.new - from.each { |v| to.push(v.clone) } - to - end - end - end - end - end -end diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index 5fe93a2b..fe82cea1 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,10 +1,6 @@ module LaunchDarkly module Impl module Util - def self.is_bool(aObject) - [true,false].include? aObject - end - def self.current_time_millis (Time.now.to_f * 1000).to_i end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index e161e18b..a4b5f789 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -54,8 +54,8 @@ module Util end - module TestData - # code is in ldclient-rb/impl/integrations/test_data_impl + class TestData + # code is in ldclient-rb/integrations/test_data end end end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 03938bea..5522bb0a 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -1,42 +1,137 @@ -require 'ldclient-rb/impl/integrations/test_data_impl' - -# -# A mechanism for providing dynamically updatable feature flag state in a simplified form to an SDK -# client in test scenarios. -#
-# Unlike {@link FileDataSource}, this mechanism does not use any external resources. It provides only
-# the data that the application has put into it using the {@link #update(FlagBuilder)} method.
-#
-#
-#     td = LaunchDarkly::Integrations::TestData.factory
-#     td.update(td.flag("flag-key-1").variation_for_all_users(true))
-#     config = LaunchDarkly::Config.new(data_source: td)
-#     client = LaunchDarkly::LDClient.new('sdkKey', config)
-#     # flags can be updated at any time:
-#     td.update(td.flag("flag-key-2")
-#                 .variation_for_user("some-user-key", true)
-#                 .fallthrough_variation(false))
-#
-#
-# The above example uses a simple boolean flag, but more complex configurations are possible using
-# the methods of the {@link FlagBuilder} that is returned by {@link #flag(String)}. {@link FlagBuilder}
-# supports many of the ways a flag can be configured on the LaunchDarkly dashboard, but does not
-# currently support 1. rule operators other than "in" and "not in", or 2. percentage rollouts.
-#

+ # + # @param flag_builder [FlagBuilder] a flag configuration builder + # @return [TestData] the same `TestData` instance + # + def update(flag_builder) + new_flag = nil + @lock.with_write_lock do + @flag_builders[flag_builder.key] = flag_builder + version = 0 + flag_key = flag_builder.key.to_sym + if @current_flags[flag_key] then + version = @current_flags[flag_key][:version] + end + new_flag = flag_builder.build(version+1) + @current_flags[flag_key] = new_flag + end + @instances_lock.with_read_lock do + @instances.each do | instance | + instance.upsert(new_flag) + end + end + end + + # @private + def make_init_data + { FEATURES => @current_flags } + end + + # @private + def closed_instance(instance) + @instances_lock.with_write_lock { @instances.delete(instance) } end end end diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb new file mode 100644 index 00000000..f2256c33 --- /dev/null +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -0,0 +1,432 @@ +require 'ldclient-rb/util' + +module LaunchDarkly + module Integrations + class TestData + # + # A builder for feature flag configurations to be used with {TestDataImpl}. + # + # @see TestDataImpl#flag + # @see TestDataImpl#update + # + class FlagBuilder + attr_reader :key + + # @private + def initialize(key) + @key = key + @on = true + @variations = [] + end + + # @private + def initialize_copy(other) + super(other) + @variations = @variations.clone + @rules = @rules.nil? ? nil : deep_copy_array(@rules) + @targets = @targets.nil? ? nil : deep_copy_hash(@targets) + end + + # + # Sets targeting to be on or off for this flag. + # + # The effect of this depends on the rest of the flag configuration, just as it does on the + # real LaunchDarkly dashboard. In the default configuration that you get from calling + # {TestDataImpl#flag} with a new flag key, the flag will return `false` + # whenever targeting is off, and `true` when targeting is on. + # + # @param on [Boolean] true if targeting should be on + # @return [FlagBuilder] the builder + # + def on(on) + @on = on + self + end + + # + # Specifies the fallthrough variation. The fallthrough is the value + # that is returned if targeting is on and the user was not matched by a more specific + # target or rule. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation [Boolean, Integer] true or false or the desired fallthrough variation index: + # 0 for the first, 1 for the second, etc. + # @return the builder + # + def fallthrough_variation(variation) + if LaunchDarkly::Util.is_bool variation then + boolean_flag.fallthrough_variation(variation_for_boolean(variation)) + else + @fallthrough_variation = variation + self + end + end + + # + # Specifies the off variation for a flag. This is the variation that is returned + # whenever targeting is off. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation [Boolean, Integer] true or false or the desired off variation index: + # 0 for the first, 1 for the second, etc. 
+ # @return [FlagBuilder] the builder + # + def off_variation(variation) + if LaunchDarkly::Util.is_bool variation then + boolean_flag.off_variation(variation_for_boolean(variation)) + else + @off_variation = variation + self + end + end + + # + # Changes the allowable variation values for the flag. + # + # The value may be of any valid JSON type. For instance, a boolean flag + # normally has `true, false`; a string-valued flag might have + # `'red', 'green'`; etc. + # + # @param *variations [Array] the desired variations + # @return [FlagBuilder] the builder + # + def variations(*variations) + @variations = variations + self + end + + # + # Sets the flag to always return the specified variation for all users. + # + # The variation is specified, Targeting is switched on, and any existing targets or rules are removed. + # The fallthrough variation is set to the specified value. The off variation is left unchanged. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation [Boolean, Integer] true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return [FlagBuilder] the builder + # + def variation_for_all_users(variation) + if LaunchDarkly::Util.is_bool variation then + boolean_flag.variation_for_all_users(variation_for_boolean(variation)) + else + on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) + end + end + + # + # Sets the flag to always return the specified variation value for all users. + # + # The value may be of any valid JSON type. This method changes the + # flag to have only a single variation, which is this value, and to return the same + # variation regardless of whether targeting is on or off. Any existing targets or rules + # are removed. + # + # @param value [Object] the desired value to be returned for all users + # @return [FlagBuilder] the builder + # + def value_for_all_users(value) + variations(value).variation_for_all_users(0) + end + + # + # Sets the flag to return the specified variation for a specific user key when targeting + # is on. + # + # This has no effect when targeting is turned off for the flag. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param user_key [String] a user key + # @param variation [Boolean, Integer] true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return [FlagBuilder] the builder + # + def variation_for_user(user_key, variation) + if LaunchDarkly::Util.is_bool variation then + boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) + else + if @targets.nil? then + @targets = Hash.new + end + @variations.count.times do | i | + if i == variation then + if @targets[i].nil? then + @targets[i] = [user_key] + else + @targets[i].push(user_key) + end + elsif not @targets[i].nil? then + @targets[i].delete(user_key) + end + end + self + end + end + + # + # Starts defining a flag rule, using the "is one of" operator. 
+ # + # @example create a rule that returns `true` if the name is "Patsy" or "Edina" + # testData.flag("flag") + # .if_match(:name, 'Patsy', 'Edina') + # .then_return(true); + # + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see {FlagRuleBuilder#then_return} call to finish the rule + # @see {FlagRuleBuilder#and_match} add more tests + # @see {FlagRuleBuilder#and_not_match} add more tests + # + def if_match(attribute, *values) + FlagRuleBuilder.new(self).and_match(attribute, *values) + end + + # + # Starts defining a flag rule, using the "is not one of" operator. + # + # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble" + # testData.flag("flag") + # .if_not_match(:name, 'Saffron', 'Bubble') + # .then_return(true) + # + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see {FlagRuleBuilder#then_return} call to finish the rule + # @see {FlagRuleBuilder#and_match} add more tests + # @see {FlagRuleBuilder#and_not_match} add more tests + # + def if_not_match(attribute, *values) + FlagRuleBuilder.new(self).and_not_match(attribute, *values) + end + + # + # Removes any existing user targets from the flag. + # This undoes the effect of methods like {#variation_for_user} + # + # @return [FlagBuilder] the same builder + # + def clear_user_targets + @targets = nil + self + end + + # + # Removes any existing rules from the flag. + # This undoes the effect of methods like {#if_match} + # + # @return [FlagBuilder] the same builder + # + def clear_rules + @rules = nil + self + end + + # @private + def add_rule(rule) + if @rules.nil? then + @rules = Array.new + end + @rules.push(rule) + self + end + + # + # A shortcut for setting the flag to use the standard boolean configuration. + # + # This is the default for all new flags created with {TestDataImpl#flag}. + # The flag will have two variations, `true` and `false` (in that order); + # it will return `false` whenever targeting is off, and `true` when targeting is on + # if no other settings specify otherwise. + # + # @return [FlagBuilder] the builder + # + def boolean_flag + if is_boolean_flag then + self + else + variations(true, false) + .fallthrough_variation(TRUE_VARIATION_INDEX) + .off_variation(FALSE_VARIATION_INDEX) + end + end + + # @private + def build(version) + res = { key: @key, + version: version, + on: @on, + } + + unless @off_variation.nil? then + res[:off_variation] = @off_variation + end + + unless @fallthrough_variation.nil? then + res[:fallthrough] = { variation: @fallthrough_variation } + end + + unless @variations.nil? then + res[:variations] = @variations + end + + unless @targets.nil? then + res[:targets] = @targets.collect do | variation, values | + { variation: variation, values: values } + end + end + + unless @rules.nil? then + res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } + end + + res + end + + # + # A builder for feature flag rules to be used with {FlagBuilder}. + # + # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of + # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the + # rule's clauses match the user. + # + # To start defining a rule, use one of the flag builder's matching methods such as + # {FlagBuilder#if_match}. 
This defines the first clause for the rule. + # Optionally, you may add more clauses with the rule builder's methods such as + # {#and_match} or {#and_not_match}. + # Finally, call {#then_return} to finish defining the rule. + # + class FlagRuleBuilder + FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) + + # @private + def initialize(flag_builder) + @flag_builder = flag_builder + @clauses = Array.new + end + + # @private + def intialize_copy(other) + super(other) + @clauses = @clauses.clone + end + + # + # Adds another clause, using the "is one of" operator. + # + # @example create a rule that returns `true` if the name is "Patsy" and the country is "gb" + # testData.flag("flag") + # .if_match(:name, 'Patsy') + # .and_match(:country, 'gb') + # .then_return(true) + # + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder + # + def and_match(attribute, *values) + @clauses.push(FlagRuleClause.new( + attribute: attribute, + op: 'in', + values: values, + negate: false + )) + self + end + + # + # Adds another clause, using the "is not one of" operator. + # + # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb" + # testData.flag("flag") + # .if_match(:name, 'Patsy') + # .and_not_match(:country, 'gb') + # .then_return(true) + # + # @param attribute [Symbol] the user attribute to match against + # @param *values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder + # + def and_not_match(attribute, *values) + @clauses.push(FlagRuleClause.new( + attribute: attribute, + op: 'in', + values: values, + negate: true + )) + self + end + + # + # Finishes defining the rule, specifying the result as either a boolean + # or a variation index. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param variation [Boolean, Integer] true or false or the desired variation index: + # 0 for the first, 1 for the second, etc. + # @result [FlagBuilder] the flag builder with this rule added + # + def then_return(variation) + if LaunchDarkly::Util.is_bool variation then + @variation = @flag_builder.variation_for_boolean(variation) + @flag_builder.boolean_flag.add_rule(self) + else + @variation = variation + @flag_builder.add_rule(self) + end + end + + # @private + def build(ri) + { + id: 'rule' + ri.to_s, + variation: @variation, + clauses: @clauses.collect(&:to_h) + } + end + end + + # @private + def variation_for_boolean(variation) + variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX + end + + private + + TRUE_VARIATION_INDEX = 0 + FALSE_VARIATION_INDEX = 1 + + def is_boolean_flag + @variations.size == 2 && + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false + end + + def deep_copy_hash(from) + to = Hash.new + from.each { |k, v| to[k] = v.clone } + to + end + + def deep_copy_array(from) + to = Array.new + from.each { |v| to.push(v.clone) } + to + end + end + end + end +end diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index cfd09d8d..a82590b8 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,6 +4,10 @@ module LaunchDarkly # @private module Util + def self.is_bool(aObject) + [true,false].include? aObject + end + def self.stringify_attrs(hash, attrs) return hash if hash.nil? 
ret = hash @@ -18,7 +22,7 @@ def self.stringify_attrs(hash, attrs) end ret end - + def self.new_http_client(uri_s, config) http_client_options = {} if config.socket_factory diff --git a/spec/impl/integrations/test_data_impl_spec.rb b/spec/impl/integrations/test_data_impl_spec.rb deleted file mode 100644 index 6d89b31a..00000000 --- a/spec/impl/integrations/test_data_impl_spec.rb +++ /dev/null @@ -1,219 +0,0 @@ -require "ldclient-rb/integrations/test_data" -require "ldclient-rb/cache_store" -require "ldclient-rb/interfaces" -require "ldclient-rb/in_memory_store" -require "ldclient-rb/config" -require "ldclient-rb/events" -require "ldclient-rb/ldclient" - -module LaunchDarkly - module Impl - module Integrations - describe 'TestData' do - it 'is a valid datasource' do - td = LaunchDarkly::Integrations::TestData.factory - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - expect(config.feature_store.all(LaunchDarkly::FEATURES)).to eql({}) - client.close - end - - it 'initializes the feature store with existing flags' do - td = LaunchDarkly::Integrations::TestData.factory - td.update(td.flag('flag')) - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ - key: 'flag', - variations: [true, false], - fallthrough: { variation: 0 }, - off_variation: 1, - on: true, - version: 1 - }) - client.close - end - - it 'updates the feature store with new flags' do - td = LaunchDarkly::Integrations::TestData.factory - td.update(td.flag('flag')) - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - config2 = LaunchDarkly::Config.new(send_events: false, data_source: td) - client2 = LaunchDarkly::LDClient.new('sdkKey', config2) - - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ - key: 'flag', - variations: [true, false], - fallthrough: { variation: 0 }, - off_variation: 1, - on: true, - version: 1 - }) - expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ - key: 'flag', - variations: [true, false], - fallthrough: { variation: 0 }, - off_variation: 1, - on: true, - version: 1 - }) - - td.update(td.flag('flag').variation_for_all_users(false)) - - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ - key: 'flag', - variations: [true, false], - fallthrough: { variation: 1 }, - off_variation: 1, - on: true, - version: 2 - }) - expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ - key: 'flag', - variations: [true, false], - fallthrough: { variation: 1 }, - off_variation: 1, - on: true, - version: 2 - }) - - client.close - client2.close - end - - it 'TestData.flag defaults to a boolean flag' do - td = TestDataImpl.new - f = td.flag('flag').build(0) - expect(f[:variations]).to eq([true, false]) - expect(f[:fallthrough][:variation]).to eq(0) - expect(f[:off_variation]).to eq(1) - end - - it 'TestData.flag returns a copy of the existing flag if it exists' do - td = TestDataImpl.new - td.update(td.flag('flag').variation_for_all_users(true)) - expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) - - #modify the flag but dont call update - td.flag('flag').variation_for_all_users(false).build(0) - - expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) - end - - describe 'FlagBuilder' do - - it 'defaults 
to targeting on and sets the flag key' do - f = TestDataImpl::FlagBuilder.new('flag').build(1) - expect(f[:key]).to eq('flag') - expect(f[:version]).to eq(1) - expect(f[:on]).to eq(true) - expect(f[:variations]).to be_empty - end - - it 'can set targeting off' do - f = TestDataImpl::FlagBuilder.new('flag').on(false).build(1) - expect(f[:on]).to eq(false) - end - - it 'can set fallthrough variation' do - f = TestDataImpl::FlagBuilder.new('flag').fallthrough_variation(0).build(1) - expect(f[:fallthrough][:variation]).to eq(0) - end - - it 'can set variation for when targeting is off' do - f = TestDataImpl::FlagBuilder.new('flag').off_variation(0).build(1) - expect(f[:off_variation]).to eq(0) - end - - it 'can set a list of variations' do - f = TestDataImpl::FlagBuilder.new('flag').variations(true, false).build(1) - expect(f[:variations]).to eq([true, false]) - end - - it 'has the boolean_flag shortcut method' do - f = TestDataImpl::FlagBuilder.new('flag').boolean_flag.build(1) - expect(f[:variations]).to eq([true, false]) - expect(f[:fallthrough][:variation]).to eq(0) - expect(f[:off_variation]).to eq(1) - end - - it 'can handle boolean or index variation' do - f = TestDataImpl::FlagBuilder.new('flag').off_variation(true).build(1) - expect(f[:variations]).to eq([true, false]) - expect(f[:off_variation]).to eq(0) - - f2 = TestDataImpl::FlagBuilder.new('flag').fallthrough_variation(true).build(1) - expect(f2[:variations]).to eq([true, false]) - expect(f2[:off_variation]).to eq(1) - end - - it 'can set variation for all users' do - f = TestDataImpl::FlagBuilder.new('flag').variation_for_all_users(true).build(1) - expect(f[:rules]).to be_nil - expect(f[:targets]).to be_nil - expect(f[:fallthrough][:variation]).to be(0) - end - - it 'clears existing rules when setting variation for all users' do - f = TestDataImpl::FlagBuilder.new('flag') - .if_match('name', 'ben') - .then_return(false) - .variation_for_user('ben', false) - .variation_for_all_users(true).build(1) - expect(f.keys).to_not include(:rules) - expect(f.keys).to_not include(:targets) - expect(f[:fallthrough][:variation]).to be(0) - end - - it 'can set a variation for a specific user' do - f = TestDataImpl::FlagBuilder.new('flag') - .variation_for_user('ben', false) - f2 = f.clone.variation_for_user('ben', true) - expect(f.build(0)[:targets]).to eql([ { variation: 1, values: ['ben'] } ]) - expect(f2.build(1)[:targets]).to_not include({ variation: 1, values: ['ben'] }) - expect(f2.build(1)[:targets]).to include({ variation: 0, values: ['ben'] }) - end - - it 'can make an immutable copy of its self' do - fb = TestDataImpl::FlagBuilder.new('flag').variation_for_all_users(true) - expect(fb.build(0)).to eql(fb.clone.build(0)) - - fcopy = fb.clone.variation_for_all_users(false).build(0) - f = fb.build(0) - - expect(f[:key]).to eql(fcopy[:key]) - expect(f[:variations]).to eql(fcopy[:variations]) - expect(f[:fallthrough][:variation]).to be(0) - expect(fcopy[:fallthrough][:variation]).to be(1) - end - - it 'can build rules based on attributes' do - f = TestDataImpl::FlagBuilder.new('flag') - .if_match('name', 'ben') - .and_not_match('country', 'fr') - .then_return(true) - .build(1) - expect(f[:rules]).to eql([{ - id: "rule0", - variation: 0, - clauses: [{ - attribute: 'name', - op: 'in', - values: ['ben'], - negate: false, - }, - { - attribute: 'country', - op: 'in', - values: ['fr'], - negate: true, - } - ] - }]) - end - end - end - end - end -end diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb new file 
mode 100644 index 00000000..8d3ba024 --- /dev/null +++ b/spec/integrations/test_data_spec.rb @@ -0,0 +1,211 @@ +require "ldclient-rb" + +module LaunchDarkly + module Integrations + describe 'TestData' do + it 'is a valid datasource' do + td = LaunchDarkly::Integrations::TestData.data_source + config = LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + expect(config.feature_store.all(LaunchDarkly::FEATURES)).to eql({}) + client.close + end + + it 'initializes the feature store with existing flags' do + td = LaunchDarkly::Integrations::TestData.data_source + td.update(td.flag('flag')) + config = LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + client.close + end + + it 'updates the feature store with new flags' do + td = LaunchDarkly::Integrations::TestData.data_source + td.update(td.flag('flag')) + config = LaunchDarkly::Config.new(send_events: false, data_source: td) + client = LaunchDarkly::LDClient.new('sdkKey', config) + config2 = LaunchDarkly::Config.new(send_events: false, data_source: td) + client2 = LaunchDarkly::LDClient.new('sdkKey', config2) + + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 0 }, + off_variation: 1, + on: true, + version: 1 + }) + + td.update(td.flag('flag').variation_for_all_users(false)) + + expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + off_variation: 1, + on: true, + version: 2 + }) + expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + key: 'flag', + variations: [true, false], + fallthrough: { variation: 1 }, + off_variation: 1, + on: true, + version: 2 + }) + + client.close + client2.close + end + + it 'TestData.flag defaults to a boolean flag' do + td = TestData.new + f = td.flag('flag').build(0) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:off_variation]).to eq(1) + end + + it 'TestData.flag returns a copy of the existing flag if it exists' do + td = TestData.new + td.update(td.flag('flag').variation_for_all_users(true)) + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + + #modify the flag but dont call update + td.flag('flag').variation_for_all_users(false).build(0) + + expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) + end + + describe 'FlagBuilder' do + + it 'defaults to targeting on and sets the flag key' do + f = TestData::FlagBuilder.new('flag').build(1) + expect(f[:key]).to eq('flag') + expect(f[:version]).to eq(1) + expect(f[:on]).to eq(true) + expect(f[:variations]).to be_empty + end + + it 'can set targeting off' do + f = TestData::FlagBuilder.new('flag').on(false).build(1) + expect(f[:on]).to eq(false) + end + + it 'can set fallthrough variation' do + f = TestData::FlagBuilder.new('flag').fallthrough_variation(0).build(1) + expect(f[:fallthrough][:variation]).to eq(0) + end + + it 'can 
set variation for when targeting is off' do + f = TestData::FlagBuilder.new('flag').off_variation(0).build(1) + expect(f[:off_variation]).to eq(0) + end + + it 'can set a list of variations' do + f = TestData::FlagBuilder.new('flag').variations(true, false).build(1) + expect(f[:variations]).to eq([true, false]) + end + + it 'has the boolean_flag shortcut method' do + f = TestData::FlagBuilder.new('flag').boolean_flag.build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:fallthrough][:variation]).to eq(0) + expect(f[:off_variation]).to eq(1) + end + + it 'can handle boolean or index variation' do + f = TestData::FlagBuilder.new('flag').off_variation(true).build(1) + expect(f[:variations]).to eq([true, false]) + expect(f[:off_variation]).to eq(0) + + f2 = TestData::FlagBuilder.new('flag').fallthrough_variation(true).build(1) + expect(f2[:variations]).to eq([true, false]) + expect(f2[:off_variation]).to eq(1) + end + + it 'can set variation for all users' do + f = TestData::FlagBuilder.new('flag').variation_for_all_users(true).build(1) + expect(f[:rules]).to be_nil + expect(f[:targets]).to be_nil + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'clears existing rules when setting variation for all users' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .then_return(false) + .variation_for_user('ben', false) + .variation_for_all_users(true).build(1) + expect(f.keys).to_not include(:rules) + expect(f.keys).to_not include(:targets) + expect(f[:fallthrough][:variation]).to be(0) + end + + it 'can set a variation for a specific user' do + f = TestData::FlagBuilder.new('flag') + .variation_for_user('ben', false) + f2 = f.clone.variation_for_user('ben', true) + expect(f.build(0)[:targets]).to eql([ { variation: 1, values: ['ben'] } ]) + expect(f2.build(1)[:targets]).to_not include({ variation: 1, values: ['ben'] }) + expect(f2.build(1)[:targets]).to include({ variation: 0, values: ['ben'] }) + end + + it 'can make an immutable copy of its self' do + fb = TestData::FlagBuilder.new('flag').variation_for_all_users(true) + expect(fb.build(0)).to eql(fb.clone.build(0)) + + fcopy = fb.clone.variation_for_all_users(false).build(0) + f = fb.build(0) + + expect(f[:key]).to eql(fcopy[:key]) + expect(f[:variations]).to eql(fcopy[:variations]) + expect(f[:fallthrough][:variation]).to be(0) + expect(fcopy[:fallthrough][:variation]).to be(1) + end + + it 'can build rules based on attributes' do + f = TestData::FlagBuilder.new('flag') + .if_match('name', 'ben') + .and_not_match('country', 'fr') + .then_return(true) + .build(1) + expect(f[:rules]).to eql([{ + id: "rule0", + variation: 0, + clauses: [{ + attribute: 'name', + op: 'in', + values: ['ben'], + negate: false, + }, + { + attribute: 'country', + op: 'in', + values: ['fr'], + negate: true, + } + ] + }]) + end + end + end + end +end From 042ecb0b71cdc27c1a495a68f3092344895a134e Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Wed, 24 Nov 2021 15:18:57 +0000 Subject: [PATCH 206/292] Move require of concurrent/atomics to the correct module --- lib/ldclient-rb/integrations/test_data.rb | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 5522bb0a..0d661056 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -1,3 +1,4 @@ +require 'concurrent/atomics' require 'ldclient-rb/impl/integrations/test_data/test_data_source' require 'ldclient-rb/integrations/test_data/flag_builder' From 
78e0da55ccc102d91344236a1a1254ab3e7326b5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Nov 2021 11:33:15 -0800 Subject: [PATCH 207/292] (big segments 2) implement Big Segments evaluation & status APIs (#168) --- lib/ldclient-rb/config.rb | 1 + lib/ldclient-rb/impl/big_segments.rb | 112 +++++++++++++ lib/ldclient-rb/impl/evaluator.rb | 108 +++++++++---- lib/ldclient-rb/impl/repeating_task.rb | 47 ++++++ lib/ldclient-rb/interfaces.rb | 4 + lib/ldclient-rb/ldclient.rb | 17 +- lib/ldclient-rb/polling.rb | 63 +++----- spec/impl/big_segments_spec.rb | 193 +++++++++++++++++++++++ spec/impl/evaluator_big_segments_spec.rb | 160 +++++++++++++++++++ spec/impl/evaluator_segment_spec.rb | 12 +- spec/impl/evaluator_spec.rb | 16 +- spec/impl/evaluator_spec_base.rb | 87 ++++++++-- spec/impl/repeating_task_spec.rb | 78 +++++++++ spec/ldclient_end_to_end_spec.rb | 40 ++--- spec/ldclient_listeners_spec.rb | 48 ++++++ spec/ldclient_spec_base.rb | 42 +++++ spec/mock_components.rb | 51 ++++++ 17 files changed, 951 insertions(+), 128 deletions(-) create mode 100644 lib/ldclient-rb/impl/big_segments.rb create mode 100644 lib/ldclient-rb/impl/repeating_task.rb create mode 100644 spec/impl/big_segments_spec.rb create mode 100644 spec/impl/evaluator_big_segments_spec.rb create mode 100644 spec/impl/repeating_task_spec.rb create mode 100644 spec/ldclient_listeners_spec.rb create mode 100644 spec/ldclient_spec_base.rb create mode 100644 spec/mock_components.rb diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index e84654ae..63c1997e 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -74,6 +74,7 @@ def initialize(opts = {}) @wrapper_name = opts[:wrapper_name] @wrapper_version = opts[:wrapper_version] @socket_factory = opts[:socket_factory] + @big_segments = opts[:big_segments] || BigSegmentsConfig.new(store: nil) end # diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb new file mode 100644 index 00000000..eb709246 --- /dev/null +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -0,0 +1,112 @@ +require "ldclient-rb/config" +require "ldclient-rb/expiring_cache" +require "ldclient-rb/impl/repeating_task" +require "ldclient-rb/interfaces" +require "ldclient-rb/util" + +require "digest" + +module LaunchDarkly + module Impl + BigSegmentMembershipResult = Struct.new(:membership, :status) + + class BigSegmentStoreManager + def initialize(big_segments_config, logger) + @store = big_segments_config.store + @stale_after_millis = big_segments_config.stale_after * 1000 + @status_provider = BigSegmentStoreStatusProviderImpl.new(-> { get_status }) + @logger = logger + @last_status = nil + + if !@store.nil? + @cache = ExpiringCache.new(big_segments_config.user_cache_size, big_segments_config.user_cache_time) + @poll_worker = RepeatingTask.new(big_segments_config.status_poll_interval, 0, -> { poll_store_and_update_status }, logger) + @poll_worker.start + end + end + + attr_reader :status_provider + + def stop + @poll_worker.stop if !@poll_worker.nil? + @store.stop if !@store.nil? 
+ end + + def get_user_membership(user_key) + return nil if !@store + membership = @cache[user_key] + if !membership + begin + membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key)) + @cache[user_key] = membership + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e) + return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR) + end + end + poll_store_and_update_status if !@last_status + if !@last_status.available + return BigSegmentMembershipResult.new(membership, BigSegmentsStatus::STORE_ERROR) + end + BigSegmentMembershipResult.new(membership, @last_status.stale ? BigSegmentsStatus::STALE : BigSegmentsStatus::HEALTHY) + end + + def get_status + @last_status || poll_store_and_update_status + end + + def poll_store_and_update_status + new_status = Interfaces::BigSegmentStoreStatus.new(false, false) # default to "unavailable" if we don't get a new status below + if !@store.nil? + begin + metadata = @store.get_metadata + new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || is_stale(metadata.last_up_to_date)) + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e) + end + end + @last_status = new_status + @status_provider.update_status(new_status) + + new_status + end + + def is_stale(timestamp) + (Impl::Util.current_time_millis - timestamp) >= @stale_after_millis + end + + def self.hash_for_user_key(user_key) + Digest::MD5.base64digest(user_key) + end + end + + # + # Default implementation of the BigSegmentStoreStatusProvider interface. + # + # There isn't much to this because the real implementation is in BigSegmentStoreManager - we pass in a lambda + # that allows us to get the current status from that class. Also, the standard Observer methods such as + # add_observer are provided for us because BigSegmentStoreStatusProvider mixes in Observer, so all we need to + # to do make notifications happen is to call the Observer methods "changed" and "notify_observers". + # + class BigSegmentStoreStatusProviderImpl + include LaunchDarkly::Interfaces::BigSegmentStoreStatusProvider + + def initialize(status_fn) + @status_fn = status_fn + @last_status = nil + end + + def status + @status_fn.call + end + + def update_status(new_status) + if !@last_status || new_status != @last_status + @last_status = new_status + changed + notify_observers(new_status) + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 00898cd9..9e10c8ef 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -16,16 +16,28 @@ class Evaluator # flag data - or nil if the flag is unknown or deleted # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment. # @param logger [Logger] the client's logger - def initialize(get_flag, get_segment, logger) + def initialize(get_flag, get_segment, get_big_segments_membership, logger) @get_flag = get_flag @get_segment = get_segment + @get_big_segments_membership = get_big_segments_membership @logger = logger end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. The - # `detail` property is an EvaluationDetail. The `events` property can be either an array of feature request - # events or nil. 
- EvalResult = Struct.new(:detail, :events) + # Used internally to hold an evaluation result and additional state that may be accumulated during an + # evaluation. It's simpler and a bit more efficient to represent these as mutable properties rather than + # trying to use a pure functional approach, and since we're not exposing this object to any application code + # or retaining it anywhere, we don't have to be quite as strict about immutability. + # + # The big_segments_status and big_segments_membership properties are not used by the caller; they are used + # during an evaluation to cache the result of any Big Segments query that we've done for this user, because + # we don't want to do multiple queries for the same user if multiple Big Segments are referenced in the same + # evaluation. + EvalResult = Struct.new( + :detail, # the EvaluationDetail representing the evaluation result + :events, # an array of evaluation events generated by prerequisites, or nil + :big_segments_status, + :big_segments_membership + ) # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) @@ -42,30 +54,38 @@ def self.error_result(errorKind, value = nil) # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) + result = EvalResult.new if user.nil? || user[:key].nil? - return EvalResult.new(Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED), []) + result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED) + return result end - - # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature - # request events for prerequisites and we can skip allocating an array. - if flag[:prerequisites] && !flag[:prerequisites].empty? - events = [] - else - events = nil + + detail = eval_internal(flag, user, result, event_factory) + if !result.big_segments_status.nil? + # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at + # some point and we will want to include the status in the evaluation reason. + detail = EvaluationDetail.new(detail.value, detail.variation_index, + detail.reason.with_big_segments_status(result.big_segments_status)) end + result.detail = detail + return result + end - detail = eval_internal(flag, user, events, event_factory) - return EvalResult.new(detail, events.nil? || events.empty? ? nil : events) + def self.make_big_segment_ref(segment) # method is visible for testing + # The format of Big Segment references is independent of what store implementation is being + # used; the store implementation receives only this string and does not know the details of + # the data model. The Relay Proxy will use the same format when writing to the store. + "#{segment[:key]}.g#{segment[:generation]}" end private - def eval_internal(flag, user, events, event_factory) + def eval_internal(flag, user, state, event_factory) if !flag[:on] return get_off_value(flag, EvaluationReason::off) end - prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) + prereq_failure_reason = check_prerequisites(flag, user, state, event_factory) if !prereq_failure_reason.nil? 
return get_off_value(flag, prereq_failure_reason) end @@ -83,7 +103,7 @@ def eval_internal(flag, user, events, event_factory) rules = flag[:rules] || [] rules.each_index do |i| rule = rules[i] - if rule_match_user(rule, user) + if rule_match_user(rule, user, state) reason = rule[:_reason] # try to use cached reason for this rule reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? return get_value_for_variation_or_rollout(flag, rule, user, reason) @@ -98,7 +118,7 @@ def eval_internal(flag, user, events, event_factory) return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end - def check_prerequisites(flag, user, events, event_factory) + def check_prerequisites(flag, user, state, event_factory) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -109,14 +129,15 @@ def check_prerequisites(flag, user, events, event_factory) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, events, event_factory) + prereq_res = eval_internal(prereq_flag, user, state, event_factory) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) - events.push(event) + state.events = [] if state.events.nil? + state.events.push(event) rescue => exn Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false @@ -130,23 +151,23 @@ def check_prerequisites(flag, user, events, event_factory) nil end - def rule_match_user(rule, user) + def rule_match_user(rule, user, state) return false if !rule[:clauses] (rule[:clauses] || []).each do |clause| - return false if !clause_match_user(clause, user) + return false if !clause_match_user(clause, user, state) end return true end - def clause_match_user(clause, user) + def clause_match_user(clause, user, state) # In the case of a segment match operator, we check if the user is in any of the segments, # and possibly negate if clause[:op].to_sym == :segmentMatch result = (clause[:values] || []).any? { |v| segment = @get_segment.call(v) - !segment.nil? && segment_match_user(segment, user) + !segment.nil? && segment_match_user(segment, user, state) } clause[:negate] ? !result : result else @@ -168,11 +189,42 @@ def clause_match_user_no_segments(clause, user) clause[:negate] ? !result : result end - def segment_match_user(segment, user) + def segment_match_user(segment, user, state) return false unless user[:key] + segment[:unbounded] ? big_segment_match_user(segment, user, state) : simple_segment_match_user(segment, user, true) + end - return true if segment[:included].include?(user[:key]) - return false if segment[:excluded].include?(user[:key]) + def big_segment_match_user(segment, user, state) + if !segment[:generation] + # Big segment queries can only be done if the generation is known. If it's unset, + # that probably means the data store was populated by an older SDK that doesn't know + # about the generation property and therefore dropped it from the JSON data. We'll treat + # that as a "not configured" condition. + state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + return false + end + if !state.big_segments_status + result = @get_big_segments_membership.nil? ? 
nil : @get_big_segments_membership.call(user[:key]) + if result + state.big_segments_membership = result.membership + state.big_segments_status = result.status + else + state.big_segments_membership = nil + state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + end + end + segment_ref = Evaluator.make_big_segment_ref(segment) + membership = state.big_segments_membership + included = membership.nil? ? nil : membership[segment_ref] + return included if !included.nil? + simple_segment_match_user(segment, user, false) + end + + def simple_segment_match_user(segment, user, use_includes_and_excludes) + if use_includes_and_excludes + return true if segment[:included].include?(user[:key]) + return false if segment[:excluded].include?(user[:key]) + end (segment[:rules] || []).each do |r| return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) diff --git a/lib/ldclient-rb/impl/repeating_task.rb b/lib/ldclient-rb/impl/repeating_task.rb new file mode 100644 index 00000000..bb0255fe --- /dev/null +++ b/lib/ldclient-rb/impl/repeating_task.rb @@ -0,0 +1,47 @@ +require "ldclient-rb/util" + +require "concurrent/atomics" + +module LaunchDarkly + module Impl + class RepeatingTask + def initialize(interval, start_delay, task, logger) + @interval = interval + @start_delay = start_delay + @task = task + @logger = logger + @stopped = Concurrent::AtomicBoolean.new(false) + @worker = nil + end + + def start + @worker = Thread.new do + if @start_delay + sleep(@start_delay) + end + while !@stopped.value do + started_at = Time.now + begin + @task.call + rescue => e + LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e) + end + delta = @interval - (Time.now - started_at) + if delta > 0 + sleep(delta) + end + end + end + end + + def stop + if @stopped.make_true + if @worker && @worker.alive? && @worker != Thread.current + @worker.run # causes the thread to wake up if it's currently in a sleep + @worker.join + end + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 05b54d51..5a86ee23 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -247,6 +247,10 @@ def initialize(available, stale) # # @return [Boolean] attr_reader :stale + + def ==(other) + self.available == other.available && self.stale == other.stale + end end # diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ba2a7675..fc4ad173 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -57,10 +57,14 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + @big_segment_store_manager = Impl::BigSegmentStoreManager.new(config.big_segments, @config.logger) + @big_segment_store_status_provider = @big_segment_store_manager.status_provider + get_flag = lambda { |key| @store.get(FEATURES, key) } get_segment = lambda { |key| @store.get(SEGMENTS, key) } - @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, @config.logger) - + get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_user_membership(key) } + @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, get_big_segments_membership, @config.logger) + if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? 
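A hedged configuration sketch for the client wiring above; `MyBigSegmentStore` is a placeholder, not a real class name, standing in for any Big Segment store implementation that responds to `get_metadata`, `get_membership`, and `stop`.

    require "ldclient-rb"

    store = MyBigSegmentStore.new  # placeholder store object
    config = LaunchDarkly::Config.new(
      big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
    )
    client = LaunchDarkly::LDClient.new("my-sdk-key", config)
    client.big_segment_store_status_provider.status
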
diagnostic_accumulator = Impl::DiagnosticAccumulator.new(Impl::DiagnosticAccumulator.create_diagnostic_id(sdk_key)) else @@ -375,9 +379,18 @@ def close @config.logger.info { "[LDClient] Closing LaunchDarkly client..." } @data_source.stop @event_processor.stop + @big_segment_store_manager.stop @store.stop end + # + # Returns an interface for tracking the status of a Big Segment store. + # + # The {BigSegmentStoreStatusProvider} has methods for checking whether the Big Segment store + # is (as far as the SDK knows) currently operational and tracking changes in this status. + # + attr_reader :big_segment_store_status_provider + private def create_default_data_source(sdk_key, config, diagnostic_accumulator) diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index a9312413..d571f837 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/repeating_task" + require "concurrent/atomics" require "thread" @@ -9,8 +11,8 @@ def initialize(config, requestor) @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) - @stopped = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new + @task = Impl::RepeatingTask.new(@config.poll_interval, 0, -> { self.poll }, @config.logger) end def initialized? @@ -20,56 +22,35 @@ def initialized? def start return @ready unless @started.make_true @config.logger.info { "[LDClient] Initializing polling connection" } - create_worker + @task.start @ready end def stop - if @stopped.make_true - if @worker && @worker.alive? && @worker != Thread.current - @worker.run # causes the thread to wake up if it's currently in a sleep - @worker.join - end - @config.logger.info { "[LDClient] Polling connection stopped" } - end + @task.stop + @config.logger.info { "[LDClient] Polling connection stopped" } end def poll - all_data = @requestor.request_all_data - if all_data - @config.feature_store.init(all_data) - if @initialized.make_true - @config.logger.info { "[LDClient] Polling connection initialized" } - @ready.set - end - end - end - - def create_worker - @worker = Thread.new do - @config.logger.debug { "[LDClient] Starting polling worker" } - while !@stopped.value do - started_at = Time.now - begin - poll - rescue UnexpectedResponseError => e - message = Util.http_error_message(e.status, "polling request", "will retry") - @config.logger.error { "[LDClient] #{message}" }; - if !Util.http_error_recoverable?(e.status) - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop - end - rescue StandardError => exn - Util.log_exception(@config.logger, "Exception while polling", exn) - end - delta = @config.poll_interval - (Time.now - started_at) - if delta > 0 - sleep(delta) + begin + all_data = @requestor.request_all_data + if all_data + @config.feature_store.init(all_data) + if @initialized.make_true + @config.logger.info { "[LDClient] Polling connection initialized" } + @ready.set end end + rescue UnexpectedResponseError => e + message = Util.http_error_message(e.status, "polling request", "will retry") + @config.logger.error { "[LDClient] #{message}" }; + if !Util.http_error_recoverable?(e.status) + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop + end + rescue StandardError => e + Util.log_exception(@config.logger, "Exception while polling", e) end end - - private :poll, :create_worker end end diff --git 
a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb new file mode 100644 index 00000000..640e03dc --- /dev/null +++ b/spec/impl/big_segments_spec.rb @@ -0,0 +1,193 @@ +require "ldclient-rb/config" +require "ldclient-rb/impl/big_segments" + +require "concurrent/atomics" + +require "spec_helper" + +module LaunchDarkly + module Impl + describe BigSegmentStoreManager do + subject { BigSegmentStoreManager } + + let(:user_key) { 'userkey' } + let(:user_hash) { subject.hash_for_user_key(user_key) } + let(:null_logger) { double.as_null_object } + + def always_up_to_date + Interfaces::BigSegmentStoreMetadata.new(Util.current_time_millis) + end + + def always_stale + Interfaces::BigSegmentStoreMetadata.new(0) + end + + def with_manager(config) + manager = subject.new(config, null_logger) + begin + yield manager + ensure + manager.stop + end + end + + context "membership query" do + it "with uncached result and healthy status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with cached result and healthy status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).once.and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with stale status" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_stale) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "with stale status due to no store metadata" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(nil) + expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + + it "least recent user is evicted from cache" do + user_key_1, user_key_2, user_key_3 = 'userkey1', 'userkey2', 'userkey3' + user_hash_1, user_hash_2, user_hash_3 = subject.hash_for_user_key(user_key_1), + subject.hash_for_user_key(user_key_2), subject.hash_for_user_key(user_key_3) + memberships = { + user_hash_1 
=> { 'seg1': true }, + user_hash_2 => { 'seg2': true }, + user_hash_3 => { 'seg3': true } + } + queried_users = [] + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).exactly(4).times do |key| + queried_users << key + memberships[key] + end + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store, user_cache_size: 2)) do |m| + result1 = m.get_user_membership(user_key_1) + result2 = m.get_user_membership(user_key_2) + result3 = m.get_user_membership(user_key_3) + expect(result1).to eq(BigSegmentMembershipResult.new(memberships[user_hash_1], BigSegmentsStatus::HEALTHY)) + expect(result2).to eq(BigSegmentMembershipResult.new(memberships[user_hash_2], BigSegmentsStatus::HEALTHY)) + expect(result3).to eq(BigSegmentMembershipResult.new(memberships[user_hash_3], BigSegmentsStatus::HEALTHY)) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + + # Since the capacity is only 2 and user_key_1 was the least recently used, that key should be + # evicted by the user_key_3 query. Now only user_key_2 and user_key_3 are in the cache, and + # querying them again should not cause a new query to the store. + + result2a = m.get_user_membership(user_key_2) + result3a = m.get_user_membership(user_key_3) + expect(result2a).to eq(result2) + expect(result3a).to eq(result3) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + + result1a = m.get_user_membership(user_key_1) + expect(result1a).to eq(result1) + + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3, user_hash_1]) + end + end + end + + context "status polling" do + it "detects store unavailability" do + store = double + should_fail = Concurrent::AtomicBoolean.new(false) + expect(store).to receive(:get_metadata).at_least(:once) do + throw "sorry" if should_fail.value + always_up_to_date + end + allow(store).to receive(:stop) + + statuses = Queue.new + with_manager(BigSegmentsConfig.new(store: store, status_poll_interval: 0.01)) do |m| + m.status_provider.add_observer(SimpleObserver.new(->(value) { statuses << value })) + + status1 = statuses.pop() + expect(status1.available).to be(true) + + should_fail.make_true + + status2 = statuses.pop() + expect(status2.available).to be(false) + + should_fail.make_false + + status3 = statuses.pop() + expect(status3.available).to be(true) + end + end + + it "detects stale status" do + store = double + should_be_stale = Concurrent::AtomicBoolean.new(false) + expect(store).to receive(:get_metadata).at_least(:once) do + should_be_stale.value ? 
always_stale : always_up_to_date + end + allow(store).to receive(:stop) + + statuses = Queue.new + with_manager(BigSegmentsConfig.new(store: store, status_poll_interval: 0.01)) do |m| + m.status_provider.add_observer(SimpleObserver.new(->(value) { statuses << value })) + + status1 = statuses.pop() + expect(status1.stale).to be(false) + + should_be_stale.make_true + + status2 = statuses.pop() + expect(status2.stale).to be(true) + + should_be_stale.make_false + + status3 = statuses.pop() + expect(status3.stale).to be(false) + end + end + end + end + end +end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb new file mode 100644 index 00000000..b8a9e2e4 --- /dev/null +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -0,0 +1,160 @@ +require "ldclient-rb/impl/big_segments" + +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (big segments)", :evaluator_spec_base => true do + subject { Evaluator } + + it "segment is not matched if there is no way to query it" do + segment = { + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true, + generation: 1 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end + + it "segment with no generation is not matched" do + segment = { + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end + + it "matched with include" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "matched with rule" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, nil). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "unmatched by exclude regardless of rule" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + }; + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, false). 
+ build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end + + it "status is returned from provider" do + segment = { + key: 'test', + version: 1, + unbounded: true, + generation: 2 + } + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + with_big_segments_status(BigSegmentsStatus::STALE). + build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) + end + + it "queries state only once per user even if flag references multiple segments" do + segment1 = { + key: 'segmentkey1', + version: 1, + unbounded: true, + generation: 2 + } + segment2 = { + key: 'segmentkey2', + version: 1, + unbounded: true, + generation: 3 + } + flag = { + key: 'key', + on: true, + fallthrough: { variation: 0 }, + variations: [ false, true ], + rules: [ + { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, + { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} + ] + } + + queries = [] + e = EvaluatorBuilder.new(logger). + with_segment(segment1).with_segment(segment2). + with_big_segment_for_user(user, segment2, true). + record_big_segments_queries(queries). + build + # The membership deliberately does not include segment1, because we want the first rule to be + # a non-match so that it will continue on and check segment2 as well. + + result = e.evaluate(flag, user, factory) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + + expect(queries).to eq([ user[:key] ]) + end + end + end +end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 64fb1bc7..5cd85148 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -9,7 +9,7 @@ module Impl def test_segment_match(segment) clause = make_segment_match_clause(segment) flag = boolean_flag_with_clauses([clause]) - e = Evaluator.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e = EvaluatorBuilder.new(logger).with_segment(segment).build e.evaluate(flag, user, factory).detail.value end @@ -20,17 +20,13 @@ def test_segment_match(segment) version: 1, deleted: false } - get_segment = get_things({ 'segkey' => segment }) - e = subject.new(get_nothing, get_segment, logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) + e = EvaluatorBuilder.new(logger).with_segment(segment).build + flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) expect(e.evaluate(flag, user, factory).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do - e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) - user = { key: 'userkey' } + e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) expect(e.evaluate(flag, user, factory).detail.value).to be false diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 543b524d..15766866 100644 --- 
a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -79,7 +79,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) - e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -96,7 +96,7 @@ module Impl } Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason user = { key: 'x' } - e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build result1 = e.evaluate(flag, user, factory) expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') result2 = e.evaluate(flag, user, factory) @@ -126,8 +126,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things('feature1' => flag1, 'feature2' => nil) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -157,8 +156,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -186,8 +184,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) @@ -215,8 +212,7 @@ module Impl events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - get_flag = get_things({ 'feature1' => flag1 }) - e = subject.new(get_flag, get_nothing, logger) + e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index fa8b86c3..da8662ac 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -1,7 +1,79 @@ +require "ldclient-rb/impl/big_segments" + require "spec_helper" module LaunchDarkly module Impl + class EvaluatorBuilder + def initialize(logger) + @flags = {} + @segments = {} + @big_segment_memberships = {} + @big_segments_status = BigSegmentsStatus::HEALTHY + @big_segments_queries = [] + @logger = logger + end + + def with_flag(flag) + @flags[flag[:key]] = flag + self + end + + def with_unknown_flag(key) + @flags[key] = nil + self + end + + def with_segment(segment) + @segments[segment[:key]] = segment + self + end + + def 
with_unknown_segment(key) + @segments[key] = nil + self + end + + def with_big_segment_for_user(user, segment, included) + user_key = user[:key] + @big_segment_memberships[user_key] = {} if !@big_segment_memberships.has_key?(user_key) + @big_segment_memberships[user_key][Evaluator.make_big_segment_ref(segment)] = included + self + end + + def with_big_segments_status(status) + @big_segments_status = status + self + end + + def record_big_segments_queries(destination) + @big_segments_queries = destination + self + end + + def build + Evaluator.new(method(:get_flag), method(:get_segment), + @big_segment_memberships.empty? ? nil : method(:get_big_segments), + @logger) + end + + private def get_flag(key) + raise "should not have requested flag #{key}" if !@flags.has_key?(key) + @flags[key] + end + + private def get_segment(key) + raise "should not have requested segment #{key}" if !@segments.has_key?(key) + @segments[key] + end + + private def get_big_segments(user_key) + raise "should not have requested big segments for #{user_key}" if !@big_segment_memberships.has_key?(user_key) + @big_segments_queries << user_key + BigSegmentMembershipResult.new(@big_segment_memberships[user_key], @big_segments_status) + end + end + module EvaluatorSpecBase def factory EventFactory.new(false) @@ -19,19 +91,8 @@ def logger ::Logger.new($stdout, level: ::Logger::FATAL) end - def get_nothing - lambda { |key| raise "should not have requested #{key}" } - end - - def get_things(map) - lambda { |key| - raise "should not have requested #{key}" if !map.has_key?(key) - map[key] - } - end - def basic_evaluator - subject.new(get_nothing, get_nothing, logger) + EvaluatorBuilder.new(logger).build end def boolean_flag_with_rules(rules) @@ -42,7 +103,7 @@ def boolean_flag_with_clauses(clauses) boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) end - def make_user_matching_clause(user, attr) + def make_user_matching_clause(user, attr = :key) { attribute: attr.to_s, op: :in, diff --git a/spec/impl/repeating_task_spec.rb b/spec/impl/repeating_task_spec.rb new file mode 100644 index 00000000..ba780d78 --- /dev/null +++ b/spec/impl/repeating_task_spec.rb @@ -0,0 +1,78 @@ +require "ldclient-rb/impl/repeating_task" + +require "concurrent/atomics" + +require "spec_helper" + +module LaunchDarkly + module Impl + describe RepeatingTask do + def null_logger + double().as_null_object + end + + it "does not start when created" do + signal = Concurrent::Event.new + task = RepeatingTask.new(0.01, 0, -> { signal.set }, null_logger) + begin + expect(signal.wait(0.1)).to be false + ensure + task.stop + end + end + + it "executes until stopped" do + queue = Queue.new + task = RepeatingTask.new(0.1, 0, -> { queue << Time.now }, null_logger) + begin + last = nil + task.start + 3.times do + time = queue.pop + if !last.nil? 
+ expect(time.to_f - last.to_f).to be >=(0.05) + end + last = time + end + ensure + task.stop + stopped_time = Time.now + end + no_more_items = false + 2.times do + begin + time = queue.pop(true) + expect(time.to_f).to be <=(stopped_time.to_f) + rescue ThreadError + no_more_items = true + break + end + end + expect(no_more_items).to be true + end + + it "can be stopped from within the task" do + counter = 0 + stopped = Concurrent::Event.new + task = RepeatingTask.new(0.01, 0, + -> { + counter += 1 + if counter >= 2 + task.stop + stopped.set + end + }, + null_logger) + begin + task.start + expect(stopped.wait(0.1)).to be true + expect(counter).to be 2 + sleep(0.1) + expect(counter).to be 2 + ensure + task.stop + end + end + end + end +end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index 6366a6b7..cbefcf6c 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -1,11 +1,8 @@ require "http_util" +require "ldclient_spec_base" require "spec_helper" -SDK_KEY = "sdk-key" - -USER = { key: 'userkey' } - ALWAYS_TRUE_FLAG = { key: 'flagkey', version: 1, on: false, offVariation: 1, variations: [ false, true ] } DATA_WITH_ALWAYS_TRUE_FLAG = { flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, @@ -13,20 +10,11 @@ } PUT_EVENT_WITH_ALWAYS_TRUE_FLAG = "event: put\ndata:{\"data\":#{DATA_WITH_ALWAYS_TRUE_FLAG.to_json}}\n\n'" -def with_client(config) - client = LaunchDarkly::LDClient.new(SDK_KEY, config) - begin - yield client - ensure - client.close - end -end - module LaunchDarkly # Note that we can't do end-to-end tests in streaming mode until we have a test server that can do streaming # responses, which is difficult in WEBrick. - describe "LDClient end-to-end" do + describe "LDClient end-to-end", :ldclient_spec_base => true do it "starts in polling mode" do with_server do |poll_server| poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") @@ -35,11 +23,11 @@ module LaunchDarkly stream: false, base_uri: poll_server.base_uri.to_s, send_events: false, - logger: NullLogger.new + logger: null_logger ) with_client(config) do |client| expect(client.initialized?).to be true - expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], user, false)).to be true end end end @@ -52,11 +40,11 @@ module LaunchDarkly stream: false, base_uri: poll_server.base_uri.to_s, send_events: false, - logger: NullLogger.new + logger: null_logger ) with_client(config) do |client| expect(client.initialized?).to be false - expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], user, false)).to be false end end end @@ -72,14 +60,14 @@ module LaunchDarkly base_uri: poll_server.base_uri.to_s, events_uri: events_server.base_uri.to_s, diagnostic_opt_out: true, - logger: NullLogger.new + logger: null_logger ) with_client(config) do |client| - client.identify(USER) + client.identify(user) client.flush req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['authorization']).to eq [ sdk_key ] expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data.length).to eq 1 @@ -100,7 +88,7 @@ module LaunchDarkly stream: false, base_uri: poll_server.base_uri.to_s, events_uri: events_server.base_uri.to_s, - logger: NullLogger.new + logger: null_logger ) with_client(config) do |client| user 
= { key: 'userkey' } @@ -111,7 +99,7 @@ module LaunchDarkly req1, body1 = events_server.await_request_with_body req = req0.path == "/diagnostic" ? req0 : req1 body = req0.path == "/diagnostic" ? body0 : body1 - expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['authorization']).to eq [ sdk_key ] expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data["kind"]).to eq "diagnostic-init" @@ -131,18 +119,18 @@ module LaunchDarkly base_uri: "http://fake-polling-server", events_uri: "http://fake-events-server", diagnostic_opt_out: true, - logger: NullLogger.new, + logger: null_logger, socket_factory: SocketFactoryFromHash.new({ "fake-polling-server" => poll_server.port, "fake-events-server" => events_server.port }) ) with_client(config) do |client| - client.identify(USER) + client.identify(user) client.flush req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['authorization']).to eq [ sdk_key ] expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data.length).to eq 1 diff --git a/spec/ldclient_listeners_spec.rb b/spec/ldclient_listeners_spec.rb new file mode 100644 index 00000000..b86bbb82 --- /dev/null +++ b/spec/ldclient_listeners_spec.rb @@ -0,0 +1,48 @@ +require "ldclient_spec_base" +require "mock_components" +require "spec_helper" + +module LaunchDarkly + describe "LDClient event listeners/observers", :ldclient_spec_base => true do + context "big_segment_store_status_provider" do + it "returns unavailable status when not configured" do + with_client(base_config) do |client| + status = client.big_segment_store_status_provider.status + expect(status.available).to be(false) + expect(status.stale).to be(false) + end + end + + it "sends status updates" do + store = MockBigSegmentStore.new + store.setup_metadata(Time.now) + config = Config.new( + big_segments: BigSegmentsConfig.new( + store: store, + status_poll_interval: 0.01 + ), + send_events: false, + data_source: null_data_source, + logger: null_logger + ) + with_client(config) do |client| + status1 = client.big_segment_store_status_provider.status + expect(status1.available).to be(true) + expect(status1.stale).to be(false) + + statuses = Queue.new + observer = SimpleObserver.adding_to_queue(statuses) + client.big_segment_store_status_provider.add_observer(observer) + + store.setup_metadata_error(StandardError.new("sorry")) + + status2 = statuses.pop() + expect(status2.available).to be(false) + expect(status2.stale).to be(false) + + expect(client.big_segment_store_status_provider.status).to eq(status2) + end + end + end + end +end diff --git a/spec/ldclient_spec_base.rb b/spec/ldclient_spec_base.rb new file mode 100644 index 00000000..47ecd281 --- /dev/null +++ b/spec/ldclient_spec_base.rb @@ -0,0 +1,42 @@ +require "ldclient-rb/impl/big_segments" + +require "spec_helper" + +module LaunchDarkly + module LDClientSpecBase + def sdk_key + "sdk-key" + end + + def user + { + key: "userkey", + email: "test@example.com", + name: "Bob" + } + end + + def null_logger + double().as_null_object + end + + def null_data_source + NullUpdateProcessor.new + end + + def base_config + Config.new(send_events: false, data_source: null_data_source, logger: null_logger) + end + + def with_client(config) + client = LDClient.new(sdk_key, config) + begin + yield client + ensure + client.close + end + end + end + + RSpec.configure { |c| c.include LDClientSpecBase, :ldclient_spec_base => true } +end diff 
--git a/spec/mock_components.rb b/spec/mock_components.rb new file mode 100644 index 00000000..aa57431c --- /dev/null +++ b/spec/mock_components.rb @@ -0,0 +1,51 @@ +require "ldclient-rb/impl/big_segments" +require "ldclient-rb/interfaces" + +module LaunchDarkly + class MockBigSegmentStore + def initialize + @metadata = nil + @metadata_error = nil + @memberships = {} + end + + def get_metadata + raise @metadata_error if !@metadata_error.nil? + @metadata + end + + def get_membership(user_hash) + @memberships[user_hash] + end + + def stop + end + + def setup_metadata(last_up_to_date) + @metadata = Interfaces::BigSegmentStoreMetadata.new(last_up_to_date.to_f * 1000) + end + + def setup_metadata_error(ex) + @metadata_error = ex + end + + def setup_membership(user_key, membership) + user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) + @memberships[user_hash] = membership + end + end + + class SimpleObserver + def initialize(fn) + @fn = fn + end + + def update(value) + @fn.call(value) + end + + def self.adding_to_queue(q) + new(->(value) { q << value }) + end + end +end From a0794d998e26c841d3e597633abeb4b5dea6e830 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Nov 2021 18:50:25 -0800 Subject: [PATCH 208/292] improve CONTRIBUTING.md with notes on code organization --- CONTRIBUTING.md | 35 +++++++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 49c6df85..902d660e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,20 +1,16 @@ -Contributing to the LaunchDarkly Server-side SDK for Ruby -================================================ +# Contributing to the LaunchDarkly Server-side SDK for Ruby LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/sdk/concepts/contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. -Submitting bug reports and feature requests ------------------- +## Submitting bug reports and feature requests The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/ruby-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. -Submitting pull requests ------------------- +## Submitting pull requests We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. -Build instructions ------------------- +## Build instructions ### Prerequisites @@ -35,3 +31,26 @@ bundle exec rspec spec ``` By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. + +### Building documentation + +Documentation is built automatically with YARD for each release. To build the documentation locally: + +``` +cd docs +make +``` + +The output will appear in `docs/build/html`. 
+ +## Code organization + +The SDK's namespacing convention is as follows: + +* `LaunchDarkly`: This namespace contains the most commonly used classes and methods in the SDK, such as `LDClient` and `EvaluationDetail`. +* `LaunchDarkly::Integrations`: This namespace contains entry points for optional features that are related to how the SDK communicates with other systems, such as `Redis`. +* `LaunchDarkly::Interfaces`: This namespace contains types that do not do anything by themselves, but may need to be referenced if you are using optional features or implementing a custom component. + +A special case is the namespace `LaunchDarkly::Impl`, and any namespaces within it. Everything under `Impl` is considered a private implementation detail: all files there are excluded from the generated documentation, and are considered subject to change at any time and not supported for direct use by application developers. We do this because Ruby's scope/visibility system is somewhat limited compared to other languages: a method can be `private` or `protected` within a class, but there is no way to make it visible to other classes in the SDK yet invisible to code outside of the SDK, and there is similarly no way to hide a class. + +So, if there is a class whose existence is entirely an implementation detail, it should be in `Impl`. Similarly, classes that are _not_ in `Impl` must not expose any public members that are not meant to be part of the supported public API. This is important because of our guarantee of backward compatibility for all public APIs within a major version: we want to be able to change our implementation details to suit the needs of the code, without worrying about breaking a customer's code. Due to how the language works, we can't actually prevent an application developer from referencing those classes in their code, but this convention makes it clear that such use is discouraged and unsupported. From 7199ff1621839eeb7be7522fc9843d7866d5ef9c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Nov 2021 19:00:20 -0800 Subject: [PATCH 209/292] add note about doc comments --- CONTRIBUTING.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 902d660e..edaa9a64 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -54,3 +54,9 @@ The SDK's namespacing convention is as follows: A special case is the namespace `LaunchDarkly::Impl`, and any namespaces within it. Everything under `Impl` is considered a private implementation detail: all files there are excluded from the generated documentation, and are considered subject to change at any time and not supported for direct use by application developers. We do this because Ruby's scope/visibility system is somewhat limited compared to other languages: a method can be `private` or `protected` within a class, but there is no way to make it visible to other classes in the SDK yet invisible to code outside of the SDK, and there is similarly no way to hide a class. So, if there is a class whose existence is entirely an implementation detail, it should be in `Impl`. Similarly, classes that are _not_ in `Impl` must not expose any public members that are not meant to be part of the supported public API. This is important because of our guarantee of backward compatibility for all public APIs within a major version: we want to be able to change our implementation details to suit the needs of the code, without worrying about breaking a customer's code. 
Due to how the language works, we can't actually prevent an application developer from referencing those classes in their code, but this convention makes it clear that such use is discouraged and unsupported. + +## Documenting types and methods + +All classes and public methods outside of `LaunchDarkly::Impl` should have documentation comments. These are used to build the API documentation that is published at https://launchdarkly.github.io/ruby-server-sdk/ and https://www.rubydoc.info/gems/launchdarkly-server-sdk. The documentation generator is YARD; see https://yardoc.org/ for the comment format it uses. + +Please try to make the style and terminology in documentation comments consistent with other documentation comments in the SDK. Also, if a class or method is being added that has an equivalent in other SDKs, and if we have described it in a consistent away in those other SDKs, please reuse the text whenever possible (with adjustments for anything language-specific) rather than writing new text. From b9f879f73713c9b8310460ae8472f7e03b0fd2e5 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Mon, 29 Nov 2021 18:58:42 +0000 Subject: [PATCH 210/292] Cleanup YARD warnings and cleanup docs --- lib/ldclient-rb/integrations/test_data.rb | 10 ++-- .../integrations/test_data/flag_builder.rb | 55 +++++++++++-------- 2 files changed, 38 insertions(+), 27 deletions(-) diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 0d661056..f75c657f 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -33,8 +33,7 @@ class TestData # Creates a new instance of the test data source. # # - # @return a new configurable test data source - # + # @return [TestData] a new configurable test data source def self.data_source self.new end @@ -52,6 +51,7 @@ def initialize # Called internally by the SDK to determine what arguments to pass to call # You do not need to call this method. # + # @private def arity 2 end @@ -60,6 +60,7 @@ def arity # Called internally by the SDK to associate this test data source with an {@code LDClient} instance. # You do not need to call this method. # + # @private def call(_, config) impl = LaunchDarkly::Impl::Integrations::TestData::TestDataSource.new(config.feature_store, self) @instances_lock.with_write_lock { @instances.push(impl) } @@ -73,7 +74,7 @@ def call(_, config) # starts with the same configuration that was last provided for this flag. # # Otherwise, it starts with a new default configuration in which the flag has `true` and - # `false variations, is `true` for all users when targeting is turned on and + # `false` variations, is `true` for all users when targeting is turned on and # `false` otherwise, and currently has targeting turned on. You can change any of those # properties, and provide more complex behavior, using the {FlagBuilder} methods. # @@ -104,7 +105,7 @@ def flag(key) # unless you call {#update} again. 
# # @param flag_builder [FlagBuilder] a flag configuration builder - # @return [TestData] the same `TestData` instance + # @return [TestData] self # def update(flag_builder) new_flag = nil @@ -123,6 +124,7 @@ def update(flag_builder) instance.upsert(new_flag) end end + self end # @private diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index f2256c33..c4f4d34c 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -4,10 +4,10 @@ module LaunchDarkly module Integrations class TestData # - # A builder for feature flag configurations to be used with {TestDataImpl}. + # A builder for feature flag configurations to be used with {TestData}. # - # @see TestDataImpl#flag - # @see TestDataImpl#update + # @see TestData#flag + # @see TestData#update # class FlagBuilder attr_reader :key @@ -32,7 +32,7 @@ def initialize_copy(other) # # The effect of this depends on the rest of the flag configuration, just as it does on the # real LaunchDarkly dashboard. In the default configuration that you get from calling - # {TestDataImpl#flag} with a new flag key, the flag will return `false` + # {TestData#flag} with a new flag key, the flag will return `false` # whenever targeting is off, and `true` when targeting is on. # # @param on [Boolean] true if targeting should be on @@ -53,7 +53,7 @@ def on(on) # # @param variation [Boolean, Integer] true or false or the desired fallthrough variation index: # 0 for the first, 1 for the second, etc. - # @return the builder + # @return [FlagBuilder] the builder # def fallthrough_variation(variation) if LaunchDarkly::Util.is_bool variation then @@ -91,7 +91,15 @@ def off_variation(variation) # normally has `true, false`; a string-valued flag might have # `'red', 'green'`; etc. 
# - # @param *variations [Array] the desired variations + # @example A single variation + # td.flag('new-flag') + # .variations(true) + # + # @example Multiple variations + # td.flag('new-flag') + # .variations('red', 'green', 'blue') + # + # @param variations [Array] the the desired variations # @return [FlagBuilder] the builder # def variations(*variations) @@ -180,12 +188,12 @@ def variation_for_user(user_key, variation) # .then_return(true); # # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to + # @param values [Array] values to compare to # @return [FlagRuleBuilder] a flag rule builder # - # @see {FlagRuleBuilder#then_return} call to finish the rule - # @see {FlagRuleBuilder#and_match} add more tests - # @see {FlagRuleBuilder#and_not_match} add more tests + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match # def if_match(attribute, *values) FlagRuleBuilder.new(self).and_match(attribute, *values) @@ -200,12 +208,12 @@ def if_match(attribute, *values) # .then_return(true) # # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to + # @param values [Array] values to compare to # @return [FlagRuleBuilder] a flag rule builder # - # @see {FlagRuleBuilder#then_return} call to finish the rule - # @see {FlagRuleBuilder#and_match} add more tests - # @see {FlagRuleBuilder#and_not_match} add more tests + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match # def if_not_match(attribute, *values) FlagRuleBuilder.new(self).and_not_match(attribute, *values) @@ -243,14 +251,14 @@ def add_rule(rule) end # - # A shortcut for setting the flag to use the standard boolean configuration. + # A shortcut for setting the flag to use the standard boolean configuration. # - # This is the default for all new flags created with {TestDataImpl#flag}. - # The flag will have two variations, `true` and `false` (in that order); - # it will return `false` whenever targeting is off, and `true` when targeting is on - # if no other settings specify otherwise. + # This is the default for all new flags created with {TestData#flag}. + # The flag will have two variations, `true` and `false` (in that order); + # it will return `false` whenever targeting is off, and `true` when targeting is on + # if no other settings specify otherwise. # - # @return [FlagBuilder] the builder + # @return [FlagBuilder] the builder # def boolean_flag if is_boolean_flag then @@ -308,6 +316,7 @@ def build(version) # Finally, call {#then_return} to finish defining the rule. 
# class FlagRuleBuilder + # @private FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) # @private @@ -332,7 +341,7 @@ def intialize_copy(other) # .then_return(true) # # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to + # @param values [Array] values to compare to # @return [FlagRuleBuilder] the rule builder # def and_match(attribute, *values) @@ -355,7 +364,7 @@ def and_match(attribute, *values) # .then_return(true) # # @param attribute [Symbol] the user attribute to match against - # @param *values [Array] values to compare to + # @param values [Array] values to compare to # @return [FlagRuleBuilder] the rule builder # def and_not_match(attribute, *values) @@ -377,7 +386,7 @@ def and_not_match(attribute, *values) # # @param variation [Boolean, Integer] true or false or the desired variation index: # 0 for the first, 1 for the second, etc. - # @result [FlagBuilder] the flag builder with this rule added + # @return [FlagBuilder] the flag builder with this rule added # def then_return(variation) if LaunchDarkly::Util.is_bool variation then From 5bbf2d31b3d6302a3cf9f27c0a68c5d05f2b3e75 Mon Sep 17 00:00:00 2001 From: Ben Levy Date: Tue, 30 Nov 2021 20:40:18 +0000 Subject: [PATCH 211/292] Address PR feedback: Move is_bool back to Impl namespace to avoid confusion; Remove unnecessary nil check on variations in build function; fixup comments --- lib/ldclient-rb/impl/util.rb | 4 ++++ lib/ldclient-rb/integrations.rb | 4 ---- lib/ldclient-rb/integrations/test_data.rb | 3 +-- .../integrations/test_data/flag_builder.rb | 15 ++++++--------- lib/ldclient-rb/util.rb | 4 ---- 5 files changed, 11 insertions(+), 19 deletions(-) diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index fe82cea1..5fe93a2b 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,6 +1,10 @@ module LaunchDarkly module Impl module Util + def self.is_bool(aObject) + [true,false].include? aObject + end + def self.current_time_millis (Time.now.to_f * 1000).to_i end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index a4b5f789..fccea008 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -53,9 +53,5 @@ module Util # code is in ldclient-rb/integrations/util/ end - - class TestData - # code is in ldclient-rb/integrations/test_data - end end end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index f75c657f..3a8f190f 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -12,7 +12,7 @@ module Integrations # the data that the application has put into it using the {#update} method. # # @example - # td = LaunchDarkly::Integrations::TestData.factory + # td = LaunchDarkly::Integrations::TestData.data_source # td.update(td.flag("flag-key-1").variation_for_all_users(true)) # config = LaunchDarkly::Config.new(data_source: td) # client = LaunchDarkly::LDClient.new('sdkKey', config) @@ -32,7 +32,6 @@ module Integrations class TestData # Creates a new instance of the test data source. 
# - # # @return [TestData] a new configurable test data source def self.data_source self.new diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index c4f4d34c..3f3cc36c 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -56,7 +56,7 @@ def on(on) # @return [FlagBuilder] the builder # def fallthrough_variation(variation) - if LaunchDarkly::Util.is_bool variation then + if LaunchDarkly::Impl::Util.is_bool variation then boolean_flag.fallthrough_variation(variation_for_boolean(variation)) else @fallthrough_variation = variation @@ -76,7 +76,7 @@ def fallthrough_variation(variation) # @return [FlagBuilder] the builder # def off_variation(variation) - if LaunchDarkly::Util.is_bool variation then + if LaunchDarkly::Impl::Util.is_bool variation then boolean_flag.off_variation(variation_for_boolean(variation)) else @off_variation = variation @@ -121,7 +121,7 @@ def variations(*variations) # @return [FlagBuilder] the builder # def variation_for_all_users(variation) - if LaunchDarkly::Util.is_bool variation then + if LaunchDarkly::Impl::Util.is_bool variation then boolean_flag.variation_for_all_users(variation_for_boolean(variation)) else on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) @@ -158,7 +158,7 @@ def value_for_all_users(value) # @return [FlagBuilder] the builder # def variation_for_user(user_key, variation) - if LaunchDarkly::Util.is_bool variation then + if LaunchDarkly::Impl::Util.is_bool variation then boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) else if @targets.nil? then @@ -275,6 +275,7 @@ def build(version) res = { key: @key, version: version, on: @on, + variations: @variations, } unless @off_variation.nil? then @@ -285,10 +286,6 @@ def build(version) res[:fallthrough] = { variation: @fallthrough_variation } end - unless @variations.nil? then - res[:variations] = @variations - end - unless @targets.nil? then res[:targets] = @targets.collect do | variation, values | { variation: variation, values: values } @@ -389,7 +386,7 @@ def and_not_match(attribute, *values) # @return [FlagBuilder] the flag builder with this rule added # def then_return(variation) - if LaunchDarkly::Util.is_bool variation then + if LaunchDarkly::Impl::Util.is_bool variation then @variation = @flag_builder.variation_for_boolean(variation) @flag_builder.boolean_flag.add_rule(self) else diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index a82590b8..7bd56959 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,10 +4,6 @@ module LaunchDarkly # @private module Util - def self.is_bool(aObject) - [true,false].include? aObject - end - def self.stringify_attrs(hash, attrs) return hash if hash.nil? 
ret = hash From 19087f238c994ed79b8671ff4fddc4cb72fe52f9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Dec 2021 14:00:17 -0800 Subject: [PATCH 212/292] (big segments 3) implement Redis & DynamoDB big segment stores (#169) --- .../impl/integrations/dynamodb_impl.rb | 100 +++++-- .../impl/integrations/redis_impl.rb | 115 ++++++-- lib/ldclient-rb/integrations/dynamodb.rb | 39 ++- lib/ldclient-rb/integrations/redis.rb | 32 +++ spec/big_segment_store_spec_base.rb | 112 ++++++++ spec/feature_store_spec_base.rb | 265 +++++++++++------- spec/in_memory_feature_store_spec.rb | 10 +- .../integrations/consul_feature_store_spec.rb | 37 ++- .../dynamodb_feature_store_spec.rb | 103 ------- spec/integrations/dynamodb_stores_spec.rb | 150 ++++++++++ spec/integrations/redis_stores_spec.rb | 152 ++++++++++ spec/redis_feature_store_spec.rb | 121 -------- spec/spec_helper.rb | 16 ++ 13 files changed, 850 insertions(+), 402 deletions(-) create mode 100644 spec/big_segment_store_spec_base.rb delete mode 100644 spec/integrations/dynamodb_feature_store_spec.rb create mode 100644 spec/integrations/dynamodb_stores_spec.rb create mode 100644 spec/integrations/redis_stores_spec.rb delete mode 100644 spec/redis_feature_store_spec.rb diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 464eb5e4..4085e53d 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -4,10 +4,7 @@ module LaunchDarkly module Impl module Integrations module DynamoDB - # - # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. - # - class DynamoDBFeatureStoreCore + class DynamoDBStoreImplBase begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -19,29 +16,50 @@ class DynamoDBFeatureStoreCore AWS_SDK_ENABLED = false end end - + PARTITION_KEY = "namespace" SORT_KEY = "key" - VERSION_ATTRIBUTE = "version" - ITEM_JSON_ATTRIBUTE = "item" - def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem") end - + @table_name = table_name - @prefix = opts[:prefix] + @prefix = opts[:prefix] ? (opts[:prefix] + ":") : "" @logger = opts[:logger] || Config.default_logger - + if !opts[:existing_client].nil? @client = opts[:existing_client] else @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end + + @logger.info("${description}: using DynamoDB table \"#{table_name}\"") + end + + def stop + # AWS client doesn't seem to have a close method + end - @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + protected def description + "DynamoDB" + end + end + + # + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # + class DynamoDBFeatureStoreCore < DynamoDBStoreImplBase + VERSION_ATTRIBUTE = "version" + ITEM_JSON_ATTRIBUTE = "item" + + def initialize(table_name, opts) + super(table_name, opts) + end + + def description + "DynamoDBFeatureStore" end def init_internal(all_data) @@ -124,14 +142,10 @@ def initialized_internal? !resp.item.nil? && resp.item.length > 0 end - def stop - # AWS client doesn't seem to have a close method - end - private def prefixed_namespace(base_str) - (@prefix.nil? || @prefix == "") ? 
base_str : "#{@prefix}:#{base_str}" + @prefix + base_str end def namespace_for_kind(kind) @@ -208,6 +222,56 @@ def unmarshal_item(kind, item) end end + class DynamoDBBigSegmentStore < DynamoDBStoreImplBase + KEY_METADATA = 'big_segments_metadata'; + KEY_USER_DATA = 'big_segments_user'; + ATTR_SYNC_TIME = 'synchronizedOn'; + ATTR_INCLUDED = 'included'; + ATTR_EXCLUDED = 'excluded'; + + def initialize(table_name, opts) + super(table_name, opts) + end + + def description + "DynamoDBBigSegmentStore" + end + + def get_metadata + key = @prefix + KEY_METADATA + data = @client.get_item( + table_name: @table_name, + key: { + PARTITION_KEY => key, + SORT_KEY => key + } + ) + timestamp = data.item && data.item[ATTR_SYNC_TIME] ? + data.item[ATTR_SYNC_TIME] : nil + LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp) + end + + def get_membership(user_hash) + data = @client.get_item( + table_name: @table_name, + key: { + PARTITION_KEY => @prefix + KEY_USER_DATA, + SORT_KEY => user_hash + }) + return nil if !data.item + excluded_refs = data.item[ATTR_EXCLUDED] || [] + included_refs = data.item[ATTR_INCLUDED] || [] + if excluded_refs.empty? && included_refs.empty? + nil + else + membership = {} + excluded_refs.each { |ref| membership[ref] = false } + included_refs.each { |ref| membership[ref] = true } + membership + end + end + end + class DynamoDBUtil # # Calls client.batch_write_item as many times as necessary to submit all of the given requests. diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index f948e54a..193a50da 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -5,10 +5,7 @@ module LaunchDarkly module Impl module Integrations module Redis - # - # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
- # - class RedisFeatureStoreCore + class RedisStoreImplBase begin require "redis" require "connection_pool" @@ -19,22 +16,14 @@ class RedisFeatureStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool") end - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - ::Redis.new(@redis_opts) - end + @pool = create_redis_pool(opts) + # shutdown pool on close unless the client passed a custom pool and specified not to shutdown @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true)) + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -42,10 +31,53 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ - and prefix: #{@prefix}") + @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}") + end + end + + def stop + if @stopped.make_true + return unless @pool_shutdown_on_close + @pool.shutdown { |redis| redis.close } + end + end + + protected def description + "Redis" + end + + protected def with_connection + @pool.with { |redis| yield(redis) } + end + + private def create_redis_pool(opts) + redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new + if opts[:redis_url] + redis_opts[:url] = opts[:redis_url] + end + if !redis_opts.include?(:url) + redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url + end + max_connections = opts[:max_connections] || 16 + return opts[:pool] || ConnectionPool.new(size: max_connections) do + ::Redis.new(redis_opts) end end + end + + # + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. + # + class RedisFeatureStoreCore < RedisStoreImplBase + def initialize(opts) + super(opts) + + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + end + + def description + "RedisFeatureStore" + end def init_internal(all_data) count = 0 @@ -103,8 +135,7 @@ def upsert_internal(kind, new_item) else final_item = old_item action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ - in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch end @@ -117,13 +148,6 @@ def initialized_internal? 
with_connection { |redis| redis.exists?(inited_key) } end - def stop - if @stopped.make_true - return unless @pool_shutdown_on_close - @pool.shutdown { |redis| redis.close } - end - end - private def before_update_transaction(base_key, key) @@ -142,14 +166,43 @@ def inited_key @prefix + ":$inited" end - def with_connection - @pool.with { |redis| yield(redis) } - end - def get_redis(redis, kind, key) Model.deserialize(kind, redis.hget(items_key(kind), key)) end end + + # + # Internal implementation of the Redis big segment store. + # + class RedisBigSegmentStore < RedisStoreImplBase + KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on' + KEY_USER_INCLUDE = ':big_segment_include:' + KEY_USER_EXCLUDE = ':big_segment_exclude:' + + def description + "RedisBigSegmentStore" + end + + def get_metadata + value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) } + Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i) + end + + def get_membership(user_hash) + with_connection do |redis| + included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + user_hash) + excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + user_hash) + if !included_refs && !excluded_refs + nil + else + membership = {} + excluded_refs.each { |ref| membership[ref] = false } + included_refs.each { |ref| membership[ref] = true } + membership + end + end + end + end end end end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index c3af07d5..2f6c4ba1 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -40,7 +40,44 @@ module DynamoDB # def self.new_feature_store(table_name, opts) core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) - return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + + # + # Creates a DynamoDB-backed Big Segment store. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or + # the full `aws-sdk`. Then, put the object returned by this method into the `store` property of your + # Big Segments configuration (see `Config`). + # + # @example Configuring Big Segments + # store = LaunchDarkly::Integrations::DynamoDB::new_big_segment_store("my-table-name") + # config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # Note that the specified table must already exist in DynamoDB. It must have a partition key called + # "namespace", and a sort key called "key" (both strings). The SDK does not create the table + # automatically because it has no way of knowing what additional properties (such as permissions + # and throughput) you would want it to have. + # + # By default, the DynamoDB client will try to get your AWS credentials and region name from + # environment variables and/or local configuration files, as described in the AWS SDK documentation. + # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an + # already-configured DynamoDB client in `existing_client`. 
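Since the SDK will not create the table itself, a table with the required "namespace"/"key" schema can be provisioned with a plain aws-sdk-dynamodb call. The sketch below mirrors the `create_table_if_necessary` helper added to the test suite later in this series; the table name and throughput values are placeholders:

    require "aws-sdk-dynamodb"

    client = Aws::DynamoDB::Client.new  # credentials/region come from the environment, as noted above
    client.create_table(
      table_name: "my-table-name",
      key_schema: [
        { attribute_name: "namespace", key_type: "HASH" },
        { attribute_name: "key", key_type: "RANGE" }
      ],
      attribute_definitions: [
        { attribute_name: "namespace", attribute_type: "S" },
        { attribute_name: "key", attribute_type: "S" }
      ],
      provisioned_throughput: { read_capacity_units: 1, write_capacity_units: 1 }
    )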
+ # + # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`, + # except that there are no caching parameters) + # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object + # + def self.new_big_segment_store(table_name, opts) + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore.new(table_name, opts) end end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 5792d554..74af507a 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -53,6 +53,38 @@ def self.default_prefix def self.new_feature_store(opts) return RedisFeatureStore.new(opts) end + + # + # Creates a Redis-backed Big Segment store. + # + # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # documentation: https://docs.launchdarkly.com/home/users/big-segments + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, + # put the object returned by this method into the `store` property of your Big Segments configuration + # (see `Config`). + # + # @example Configuring Big Segments + # store = LaunchDarkly::Integrations::Redis::new_big_segment_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`, + # except that there are no caching parameters) + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool; + # this is true by default, and should be set to false only if you are managing the pool yourself and want its + # lifecycle to be independent of the SDK client + # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object + # + def self.new_big_segment_store(opts) + return LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts) + end end end end diff --git a/spec/big_segment_store_spec_base.rb b/spec/big_segment_store_spec_base.rb new file mode 100644 index 00000000..29f344a1 --- /dev/null +++ b/spec/big_segment_store_spec_base.rb @@ -0,0 +1,112 @@ +require "spec_helper" + +# Reusable test logic for testing BigSegmentStore implementations. 
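As a sketch of the `:pool` and `:pool_shutdown_on_close` options documented for the Redis integration above (the URL and pool sizing are placeholders; the same options are exercised by the Redis store tests later in this series):

    require "connection_pool"
    require "redis"

    pool = ConnectionPool.new(size: 5, timeout: 3) { Redis.new(url: "redis://localhost:6379") }
    store = LaunchDarkly::Integrations::Redis.new_feature_store(
      pool: pool,
      pool_shutdown_on_close: false  # the SDK leaves the pool running when the store stops
    )
    # ... hand the store to the SDK, e.g. via LaunchDarkly::Config.new(feature_store: store) ...
    store.stop                          # does not shut the pool down, because of the flag above
    pool.shutdown { |conn| conn.close } # the caller remains responsible for the pool's lifecycle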
+# +# Usage: +# +# class MyStoreTester +# def initialize(options) +# @options = options +# end +# def create_big_segment_store +# MyBigSegmentStoreImplClass.new(@options) +# end +# def clear_data +# # clear any existing data from the database, taking @options[:prefix] into account +# end +# def set_big_segments_metadata(metadata) +# # write the metadata to the database, taking @options[:prefix] into account +# end +# def set_big_segments(user_hash, includes, excludes) +# # update the include and exclude lists for a user, taking @options[:prefix] into account +# end +# end +# +# describe "my big segment store" do +# include_examples "big_segment_store", MyStoreTester +# end + +shared_examples "big_segment_store" do |store_tester_class| + base_options = { logger: $null_logger } + + prefix_test_groups = [ + ["with default prefix", {}], + ["with specified prefix", { prefix: "testprefix" }] + ] + prefix_test_groups.each do |subgroup_description, prefix_options| + context(subgroup_description) do + # The following tests are done for each permutation of (default prefix/specified prefix) + + let(:store_tester) { store_tester_class.new(prefix_options.merge(base_options)) } + let(:fake_user_hash) { "userhash" } + + def with_empty_store + store_tester.clear_data + ensure_stop(store_tester.create_big_segment_store) do |store| + yield store + end + end + + context "get_metadata" do + it "valid value" do + expected_timestamp = 1234567890 + with_empty_store do |store| + store_tester.set_big_segments_metadata(LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(expected_timestamp)) + + actual = store.get_metadata + + expect(actual).not_to be nil + expect(actual.last_up_to_date).to eq(expected_timestamp) + end + end + + it "no value" do + with_empty_store do |store| + actual = store.get_metadata + + expect(actual).not_to be nil + expect(actual.last_up_to_date).to be nil + end + end + end + + context "get_membership" do + it "not found" do + with_empty_store do |store| + membership = store.get_membership(fake_user_hash) + membership = {} if membership.nil? + + expect(membership).to eq({}) + end + end + + it "includes only" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], []) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => true, "key2" => true }) + end + end + + it "excludes only" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, [], ["key1", "key2"]) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => false, "key2" => false }) + end + end + + it "includes and excludes" do + with_empty_store do |store| + store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], ["key2", "key3"]) + + membership = store.get_membership(fake_user_hash) + expect(membership).to eq({ "key1" => true, "key2" => true, "key3" => false }) # include of key2 overrides exclude + end + end + end + end + end +end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 2d06f0ff..78fc8596 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,31 +1,56 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method, clear_data_method| - - # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that feature stores need to be able to handle arbitrary data. 
- let(:things_kind) { { namespace: "things" } } - - let(:key1) { "thing1" } - let(:thing1) { - { - key: key1, - name: "Thing 1", - version: 11, - deleted: false - } - } - let(:unused_key) { "no" } +# Reusable test logic for testing FeatureStore implementations. +# +# Usage: +# +# 1. For a persistent store (database integration) +# class MyStoreTester +# def initialize(options) +# @options = options # the test logic will pass in options like prefix and expiration +# end +# def create_feature_store +# MyFeatureStoreClass.new_feature_store(@options) +# end +# def clear_data +# # clear any existing data from the database, taking @options[:prefix] into account if any +# end +# end +# +# describe "my persistent feature store" do +# include_examples "persistent_feature_store", MyStoreTester +# end +# +# 2. For a non-persistent store (the in-memory implementation) +# class MyStoreTester +# def create_feature_store +# MyFeatureStoreClass.new_feature_store(@options) +# end +# end +# +# describe "my feature store" do +# include_examples "any_feature_store", MyStoreTester.new +# end - let(:create_store) { create_store_method } # just to avoid a scope issue - let(:clear_data) { clear_data_method } +# Rather than testing with feature flag or segment data, we'll use this fake data kind +# to make it clear that feature stores need to be able to handle arbitrary data. +$things_kind = { namespace: "things" } - def with_store(opts = {}) - s = create_store.call(opts) - begin - yield s - ensure - s.stop +$key1 = "$thing1" +$thing1 = { + key: $key1, + name: "Thing 1", + version: 11, + deleted: false +} +$unused_key = "no" + +shared_examples "any_feature_store" do |store_tester| + let(:store_tester) { store_tester } + + def with_store() + ensure_stop(store_tester.create_feature_store) do |store| + yield store end end @@ -34,7 +59,7 @@ def with_inited_store(things) things.each { |thing| things_hash[thing[:key].to_sym] = thing } with_store do |s| - s.init({ things_kind => things_hash }) + s.init({ $things_kind => things_hash }) yield s end end @@ -43,49 +68,9 @@ def new_version_plus(f, deltaVersion, attrs = {}) f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs) end - before(:each) do - clear_data.call if !clear_data.nil? - end - - # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store - # that operates on a database that can be shared with other store instances (as opposed to the in-memory store, - # which has its own private storage). - if !clear_data_method.nil? 
- it "is not initialized by default" do - with_store do |store| - expect(store.initialized?).to eq false - end - end - - it "can detect if another instance has initialized the store" do - with_store do |store1| - store1.init({}) - with_store do |store2| - expect(store2.initialized?).to eq true - end - end - end - - it "can read data written by another instance" do - with_store do |store1| - store1.init({ things_kind => { key1.to_sym => thing1 } }) - with_store do |store2| - expect(store2.get(things_kind, key1)).to eq thing1 - end - end - end - - it "is independent from other stores with different prefixes" do - with_store({ prefix: "a" }) do |store_a| - store_a.init({ things_kind => { key1.to_sym => thing1 } }) - with_store({ prefix: "b" }) do |store_b| - store_b.init({ things_kind => {} }) - end - with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data - expect(store_b1.get(things_kind, key1)).to be_nil - expect(store_a.get(things_kind, key1)).to eq thing1 - end - end + it "is not initialized by default" do + with_store do |store| + expect(store.initialized?).to eq false end end @@ -96,27 +81,27 @@ def new_version_plus(f, deltaVersion, attrs = {}) end it "can get existing item with symbol key" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, key1.to_sym)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, $key1.to_sym)).to eq $thing1 end end it "can get existing item with string key" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, key1.to_s)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, $key1.to_s)).to eq $thing1 end end it "gets nil for nonexisting item" do - with_inited_store([ thing1 ]) do |store| - expect(store.get(things_kind, unused_key)).to be_nil + with_inited_store([ $thing1 ]) do |store| + expect(store.get($things_kind, $unused_key)).to be_nil end end it "returns nil for deleted item" do - deleted_thing = thing1.clone.merge({ deleted: true }) + deleted_thing = $thing1.clone.merge({ deleted: true }) with_inited_store([ deleted_thing ]) do |store| - expect(store.get(things_kind, key1)).to be_nil + expect(store.get($things_kind, $key1)).to be_nil end end @@ -128,8 +113,8 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 22, deleted: false } - with_inited_store([ thing1, thing2 ]) do |store| - expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 }) + with_inited_store([ $thing1, thing2 ]) do |store| + expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1, key2.to_sym => thing2 }) end end @@ -141,60 +126,60 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 22, deleted: true } - with_inited_store([ thing1, thing2 ]) do |store| - expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 }) + with_inited_store([ $thing1, thing2 ]) do |store| + expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1 }) end end it "can add new item" do with_inited_store([]) do |store| - store.upsert(things_kind, thing1) - expect(store.get(things_kind, key1)).to eq thing1 + store.upsert($things_kind, $thing1) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "can update item with newer version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1_mod + with_inited_store([ 
$thing1 ]) do |store| + $thing1_mod = new_version_plus($thing1, 1, { name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, $thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1_mod end end it "cannot update item with same version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + $thing1_mod = $thing1.clone.merge({ name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, $thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "cannot update feature with older version" do - with_inited_store([ thing1 ]) do |store| - thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' }) - store.upsert(things_kind, thing1_mod) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + $thing1_mod = new_version_plus($thing1, -1, { name: $thing1[:name] + ' updated' }) + store.upsert($things_kind, $thing1_mod) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "can delete item with newer version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version] + 1) - expect(store.get(things_kind, key1)).to be_nil + with_inited_store([ $thing1 ]) do |store| + store.delete($things_kind, $key1, $thing1[:version] + 1) + expect(store.get($things_kind, $key1)).to be_nil end end it "cannot delete item with same version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version]) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + store.delete($things_kind, $key1, $thing1[:version]) + expect(store.get($things_kind, $key1)).to eq $thing1 end end it "cannot delete item with older version" do - with_inited_store([ thing1 ]) do |store| - store.delete(things_kind, key1, thing1[:version] - 1) - expect(store.get(things_kind, key1)).to eq thing1 + with_inited_store([ $thing1 ]) do |store| + store.delete($things_kind, $key1, $thing1[:version] - 1) + expect(store.get($things_kind, $key1)).to eq $thing1 end end @@ -211,3 +196,77 @@ def new_version_plus(f, deltaVersion, attrs = {}) end end end + +shared_examples "persistent_feature_store" do |store_tester_class| + base_options = { logger: $null_logger } + + # We'll loop through permutations of the following parameters. Note: in the future, the caching logic will + # be separated out and implemented at a higher level of the SDK, so we won't have to test it for individual + # persistent store implementations. Currently caching *is* implemented in a shared class (CachingStoreWrapper), + # but the individual store implementations are wrapping themselves in that class, so they can't be tested + # separately from it. 
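For context, the wrapping pattern referred to in the comment above is the one used by the integrations earlier in this series, where the cache TTL comes from the `:expiration` option. A restated sketch (the table name is a placeholder), not new API:

    table_name = "my-table-name"
    opts = { expiration: 60 }  # 0 disables the caching layer, matching the "without caching" group below
    core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts)
    store = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)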
+ + caching_test_groups = [ + ["with caching", { expiration: 60 }], + ["without caching", { expiration: 0 }] + ] + prefix_test_groups = [ + ["with default prefix", {}], + ["with specified prefix", { prefix: "testprefix" }] + ] + + caching_test_groups.each do |test_group_description, caching_options| + context(test_group_description) do + + prefix_test_groups.each do |subgroup_description, prefix_options| + # The following tests are done for each permutation of (caching/no caching) and (default prefix/specified prefix) + context(subgroup_description) do + options = caching_options.merge(prefix_options).merge(base_options) + + store_tester = store_tester_class.new(base_options) + + before(:each) { store_tester.clear_data } + + include_examples "any_feature_store", store_tester + + it "can detect if another instance has initialized the store" do + ensure_stop(store_tester.create_feature_store) do |store1| + store1.init({}) + ensure_stop(store_tester.create_feature_store) do |store2| + expect(store2.initialized?).to eq true + end + end + end + + it "can read data written by another instance" do + ensure_stop(store_tester.create_feature_store) do |store1| + store1.init({ $things_kind => { $key1.to_sym => $thing1 } }) + ensure_stop(store_tester.create_feature_store) do |store2| + expect(store2.get($things_kind, $key1)).to eq $thing1 + end + end + end + end + end + + # The following tests are done for each permutation of (caching/no caching) + it "is independent from other stores with different prefixes" do + factory_a = store_tester_class.new({ prefix: "a" }.merge(caching_options).merge(base_options)) + factory_b = store_tester_class.new({ prefix: "b" }.merge(caching_options).merge(base_options)) + factory_a.clear_data + factory_b.clear_data + + ensure_stop(factory_a.create_feature_store) do |store_a| + store_a.init({ $things_kind => { $key1.to_sym => $thing1 } }) + ensure_stop(factory_b.create_feature_store) do |store_b1| + store_b1.init({ $things_kind => {} }) + end + ensure_stop(factory_b.create_feature_store) do |store_b2| # this ensures we're not just reading cached data + expect(store_b2.get($things_kind, $key1)).to be_nil + expect(store_a.get($things_kind, $key1)).to eq $thing1 + end + end + end + end + end +end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb index c403fc69..1d56078f 100644 --- a/spec/in_memory_feature_store_spec.rb +++ b/spec/in_memory_feature_store_spec.rb @@ -1,12 +1,14 @@ require "feature_store_spec_base" require "spec_helper" -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryFeatureStore.new +class InMemoryStoreTester + def create_feature_store + LaunchDarkly::InMemoryFeatureStore.new + end end describe LaunchDarkly::InMemoryFeatureStore do subject { LaunchDarkly::InMemoryFeatureStore } - - include_examples "feature_store", method(:create_in_memory_store) + + include_examples "any_feature_store", InMemoryStoreTester.new end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index bad1e736..e73858fa 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -2,39 +2,34 @@ require "diplomat" require "spec_helper" - -$my_prefix = 'testprefix' +# These tests will all fail if there isn't a local Consul instance running. 
+# They can be disabled with LD_SKIP_DATABASE_TESTS=1 $consul_base_opts = { prefix: $my_prefix, logger: $null_log } -def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( - $consul_base_opts.merge(opts).merge({ expiration: 60 })) -end +class ConsulStoreTester + def initialize(options) + @options = options + @actual_prefix = @options[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix + end -def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( - $consul_base_opts.merge(opts).merge({ expiration: 0 })) -end + def clear_data + Diplomat::Kv.delete(@actual_prefix + '/', recurse: true) + end -def clear_all_data - Diplomat::Kv.delete($my_prefix + '/', recurse: true) + def create_feature_store + LaunchDarkly::Integrations::Consul.new_feature_store(@options) + end end describe "Consul feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - # These tests will all fail if there isn't a local Consul instance running. - - context "with local cache" do - include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) - end - - context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) - end + include_examples "persistent_feature_store", ConsulStoreTester end + +# There isn't a Big Segments integration for Consul. diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb deleted file mode 100644 index 3b95edc8..00000000 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ /dev/null @@ -1,103 +0,0 @@ -require "feature_store_spec_base" -require "aws-sdk-dynamodb" -require "spec_helper" - - -$table_name = 'LD_DYNAMODB_TEST_TABLE' -$endpoint = 'http://localhost:8000' -$my_prefix = 'testprefix' - -$dynamodb_opts = { - credentials: Aws::Credentials.new("key", "secret"), - region: "us-east-1", - endpoint: $endpoint -} - -$ddb_base_opts = { - dynamodb_opts: $dynamodb_opts, - prefix: $my_prefix, - logger: $null_log -} - -def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - $ddb_base_opts.merge(opts).merge({ expiration: 60 })) -end - -def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - $ddb_base_opts.merge(opts).merge({ expiration: 0 })) -end - -def clear_all_data - client = create_test_client - items_to_delete = [] - req = { - table_name: $table_name, - projection_expression: '#namespace, #key', - expression_attribute_names: { - '#namespace' => 'namespace', - '#key' => 'key' - } - } - while true - resp = client.scan(req) - items_to_delete = items_to_delete + resp.items - break if resp.last_evaluated_key.nil? 
|| resp.last_evaluated_key.length == 0 - req.exclusive_start_key = resp.last_evaluated_key - end - requests = items_to_delete.map do |item| - { delete_request: { key: item } } - end - LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests) -end - -def create_table_if_necessary - client = create_test_client - begin - client.describe_table({ table_name: $table_name }) - return # no error, table exists - rescue Aws::DynamoDB::Errors::ResourceNotFoundException - # fall through to code below - we'll create the table - end - - req = { - table_name: $table_name, - key_schema: [ - { attribute_name: "namespace", key_type: "HASH" }, - { attribute_name: "key", key_type: "RANGE" } - ], - attribute_definitions: [ - { attribute_name: "namespace", attribute_type: "S" }, - { attribute_name: "key", attribute_type: "S" } - ], - provisioned_throughput: { - read_capacity_units: 1, - write_capacity_units: 1 - } - } - client.create_table(req) - - # When DynamoDB creates a table, it may not be ready to use immediately -end - -def create_test_client - Aws::DynamoDB::Client.new($dynamodb_opts) -end - - -describe "DynamoDB feature store" do - break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - - # These tests will all fail if there isn't a local DynamoDB instance running. - - create_table_if_necessary - - context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) - end - - context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) - end -end diff --git a/spec/integrations/dynamodb_stores_spec.rb b/spec/integrations/dynamodb_stores_spec.rb new file mode 100644 index 00000000..8f7c5c07 --- /dev/null +++ b/spec/integrations/dynamodb_stores_spec.rb @@ -0,0 +1,150 @@ +require "big_segment_store_spec_base" +require "feature_store_spec_base" +require "aws-sdk-dynamodb" +require "spec_helper" + +# These tests will all fail if there isn't a local DynamoDB instance running. +# They can be disabled with LD_SKIP_DATABASE_TESTS=1 + +$DynamoDBBigSegmentStore = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore + +class DynamoDBStoreTester + TABLE_NAME = 'LD_DYNAMODB_TEST_TABLE' + DYNAMODB_OPTS = { + credentials: Aws::Credentials.new("key", "secret"), + region: "us-east-1", + endpoint: "http://localhost:8000" + } + FEATURE_STORE_BASE_OPTS = { + dynamodb_opts: DYNAMODB_OPTS, + prefix: 'testprefix', + logger: $null_log + } + + def initialize(options = {}) + @options = options.clone + @options[:dynamodb_opts] = DYNAMODB_OPTS + @actual_prefix = options[:prefix] ? 
"#{options[:prefix]}:" : "" + end + + def self.create_test_client + Aws::DynamoDB::Client.new(DYNAMODB_OPTS) + end + + def self.create_table_if_necessary + client = create_test_client + begin + client.describe_table({ table_name: TABLE_NAME }) + return # no error, table exists + rescue Aws::DynamoDB::Errors::ResourceNotFoundException + # fall through to code below - we'll create the table + end + + req = { + table_name: TABLE_NAME, + key_schema: [ + { attribute_name: "namespace", key_type: "HASH" }, + { attribute_name: "key", key_type: "RANGE" } + ], + attribute_definitions: [ + { attribute_name: "namespace", attribute_type: "S" }, + { attribute_name: "key", attribute_type: "S" } + ], + provisioned_throughput: { + read_capacity_units: 1, + write_capacity_units: 1 + } + } + client.create_table(req) + + # When DynamoDB creates a table, it may not be ready to use immediately + end + + def clear_data + client = self.class.create_test_client + items_to_delete = [] + req = { + table_name: TABLE_NAME, + projection_expression: '#namespace, #key', + expression_attribute_names: { + '#namespace' => 'namespace', + '#key' => 'key' + } + } + while true + resp = client.scan(req) + resp.items.each do |item| + if !@actual_prefix || item["namespace"].start_with?(@actual_prefix) + items_to_delete.push(item) + end + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + requests = items_to_delete.map do |item| + { delete_request: { key: item } } + end + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, TABLE_NAME, requests) + end + + def create_feature_store + LaunchDarkly::Integrations::DynamoDB::new_feature_store(TABLE_NAME, @options) + end + + def create_big_segment_store + LaunchDarkly::Integrations::DynamoDB::new_big_segment_store(TABLE_NAME, @options) + end + + def set_big_segments_metadata(metadata) + client = self.class.create_test_client + key = @actual_prefix + $DynamoDBBigSegmentStore::KEY_METADATA + client.put_item( + table_name: TABLE_NAME, + item: { + "namespace" => key, + "key" => key, + $DynamoDBBigSegmentStore::ATTR_SYNC_TIME => metadata.last_up_to_date + } + ) + end + + def set_big_segments(user_hash, includes, excludes) + client = self.class.create_test_client + sets = { + $DynamoDBBigSegmentStore::ATTR_INCLUDED => Set.new(includes), + $DynamoDBBigSegmentStore::ATTR_EXCLUDED => Set.new(excludes) + } + sets.each do |attr_name, values| + if !values.empty? 
+ client.update_item( + table_name: TABLE_NAME, + key: { + "namespace" => @actual_prefix + $DynamoDBBigSegmentStore::KEY_USER_DATA, + "key" => user_hash + }, + update_expression: "ADD #{attr_name} :value", + expression_attribute_values: { + ":value" => values + } + ) + end + end + end +end + + +describe "DynamoDB feature store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + DynamoDBStoreTester.create_table_if_necessary + + include_examples "persistent_feature_store", DynamoDBStoreTester +end + +describe "DynamoDB big segment store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + DynamoDBStoreTester.create_table_if_necessary + + include_examples "big_segment_store", DynamoDBStoreTester +end diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb new file mode 100644 index 00000000..4f26cbb0 --- /dev/null +++ b/spec/integrations/redis_stores_spec.rb @@ -0,0 +1,152 @@ +require "ldclient-rb/impl/integrations/redis_impl" + +require "big_segment_store_spec_base" +require "feature_store_spec_base" +require "spec_helper" + +require "redis" + +# These tests will all fail if there isn't a local Redis instance running. +# They can be disabled with LD_SKIP_DATABASE_TESTS=1 + +$RedisBigSegmentStore = LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore + +def with_redis_test_client + ensure_close(Redis.new({ url: "redis://localhost:6379" })) do |client| + yield client + end +end + + +class RedisStoreTester + def initialize(options) + @options = options + @actual_prefix = @options[:prefix] ||LaunchDarkly::Integrations::Redis.default_prefix + end + + def clear_data + with_redis_test_client do |client| + keys = client.keys("#{@actual_prefix}:*") + keys.each { |key| client.del(key) } + end + end + + def create_feature_store + LaunchDarkly::Integrations::Redis::new_feature_store(@options) + end + + def create_big_segment_store + LaunchDarkly::Integrations::Redis.new_big_segment_store(@options) + end + + def set_big_segments_metadata(metadata) + with_redis_test_client do |client| + client.set(@actual_prefix + $RedisBigSegmentStore::KEY_LAST_UP_TO_DATE, + metadata.last_up_to_date.nil? ? 
"" : metadata.last_up_to_date.to_s) + end + end + + def set_big_segments(user_hash, includes, excludes) + with_redis_test_client do |client| + includes.each do |ref| + client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_INCLUDE + user_hash, ref) + end + excludes.each do |ref| + client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_EXCLUDE + user_hash, ref) + end + end + end +end + + +describe "Redis feature store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + include_examples "persistent_feature_store", RedisStoreTester + + def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) + test_hook = Object.new + version_counter = start_version + expect(test_hook).to receive(:before_update_transaction) { |base_key, key| + if version_counter <= end_version + new_flag = flag.clone + new_flag[:version] = version_counter + other_client.hset(base_key, key, new_flag.to_json) + version_counter = version_counter + 1 + end + }.at_least(:once) + test_hook + end + + tester = RedisStoreTester.new({ logger: $null_logger }) + + it "handles upsert race condition against external client with lower version" do + with_redis_test_client do |other_client| + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) + tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger }) + + ensure_stop(tester.create_feature_store) do |store| + store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) + + my_ver = { key: "foo", version: 10 } + store.upsert(LaunchDarkly::FEATURES, my_ver) + result = store.get(LaunchDarkly::FEATURES, flag[:key]) + expect(result[:version]).to eq 10 + end + end + end + + it "handles upsert race condition against external client with higher version" do + with_redis_test_client do |other_client| + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) + tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger }) + + ensure_stop(tester.create_feature_store) do |store| + store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) + + my_ver = { key: "foo", version: 2 } + store.upsert(LaunchDarkly::FEATURES, my_ver) + result = store.get(LaunchDarkly::FEATURES, flag[:key]) + expect(result[:version]).to eq 3 + end + end + end + + it "shuts down a custom Redis pool by default" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + tester = RedisStoreTester.new({ pool: unowned_pool, logger: $null_logger }) + store = tester.create_feature_store + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end + + it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + tester = RedisStoreTester.new({ pool: unowned_pool, pool_shutdown_on_close: false, logger: $null_logger }) + store = tester.create_feature_store + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.not_to raise_error + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end +end + +describe "Redis big segment store" do + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + + include_examples "big_segment_store", RedisStoreTester +end diff --git 
a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb deleted file mode 100644 index 6dd5733e..00000000 --- a/spec/redis_feature_store_spec.rb +++ /dev/null @@ -1,121 +0,0 @@ -require "feature_store_spec_base" -require "connection_pool" -require "json" -require "redis" -require "spec_helper" - - -$my_prefix = 'testprefix' - -$base_opts = { - prefix: $my_prefix, - logger: $null_log -} - -def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) -end - -def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) -end - -def clear_all_data - client = Redis.new - client.flushdb -end - - -describe LaunchDarkly::RedisFeatureStore do - subject { LaunchDarkly::RedisFeatureStore } - - break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - - # These tests will all fail if there isn't a Redis instance running on the default port. - - context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) - end - - context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) - end - - def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) - test_hook = Object.new - version_counter = start_version - expect(test_hook).to receive(:before_update_transaction) { |base_key, key| - if version_counter <= end_version - new_flag = flag.clone - new_flag[:version] = version_counter - other_client.hset(base_key, key, new_flag.to_json) - version_counter = version_counter + 1 - end - }.at_least(:once) - test_hook - end - - it "handles upsert race condition against external client with lower version" do - other_client = Redis.new({ url: "redis://localhost:6379" }) - flag = { key: "foo", version: 1 } - test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) - store = create_redis_store({ test_hook: test_hook }) - - begin - store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - - my_ver = { key: "foo", version: 10 } - store.upsert(LaunchDarkly::FEATURES, my_ver) - result = store.get(LaunchDarkly::FEATURES, flag[:key]) - expect(result[:version]).to eq 10 - ensure - other_client.close - end - end - - it "handles upsert race condition against external client with higher version" do - other_client = Redis.new({ url: "redis://localhost:6379" }) - flag = { key: "foo", version: 1 } - test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) - store = create_redis_store({ test_hook: test_hook }) - - begin - store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - - my_ver = { key: "foo", version: 2 } - store.upsert(LaunchDarkly::FEATURES, my_ver) - result = store.get(LaunchDarkly::FEATURES, flag[:key]) - expect(result[:version]).to eq 3 - ensure - other_client.close - end - end - - it "shuts down a custom Redis pool by default" do - unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } - store = create_redis_store({ pool: unowned_pool }) - - begin - store.init(LaunchDarkly::FEATURES => { }) - store.stop - - expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) - ensure - unowned_pool.shutdown { |conn| conn.close } - end - end - - it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do - unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: 
"redis://localhost:6379" }) } - store = create_redis_store({ pool: unowned_pool, pool_shutdown_on_close: false }) - - begin - store.init(LaunchDarkly::FEATURES => { }) - store.stop - - expect { unowned_pool.with {} }.not_to raise_error(ConnectionPool::PoolShuttingDownError) - ensure - unowned_pool.shutdown { |conn| conn.close } - end - end -end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 8438ecc2..c54ef444 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -3,6 +3,22 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL +def ensure_close(thing) + begin + yield thing + ensure + thing.close + end +end + +def ensure_stop(thing) + begin + yield thing + ensure + thing.stop + end +end + RSpec.configure do |config| config.before(:each) do end From 201a61ccbb281b589c3215b8a59f41f4657e6fa4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 3 Dec 2021 17:32:24 -0800 Subject: [PATCH 213/292] add missing import --- lib/ldclient-rb/ldclient.rb | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index fc4ad173..a8719773 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/big_segments" require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/evaluator" require "ldclient-rb/impl/event_factory" From 91a7de345ebbc577677328e822a195d02b21a7e1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 6 Dec 2021 10:31:59 -0800 Subject: [PATCH 214/292] fix stale calculation --- lib/ldclient-rb/impl/big_segments.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index eb709246..016221d5 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -72,7 +72,7 @@ def poll_store_and_update_status end def is_stale(timestamp) - (Impl::Util.current_time_millis - timestamp) >= @stale_after_millis + !timestamp || ((Impl::Util.current_time_millis - timestamp) >= @stale_after_millis) end def self.hash_for_user_key(user_key) From ece645414bed4ce50158a0c1ba339137d3209e2f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 7 Dec 2021 18:58:38 -0800 Subject: [PATCH 215/292] fix big segments user hash algorithm to use SHA256 --- lib/ldclient-rb/impl/big_segments.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index 016221d5..69f40bb7 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -76,7 +76,7 @@ def is_stale(timestamp) end def self.hash_for_user_key(user_key) - Digest::MD5.base64digest(user_key) + Digest::SHA256.base64digest(user_key) end end From a11d2865635545a2925f9f91e41c42db4ee0acdb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 12:10:00 -0800 Subject: [PATCH 216/292] improve & refactor client/evaluation tests --- spec/ldclient_end_to_end_spec.rb | 114 ++--- spec/ldclient_evaluation_spec.rb | 305 ++++++++++++++ spec/ldclient_events_spec.rb | 306 ++++++++++++++ spec/ldclient_listeners_spec.rb | 18 +- spec/ldclient_spec.rb | 692 ++++--------------------------- spec/ldclient_spec_base.rb | 42 -- spec/mock_components.rb | 79 ++++ spec/model_builders.rb | 154 +++++++ 8 files changed, 977 insertions(+), 733 deletions(-) create mode 100644 spec/ldclient_evaluation_spec.rb create mode 100644 spec/ldclient_events_spec.rb delete mode 100644 spec/ldclient_spec_base.rb 
create mode 100644 spec/model_builders.rb diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index cbefcf6c..19c6c241 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -1,5 +1,5 @@ require "http_util" -require "ldclient_spec_base" +require "mock_components" require "spec_helper" @@ -14,20 +14,14 @@ module LaunchDarkly # Note that we can't do end-to-end tests in streaming mode until we have a test server that can do streaming # responses, which is difficult in WEBrick. - describe "LDClient end-to-end", :ldclient_spec_base => true do + describe "LDClient end-to-end" do it "starts in polling mode" do with_server do |poll_server| poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - send_events: false, - logger: null_logger - ) - with_client(config) do |client| + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be true - expect(client.variation(ALWAYS_TRUE_FLAG[:key], user, false)).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be true end end end @@ -36,74 +30,57 @@ module LaunchDarkly with_server do |poll_server| poll_server.setup_status_response("/sdk/latest-all", 401) - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - send_events: false, - logger: null_logger - ) - with_client(config) do |client| + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be false - expect(client.variation(ALWAYS_TRUE_FLAG[:key], user, false)).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be false end end end it "sends event without diagnostics" do - with_server do |poll_server| - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - - config = Config.new( - stream: false, - base_uri: poll_server.base_uri.to_s, - events_uri: events_server.base_uri.to_s, - diagnostic_opt_out: true, - logger: null_logger - ) - with_client(config) do |client| - client.identify(user) - client.flush + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s, + diagnostic_opt_out: true + ) + with_client(config) do |client| + client.identify(basic_user) + client.flush - req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ sdk_key ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data.length).to eq 1 - expect(data[0]["kind"]).to eq "identify" - end + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" end end end it "sends diagnostic event" do - with_server do |poll_server| - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - events_server.setup_ok_response("/diagnostic", "") - poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - - config = Config.new( - stream: 
false, - base_uri: poll_server.base_uri.to_s, - events_uri: events_server.base_uri.to_s, - logger: null_logger - ) - with_client(config) do |client| - user = { key: 'userkey' } - client.identify(user) - client.flush + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + events_server.setup_ok_response("/diagnostic", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s + ) + with_client(config) do |client| + client.identify(basic_user) + client.flush - req0, body0 = events_server.await_request_with_body - req1, body1 = events_server.await_request_with_body - req = req0.path == "/diagnostic" ? req0 : req1 - body = req0.path == "/diagnostic" ? body0 : body1 - expect(req.header['authorization']).to eq [ sdk_key ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data["kind"]).to eq "diagnostic-init" - end + req0, body0 = events_server.await_request_with_body + req1, body1 = events_server.await_request_with_body + req = req0.path == "/diagnostic" ? req0 : req1 + body = req0.path == "/diagnostic" ? body0 : body1 + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data["kind"]).to eq "diagnostic-init" end end end @@ -114,19 +91,20 @@ module LaunchDarkly events_server.setup_ok_response("/bulk", "") poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - config = Config.new( + config = test_config( stream: false, + data_source: nil, + send_events: true, base_uri: "http://fake-polling-server", events_uri: "http://fake-events-server", diagnostic_opt_out: true, - logger: null_logger, socket_factory: SocketFactoryFromHash.new({ "fake-polling-server" => poll_server.port, "fake-events-server" => events_server.port }) ) with_client(config) do |client| - client.identify(user) + client.identify(basic_user) client.flush req, body = events_server.await_request_with_body diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb new file mode 100644 index 00000000..da104e22 --- /dev/null +++ b/spec/ldclient_evaluation_spec.rb @@ -0,0 +1,305 @@ +require "ldclient-rb" + +require "mock_components" +require "model_builders" +require "spec_helper" + +module LaunchDarkly + describe "LDClient evaluation tests" do + context "variation" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + ensure_close(LDClient.new(sdk_key, test_config(offline: true))) do |offline_client| + result = offline_client.variation("doesntmatter", basic_user, "default") + expect(result).to eq "default" + end + end + + it "returns the default value for an unknown feature" do + ensure_close(LDClient.new(sdk_key, test_config())) do |client| + expect(client.variation("badkey", basic_user, "default")).to eq "default" + end + end + + it "returns the value for an existing feature" do + flag = FlagBuilder.new("flagkey").off_with_value("value").build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(client.variation("flagkey", basic_user, "default")).to eq "value" + end + end + + it "returns the default value if a feature evaluates to nil" do + flag = 
FlagBuilder.new("flagkey").on(false).off_variation(nil).build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(client.variation("flagkey", basic_user, "default")).to eq "default" + end + end + + it "can evaluate a flag that references a segment" do + segment = SegmentBuilder.new("segmentkey").included(basic_user[:key]).build + flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build + store = InMemoryFeatureStore.new + store.upsert(SEGMENTS, segment) + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(client.variation("flagkey", basic_user, false)).to be true + end + end + + it "can evaluate a flag that references a big segment" do + segment = SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build + flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build + store = InMemoryFeatureStore.new + store.upsert(SEGMENTS, segment) + store.upsert(FEATURES, flag) + + segstore = MockBigSegmentStore.new + segstore.setup_segment_for_user(basic_user[:key], segment, true) + big_seg_config = BigSegmentsConfig.new(store: segstore) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, big_segments: big_seg_config))) do |client| + expect(client.variation("flagkey", basic_user, false)).to be true + end + end + end + + context "variation_detail" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + offline_client = LDClient.new(sdk_key, test_config(offline: true)) + result = offline_client.variation_detail("doesntmatter", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_CLIENT_NOT_READY)) + expect(result).to eq expected + end + + it "returns the default value for an unknown feature" do + client = LDClient.new(sdk_key, test_config()) + result = client.variation_detail("badkey", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)) + expect(result).to eq expected + end + + it "returns a value for an existing feature" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.upsert(FEATURES, feature_with_value) + result = client.variation_detail("key", basic_user, "default") + expected = EvaluationDetail.new("value", 0, EvaluationReason::off) + expect(result).to eq expected + end + + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.upsert(FEATURES, empty_feature) + result = client.variation_detail("key", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::off) + expect(result).to eq expected + expect(result.default_value?).to be true + end + + it "includes big segment status in reason when evaluating a flag that references a big segment" do + segment = 
SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build + flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build + store = InMemoryFeatureStore.new + store.upsert(SEGMENTS, segment) + store.upsert(FEATURES, flag) + + segstore = MockBigSegmentStore.new + segstore.setup_segment_for_user(basic_user[:key], segment, true) + segstore.setup_metadata(Time.now) + big_seg_config = BigSegmentsConfig.new(store: segstore) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, big_segments: big_seg_config))) do |client| + result = client.variation_detail("flagkey", basic_user, false) + expect(result.value).to be true + expect(result.reason.big_segments_status).to eq(BigSegmentsStatus::HEALTHY) + end + end + end + + describe "all_flags" do + let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } + let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + + it "returns flag values" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "returns empty map for nil user" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags(nil) + expect(result).to eq({}) + end + + it "returns empty map for nil user key" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({}) + expect(result).to eq({}) + end + + it "returns empty map if offline" do + store = InMemoryFeatureStore.new + offline_client = LDClient.new(sdk_key, test_config(offline: true, feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end + end + + context "all_flags_state" do + let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } + let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + + it "returns flags state" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 + } + }, + '$valid' => true + }) + end + + it "can be filtered for only client-side flags" do + flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } + flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } + flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } 
+ flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } + + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { + flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 + }}) + + state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end + + it "can omit details for untracked flags" do + future_time = (Time.now.to_f * 1000).to_i + 100000 + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } + + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => future_time + } + }, + '$valid' => true + }) + end + + it "returns empty state for nil user" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state for nil user key" do + store = InMemoryFeatureStore.new + client = LDClient.new(sdk_key, test_config(feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state if offline" do + store = InMemoryFeatureStore.new + offline_client = LDClient.new(sdk_key, test_config(offline: true, feature_store: store)) + store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + end +end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb new file mode 100644 index 00000000..c88c2c00 --- /dev/null +++ b/spec/ldclient_events_spec.rb @@ -0,0 +1,306 @@ +require "ldclient-rb" + +require "mock_components" +require "model_builders" +require "spec_helper" + +module LaunchDarkly + describe "LDClient events tests" do + def event_processor(client) + client.instance_variable_get(:@event_processor) + end + + it 'uses NullEventProcessor if send_events is false' do + ensure_close(LDClient.new(sdk_key, test_config(send_events: false))) do |client| + expect(event_processor(client)).to be_a(LaunchDarkly::NullEventProcessor) + end + end + + 
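+    # The evaluation-event contexts below follow a common pattern: build a flag with
+    # FlagBuilder, upsert it into an InMemoryFeatureStore, open a client against that
+    # store via test_config, and assert that the client's event processor receives
+    # :add_event with a hash including the expected feature-event fields (key, version,
+    # user, variation, value, trackEvents, and so on).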
context "evaluation events - variation" do + it "unknown flag" do + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default" + )) + client.variation("badkey", basic_user, "default") + end + end + + it "known flag" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value"). + track_events(true).debug_events_until_date(1000).build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: flag[:key], + version: flag[:version], + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000 + )) + client.variation(flag[:key], basic_user, "default") + end + end + + it "does not send event, and logs error, if user is nil" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation(flag[:key], nil, "default") + end + end + + it "does not send event, and logs warning, if user key is nil" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + logger = double().as_null_object + keyless_user = { key: nil } + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.variation(flag[:key], keyless_user, "default") + end + end + + it "sets trackEvents and reason if trackEvents is set for matched rule" do + flag = FlagBuilder.new("flagkey").version(100).on(true).variations("value"). + rule(RuleBuilder.new.variation(0).id("id").track_events(true). + clause(Clauses.match_user(basic_user))). + build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: flag[:key], + version: flag[:version], + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') + )) + client.variation(flag[:key], basic_user, "default") + end + end + + it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + flag = FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0). 
+ track_events_fallthrough(true).build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: flag[:key], + version: flag[:version], + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + reason: LaunchDarkly::EvaluationReason::fallthrough + )) + client.variation(flag[:key], basic_user, "default") + end + end + end + + context "evaluation events - variation_detail" do + it "unknown flag" do + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default", + reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) + )) + client.variation_detail("badkey", basic_user, "default") + end + end + + it "known flag" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value"). + track_events(true).debug_events_until_date(1000).build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "feature", + key: flag[:key], + version: flag[:version], + user: basic_user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000, + reason: LaunchDarkly::EvaluationReason::off + )) + client.variation_detail(flag[:key], basic_user, "default") + end + end + + it "does not send event, and logs error, if user is nil" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation_detail(flag[:key], nil, "default") + end + end + + it "does not send event, and logs warning, if user key is nil" do + flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build + store = InMemoryFeatureStore.new + store.upsert(FEATURES, flag) + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.variation_detail(flag[:key], { key: nil }, "default") + end + end + end + + context "identify" do + it "queues up an identify event" do + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "identify", key: basic_user[:key], user: basic_user)) + client.identify(basic_user) + end + end + + it "does not send event, and logs warning, if user is nil" do + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(nil) + end + end + + it "does not send event, and logs warning, if user key is nil" do + logger = double().as_null_object + + 
ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify({ key: nil }) + end + end + end + + context "track" do + it "queues up an custom event" do + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: basic_user, data: 42)) + client.track("custom_event_name", basic_user, 42) + end + end + + it "can include a metric value" do + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: basic_user, metricValue: 1.5)) + client.track("custom_event_name", basic_user, nil, 1.5) + end + end + + it "includes contextKind with anonymous user" do + anon_user = { key: 'user-key', anonymous: true } + + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: anon_user, metricValue: 2.2, contextKind: "anonymousUser")) + client.track("custom_event_name", anon_user, nil, 2.2) + end + end + + it "sanitizes the user in the event" do + numeric_key_user = { key: 33 } + sanitized_user = { key: "33" } + + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including(user: sanitized_user)) + client.track("custom_event_name", numeric_key_user, nil) + end + end + + it "does not send event, and logs a warning, if user is nil" do + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", nil, nil) + end + end + + it "does not send event, and logs warning, if user key is nil" do + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", { key: nil }, nil) + end + end + end + + context "alias" do + it "queues up an alias event" do + anon_user = { key: "user-key", anonymous: true } + + ensure_close(LDClient.new(sdk_key, test_config)) do |client| + expect(event_processor(client)).to receive(:add_event).with(hash_including( + kind: "alias", key: basic_user[:key], contextKind: "user", previousKey: anon_user[:key], previousContextKind: "anonymousUser")) + client.alias(basic_user, anon_user) + end + end + + it "does not send event, and logs warning, if user is nil" do + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(nil, nil) + end + end + + it "does not send event, and logs warning, if user key is nil" do + logger = double().as_null_object + + ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + expect(event_processor(client)).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias({ key: nil }, { key: nil }) + end + end + end + end +end diff --git a/spec/ldclient_listeners_spec.rb b/spec/ldclient_listeners_spec.rb index 
b86bbb82..8628f75b 100644 --- a/spec/ldclient_listeners_spec.rb +++ b/spec/ldclient_listeners_spec.rb @@ -1,12 +1,11 @@ -require "ldclient_spec_base" require "mock_components" require "spec_helper" module LaunchDarkly - describe "LDClient event listeners/observers", :ldclient_spec_base => true do + describe "LDClient event listeners/observers" do context "big_segment_store_status_provider" do it "returns unavailable status when not configured" do - with_client(base_config) do |client| + with_client(test_config) do |client| status = client.big_segment_store_status_provider.status expect(status.available).to be(false) expect(status.stale).to be(false) @@ -16,16 +15,11 @@ module LaunchDarkly it "sends status updates" do store = MockBigSegmentStore.new store.setup_metadata(Time.now) - config = Config.new( - big_segments: BigSegmentsConfig.new( - store: store, - status_poll_interval: 0.01 - ), - send_events: false, - data_source: null_data_source, - logger: null_logger + big_segments_config = BigSegmentsConfig.new( + store: store, + status_poll_interval: 0.01 ) - with_client(config) do |client| + with_client(test_config(big_segments: big_segments_config)) do |client| status1 = client.big_segment_store_status_provider.status expect(status1.available).to be(true) expect(status1.stale).to be(false) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 8e2ef650..6b15245d 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -1,632 +1,102 @@ +require "mock_components" require "spec_helper" +module LaunchDarkly + describe LDClient do + subject { LDClient } -describe LaunchDarkly::LDClient do - subject { LaunchDarkly::LDClient } - let(:offline_config) { LaunchDarkly::Config.new({offline: true}) } - let(:offline_client) do - subject.new("secret", offline_config) - end - let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } - let(:logger) { double().as_null_object } - let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } - let(:client) do - subject.new("secret", config) - end - let(:feature) do - data = File.read(File.join("spec", "fixtures", "feature.json")) - JSON.parse(data, symbolize_names: true) - end - let(:user) do - { - key: "user@test.com", - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:user_anonymous) do - { - key: "anonymous@test.com", - anonymous: true - } - end - let(:numeric_key_user) do - { - key: 33, - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:sanitized_numeric_key_user) do - { - key: "33", - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:user_without_key) do - { name: "Keyless Joe" } - end - - def event_processor - client.instance_variable_get(:@event_processor) - end - - describe "constructor requirement of non-nil sdk key" do - it "is not enforced when offline" do - subject.new(nil, offline_config) - end - - it "is not enforced if use_ldd is true and send_events is false" do - subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true, send_events: false })) - end - - it "is not enforced if using file data and send_events is false" do - source = LaunchDarkly::FileDataSource.factory({}) - subject.new(nil, LaunchDarkly::Config.new({ data_source: source, send_events: false })) - end - - it "is enforced in streaming mode even if send_events is false" do - expect { - subject.new(nil, LaunchDarkly::Config.new({ send_events: false })) - }.to raise_error(ArgumentError) - end - - it "is enforced in polling mode even if send_events is false" do - 
expect { - subject.new(nil, LaunchDarkly::Config.new({ stream: false, send_events: false })) - }.to raise_error(ArgumentError) - end - - it "is enforced if use_ldd is true and send_events is true" do - expect { - subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true })) - }.to raise_error(ArgumentError) - end - - it "is enforced if using file data and send_events is true" do - source = LaunchDarkly::FileDataSource.factory({}) - expect { - subject.new(nil, LaunchDarkly::Config.new({ data_source: source })) - }.to raise_error(ArgumentError) - end - end - - describe '#variation' do - feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, - trackEvents: true, debugEventsUntilDate: 1000 } - - it "returns the default value if the client is offline" do - result = offline_client.variation("doesntmatter", user, "default") - expect(result).to eq "default" - end - - it "returns the default value for an unknown feature" do - expect(client.variation("badkey", user, "default")).to eq "default" - end - - it "queues a feature request event for an unknown feature" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: user, value: "default", default: "default" - )) - client.variation("badkey", user, "default") - end - - it "returns the value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(client.variation("key", user, "default")).to eq "value" - end - - it "returns the default value if a feature evaluates to nil" do - empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) - expect(client.variation("key", user, "default")).to eq "default" - end - - it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) - client.variation("key", user, "default") - end - - it "does not send an event if user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:error) - client.variation("key", nil, "default") - end - - it "queues a feature event for an existing feature when user is anonymous" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - contextKind: "anonymousUser", - user: user_anonymous, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) - client.variation("key", user_anonymous, "default") - end - - it "does not queue a feature event for an existing feature when user key is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - bad_user = { name: "Bob" 
} - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.variation("key", bad_user, "default") - end - - it "sets trackEvents and reason if trackEvents is set for matched rule" do - flag = { - key: 'flag', - on: true, - variations: [ 'value' ], - version: 100, - rules: [ - clauses: [ - { attribute: 'key', op: 'in', values: [ user[:key] ] } - ], - variation: 0, - id: 'id', - trackEvents: true - ] - } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: 'feature', - key: 'flag', - version: 100, - user: user, - value: 'value', - default: 'default', - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') - )) - client.variation('flag', user, 'default') - end - - it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do - flag = { - key: 'flag', - on: true, - variations: [ 'value' ], - fallthrough: { variation: 0 }, - version: 100, - rules: [], - trackEventsFallthrough: true - } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: 'feature', - key: 'flag', - version: 100, - user: user, - value: 'value', - default: 'default', - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::fallthrough - )) - client.variation('flag', user, 'default') - end - end - - describe '#variation_detail' do - feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, - trackEvents: true, debugEventsUntilDate: 1000 } - - it "returns the default value if the client is offline" do - result = offline_client.variation_detail("doesntmatter", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, - LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_CLIENT_NOT_READY)) - expect(result).to eq expected - end - - it "returns the default value for an unknown feature" do - result = client.variation_detail("badkey", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, - LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)) - expect(result).to eq expected - end - - it "queues a feature request event for an unknown feature" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: user, value: "default", default: "default", - reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) - )) - client.variation_detail("badkey", user, "default") - end - - it "returns a value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) - expect(result).to eq expected - end - - it "returns the default value if a feature evaluates to nil" do - empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) - result = client.variation_detail("key", user, "default") - expected = 
LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) - expect(result).to eq expected - expect(result.default_value?).to be true - end - - it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000, - reason: LaunchDarkly::EvaluationReason::off - )) - client.variation_detail("key", user, "default") - end - - it "does not send an event if user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:error) - client.variation_detail("key", nil, "default") - end - end - - describe '#all_flags' do - let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } - let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } - - it "returns flag values" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags({ key: 'userkey' }) - expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - end - - it "returns empty map for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags(nil) - expect(result).to eq({}) - end - - it "returns empty map for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = client.all_flags({}) - expect(result).to eq({}) - end - - it "returns empty map if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - result = offline_client.all_flags(nil) - expect(result).to eq({}) - end - end - - describe '#all_flags_state' do - let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } - let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } - - it "returns flags state" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - '$flagsState' => { - 'key1' => { - :variation => 0, - :version => 100 - }, - 'key2' => { - :variation => 1, - :version => 200, - :trackEvents => true, - :debugEventsUntilDate => 1000 - } - }, - '$valid' => true - }) - end - - it "can be filtered for only client-side flags" do - flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } - flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } - flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } - flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.feature_store.init({ LaunchDarkly::FEATURES => { - 
flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 - }}) - - state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) - end - - it "can omit details for untracked flags" do - future_time = (Time.now.to_f * 1000).to_i + 100000 - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - - state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - 'key3' => 'value3', - '$flagsState' => { - 'key1' => { - :variation => 0 - }, - 'key2' => { - :variation => 1, - :version => 200, - :trackEvents => true - }, - 'key3' => { - :variation => 1, - :version => 300, - :debugEventsUntilDate => future_time - } - }, - '$valid' => true - }) - end - - it "returns empty state for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state(nil) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - - it "returns empty state for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = client.all_flags_state({}) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - - it "returns empty state if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - state = offline_client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) - end - end - - describe '#secure_mode_hash' do - it "will return the expected value for a known message and secret" do - result = client.secure_mode_hash({key: :Message}) - expect(result).to eq "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" - end - end - - describe '#track' do - it "queues up an custom event" do - expect(event_processor).to receive(:add_event).with(hash_including(kind: "custom", key: "custom_event_name", user: user, data: 42)) - client.track("custom_event_name", user, 42) - end - - it "can include a metric value" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: user, metricValue: 1.5)) - client.track("custom_event_name", user, nil, 1.5) - end - - it "includes contextKind with anonymous user" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: user_anonymous, metricValue: 2.2, contextKind: "anonymousUser")) - client.track("custom_event_name", user_anonymous, nil, 2.2) - end - - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - 
client.track("custom_event_name", numeric_key_user, nil) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.track("custom_event_name", nil, nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.track("custom_event_name", user_without_key, nil) - end - end - - describe '#alias' do - it "queues up an alias event" do - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "alias", key: user[:key], contextKind: "user", previousKey: user_anonymous[:key], previousContextKind: "anonymousUser")) - client.alias(user, user_anonymous) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.alias(nil, nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.alias(user_without_key, user_without_key) - end - end - - describe '#identify' do - it "queues up an identify event" do - expect(event_processor).to receive(:add_event).with(hash_including(kind: "identify", key: user[:key], user: user)) - client.identify(user) - end - - it "does not send an event, and logs a warning, if user is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.identify(nil) - end - - it "does not send an event, and logs a warning, if user key is nil" do - expect(event_processor).not_to receive(:add_event) - expect(logger).to receive(:warn) - client.identify(user_without_key) - end - end - - describe 'with send_events: false' do - let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, data_source: null_data}) } - let(:client) { subject.new("secret", config) } - - it "uses a NullEventProcessor" do - ep = client.instance_variable_get(:@event_processor) - expect(ep).to be_a(LaunchDarkly::NullEventProcessor) - end - end - - describe 'with send_events: true' do - let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, diagnostic_opt_out: true, data_source: null_data}) } - let(:client_with_events) { subject.new("secret", config_with_events) } - - it "does not use a NullEventProcessor" do - ep = client_with_events.instance_variable_get(:@event_processor) - expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor) - end - end - - describe "feature store data ordering" do - let(:dependency_ordering_test_data) { - { - LaunchDarkly::FEATURES => { - a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, - b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, - c: { key: "c" }, - d: { key: "d" }, - e: { key: "e" }, - f: { key: "f" } - }, - LaunchDarkly::SEGMENTS => { - o: { key: "o" } - } - } - } + context "constructor requirement of non-nil sdk key" do + it "is not enforced when offline" do + subject.new(nil, Config.new(offline: true)) + end - class FakeFeatureStore - attr_reader :received_data + it "is not enforced if use_ldd is true and send_events is false" do + subject.new(nil, Config.new({ use_ldd: true, send_events: false })) + end - def init(all_data) - @received_data = all_data + it "is not enforced if using file data and send_events is false" do + source = FileDataSource.factory({}) + subject.new(nil, 
Config.new({ data_source: source, send_events: false })) end - end - class FakeUpdateProcessor - def initialize(store, data) - @store = store - @data = data + it "is enforced in streaming mode even if send_events is false" do + expect { + subject.new(nil, Config.new({ send_events: false })) + }.to raise_error(ArgumentError) end - def start - @store.init(@data) - ev = Concurrent::Event.new - ev.set - ev + it "is enforced in polling mode even if send_events is false" do + expect { + subject.new(nil, Config.new({ stream: false, send_events: false })) + }.to raise_error(ArgumentError) end - def stop + it "is enforced if use_ldd is true and send_events is true" do + expect { + subject.new(nil, Config.new({ use_ldd: true })) + }.to raise_error(ArgumentError) end - def initialized? - true + it "is enforced if using file data and send_events is true" do + source = FileDataSource.factory({}) + expect { + subject.new(nil, Config.new({ data_source: source })) + }.to raise_error(ArgumentError) end end - it "passes data set to feature store in correct order on init" do - store = FakeFeatureStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, - dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) - client = subject.new("secret", config) + context "secure_mode_hash" do + it "will return the expected value for a known message and secret" do + ensure_close(subject.new("secret", test_config)) do |client| + result = client.secure_mode_hash({key: :Message}) + expect(result).to eq "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" + end + end + end - data = store.received_data - expect(data).not_to be_nil - expect(data.count).to eq(2) - - # Segments should always come first - expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) - expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count) + context "feature store data ordering" do + let(:dependency_ordering_test_data) { + { + FEATURES => { + a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, + b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, + c: { key: "c" }, + d: { key: "d" }, + e: { key: "e" }, + f: { key: "f" } + }, + SEGMENTS => { + o: { key: "o" } + } + } + } - # Features should be ordered so that a flag always appears after its prerequisites, if any - expect(data.keys[1]).to be(LaunchDarkly::FEATURES) - flags_map = data.values[1] - flags_list = flags_map.values - expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count) - flags_list.each_with_index do |item, item_index| - (item[:prerequisites] || []).each do |prereq| - prereq = flags_map[prereq[:key].to_sym] - prereq_index = flags_list.index(prereq) - if prereq_index > item_index - all_keys = (flags_list.map { |f| f[:key] }).join(", ") - raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + it "passes data set to feature store in correct order on init" do + store = CapturingFeatureStore.new + data_source_factory = MockDataSource.factory_with_data(dependency_ordering_test_data) + ensure_close(subject.new(sdk_key, test_config(feature_store: store, data_source: data_source_factory))) do |client| + data = store.received_data + expect(data).not_to be_nil + expect(data.count).to eq(2) + + # Segments should always come first + expect(data.keys[0]).to be(SEGMENTS) + 
expect(data.values[0].count).to eq(dependency_ordering_test_data[SEGMENTS].count) + + # Features should be ordered so that a flag always appears after its prerequisites, if any + expect(data.keys[1]).to be(FEATURES) + flags_map = data.values[1] + flags_list = flags_map.values + expect(flags_list.count).to eq(dependency_ordering_test_data[FEATURES].count) + flags_list.each_with_index do |item, item_index| + (item[:prerequisites] || []).each do |prereq| + prereq = flags_map[prereq[:key].to_sym] + prereq_index = flags_list.index(prereq) + if prereq_index > item_index + all_keys = (flags_list.map { |f| f[:key] }).join(", ") + raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + end + end end end end diff --git a/spec/ldclient_spec_base.rb b/spec/ldclient_spec_base.rb deleted file mode 100644 index 47ecd281..00000000 --- a/spec/ldclient_spec_base.rb +++ /dev/null @@ -1,42 +0,0 @@ -require "ldclient-rb/impl/big_segments" - -require "spec_helper" - -module LaunchDarkly - module LDClientSpecBase - def sdk_key - "sdk-key" - end - - def user - { - key: "userkey", - email: "test@example.com", - name: "Bob" - } - end - - def null_logger - double().as_null_object - end - - def null_data_source - NullUpdateProcessor.new - end - - def base_config - Config.new(send_events: false, data_source: null_data_source, logger: null_logger) - end - - def with_client(config) - client = LDClient.new(sdk_key, config) - begin - yield client - ensure - client.close - end - end - end - - RSpec.configure { |c| c.include LDClientSpecBase, :ldclient_spec_base => true } -end diff --git a/spec/mock_components.rb b/spec/mock_components.rb index aa57431c..488f6b35 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -1,7 +1,55 @@ +require "spec_helper" + require "ldclient-rb/impl/big_segments" +require "ldclient-rb/impl/evaluator" require "ldclient-rb/interfaces" +def sdk_key + "sdk-key" +end + +def null_data + LaunchDarkly::NullUpdateProcessor.new +end + +def null_logger + double().as_null_object +end + +def base_config + { + data_source: null_data, + send_events: false, + logger: null_logger + } +end + +def test_config(add_props = {}) + LaunchDarkly::Config.new(base_config.merge(add_props)) +end + +def with_client(config) + ensure_close(LaunchDarkly::LDClient.new(sdk_key, config)) do |client| + yield client + end +end + +def basic_user + { "key": "user-key" } +end + module LaunchDarkly + class CapturingFeatureStore + attr_reader :received_data + + def init(all_data) + @received_data = all_data + end + + def stop + end + end + class MockBigSegmentStore def initialize @metadata = nil @@ -33,6 +81,37 @@ def setup_membership(user_key, membership) user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) @memberships[user_hash] = membership end + + def setup_segment_for_user(user_key, segment, included) + user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) + @memberships[user_hash] ||= {} + @memberships[user_hash][Impl::Evaluator.make_big_segment_ref(segment)] = included + end + end + + class MockDataSource + def self.factory_with_data(data) + lambda { |sdk_key, config| MockDataSource.new(config.feature_store, data) } + end + + def initialize(store, data) + @store = store + @data = data + end + + def start + @store.init(@data) + ev = Concurrent::Event.new + ev.set + ev + end + + def stop + end + + def initialized? 
+ true + end end class SimpleObserver diff --git a/spec/model_builders.rb b/spec/model_builders.rb new file mode 100644 index 00000000..a7c0bd6e --- /dev/null +++ b/spec/model_builders.rb @@ -0,0 +1,154 @@ + +class FlagBuilder + def initialize(key) + @flag = { + key: key, + version: 1, + variations: [ false ], + rules: [] + } + end + + def build + @flag.clone + end + + def version(value) + @flag[:version] = value + self + end + + def variations(*values) + @flag[:variations] = values + self + end + + def on(value) + @flag[:on] = value + self + end + + def rule(r) + @flag[:rules].append(r.build) + self + end + + def off_with_value(value) + @flag[:variations] = [ value ] + @flag[:offVariation] = 0 + @flag[:on] = false + self + end + + def off_variation(value) + @flag[:offVariation] = value + self + end + + def fallthrough_variation(value) + @flag[:fallthrough] = { variation: value } + self + end + + def track_events(value) + @flag[:trackEvents] = value + self + end + + def track_events_fallthrough(value) + @flag[:trackEventsFallthrough] = value + self + end + + def debug_events_until_date(value) + @flag[:debugEventsUntilDate] = value + self + end +end + +class RuleBuilder + def initialize() + @rule = { + id: "", + variation: 0, + clauses: [] + } + end + + def build + @rule.clone + end + + def id(value) + @rule[:id] = value + self + end + + def variation(value) + @rule[:variation] = value + self + end + + def clause(c) + @rule[:clauses].append(c) + self + end + + def track_events(value) + @rule[:trackEvents] = value + self + end +end + +class SegmentBuilder + def initialize(key) + @segment = { + key: key, + version: 1, + included: [], + excluded: [] + } + end + + def build + @segment.clone + end + + def included(*keys) + @segment[:included] = keys + self + end + + def excluded(*keys) + @segment[:excluded] = keys + self + end + + def unbounded(value) + @segment[:unbounded] = value + self + end + + def generation(value) + @segment[:generation] = value + self + end +end + +class Clauses + def self.match_segment(segment) + { + "attribute": "", + "op": "segmentMatch", + "values": [ segment.is_a?(Hash) ? 
segment[:key] : segment ] + } + end + + def self.match_user(user) + { + "attribute": "key", + "op": "in", + "values": [ user[:key] ] + } + end +end From 554407c7e9021aad2bba757fc6a58851c836e9cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 12:23:49 -0800 Subject: [PATCH 217/292] more cleanup/DRY --- spec/ldclient_evaluation_spec.rb | 232 ++++++++++++++++--------------- spec/ldclient_events_spec.rb | 46 +++--- spec/ldclient_spec.rb | 2 +- 3 files changed, 147 insertions(+), 133 deletions(-) diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index da104e22..ae87ed68 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -7,18 +7,15 @@ module LaunchDarkly describe "LDClient evaluation tests" do context "variation" do - feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, - trackEvents: true, debugEventsUntilDate: 1000 } - it "returns the default value if the client is offline" do - ensure_close(LDClient.new(sdk_key, test_config(offline: true))) do |offline_client| + with_client(test_config(offline: true)) do |offline_client| result = offline_client.variation("doesntmatter", basic_user, "default") expect(result).to eq "default" end end it "returns the default value for an unknown feature" do - ensure_close(LDClient.new(sdk_key, test_config())) do |client| + with_client(test_config) do |client| expect(client.variation("badkey", basic_user, "default")).to eq "default" end end @@ -28,7 +25,7 @@ module LaunchDarkly store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(client.variation("flagkey", basic_user, "default")).to eq "value" end end @@ -38,7 +35,7 @@ module LaunchDarkly store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(client.variation("flagkey", basic_user, "default")).to eq "default" end end @@ -52,7 +49,7 @@ module LaunchDarkly store.upsert(SEGMENTS, segment) store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(client.variation("flagkey", basic_user, false)).to be true end end @@ -70,7 +67,7 @@ module LaunchDarkly segstore.setup_segment_for_user(basic_user[:key], segment, true) big_seg_config = BigSegmentsConfig.new(store: segstore) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, big_segments: big_seg_config))) do |client| + with_client(test_config(feature_store: store, big_segments: big_seg_config)) do |client| expect(client.variation("flagkey", basic_user, false)).to be true end end @@ -81,37 +78,44 @@ module LaunchDarkly trackEvents: true, debugEventsUntilDate: 1000 } it "returns the default value if the client is offline" do - offline_client = LDClient.new(sdk_key, test_config(offline: true)) - result = offline_client.variation_detail("doesntmatter", basic_user, "default") - expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_CLIENT_NOT_READY)) - expect(result).to eq expected + with_client(test_config(offline: true)) do |offline_client| + result = offline_client.variation_detail("doesntmatter", basic_user, "default") + expected = 
EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_CLIENT_NOT_READY)) + expect(result).to eq expected + end end it "returns the default value for an unknown feature" do - client = LDClient.new(sdk_key, test_config()) - result = client.variation_detail("badkey", basic_user, "default") - expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)) - expect(result).to eq expected + with_client(test_config) do |client| + result = client.variation_detail("badkey", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)) + expect(result).to eq expected + end end it "returns a value for an existing feature" do + flag = FlagBuilder.new("key").off_with_value("value").build store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) - store.upsert(FEATURES, feature_with_value) - result = client.variation_detail("key", basic_user, "default") - expected = EvaluationDetail.new("value", 0, EvaluationReason::off) - expect(result).to eq expected + store.upsert(FEATURES, flag) + + with_client(test_config(feature_store: store)) do |client| + result = client.variation_detail("key", basic_user, "default") + expected = EvaluationDetail.new("value", 0, EvaluationReason::off) + expect(result).to eq expected + end end it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.upsert(FEATURES, empty_feature) - result = client.variation_detail("key", basic_user, "default") - expected = EvaluationDetail.new("default", nil, EvaluationReason::off) - expect(result).to eq expected - expect(result.default_value?).to be true + + with_client(test_config(feature_store: store)) do |client| + result = client.variation_detail("key", basic_user, "default") + expected = EvaluationDetail.new("default", nil, EvaluationReason::off) + expect(result).to eq expected + expect(result.default_value?).to be true + end end it "includes big segment status in reason when evaluating a flag that references a big segment" do @@ -128,7 +132,7 @@ module LaunchDarkly segstore.setup_metadata(Time.now) big_seg_config = BigSegmentsConfig.new(store: segstore) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, big_segments: big_seg_config))) do |client| + with_client(test_config(feature_store: store, big_segments: big_seg_config)) do |client| result = client.variation_detail("flagkey", basic_user, false) expect(result.value).to be true expect(result.reason.big_segments_status).to eq(BigSegmentsStatus::HEALTHY) @@ -142,38 +146,42 @@ module LaunchDarkly it "returns flag values" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - result = client.all_flags({ key: 'userkey' }) - expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + with_client(test_config(feature_store: store)) do |client| + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end end it "returns empty map for nil user" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - result = 
client.all_flags(nil) - expect(result).to eq({}) + with_client(test_config(feature_store: store)) do |client| + result = client.all_flags(nil) + expect(result).to eq({}) + end end it "returns empty map for nil user key" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - result = client.all_flags({}) - expect(result).to eq({}) + with_client(test_config(feature_store: store)) do |client| + result = client.all_flags({}) + expect(result).to eq({}) + end end it "returns empty map if offline" do store = InMemoryFeatureStore.new - offline_client = LDClient.new(sdk_key, test_config(offline: true, feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - result = offline_client.all_flags(nil) - expect(result).to eq({}) + with_client(test_config(feature_store: store, offline: true)) do |offline_client| + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end end end @@ -183,33 +191,34 @@ module LaunchDarkly it "returns flags state" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - state = client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - '$flagsState' => { - 'key1' => { - :variation => 0, - :version => 100 + with_client(test_config(feature_store: store)) do |client| + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 + } }, - 'key2' => { - :variation => 1, - :version => 200, - :trackEvents => true, - :debugEventsUntilDate => 1000 - } - }, - '$valid' => true - }) + '$valid' => true + }) + end end it "can be filtered for only client-side flags" do @@ -219,16 +228,17 @@ module LaunchDarkly flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 }}) - state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) - expect(state.valid?).to be true + with_client(test_config(feature_store: store)) do |client| + state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true - values = state.values_map - expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end end it "can omit details for untracked flags" do @@ -238,67 +248,71 @@ module LaunchDarkly flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, 
test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) - expect(state.valid?).to be true - - values = state.values_map - expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) - - result = state.as_json - expect(result).to eq({ - 'key1' => 'value1', - 'key2' => 'value2', - 'key3' => 'value3', - '$flagsState' => { - 'key1' => { - :variation => 0 - }, - 'key2' => { - :variation => 1, - :version => 200, - :trackEvents => true + with_client(test_config(feature_store: store)) do |client| + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => future_time + } }, - 'key3' => { - :variation => 1, - :version => 300, - :debugEventsUntilDate => future_time - } - }, - '$valid' => true - }) + '$valid' => true + }) + end end it "returns empty state for nil user" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - state = client.all_flags_state(nil) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) + with_client(test_config(feature_store: store)) do |client| + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end end it "returns empty state for nil user key" do store = InMemoryFeatureStore.new - client = LDClient.new(sdk_key, test_config(feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - state = client.all_flags_state({}) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) + with_client(test_config(feature_store: store)) do |client| + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end end it "returns empty state if offline" do store = InMemoryFeatureStore.new - offline_client = LDClient.new(sdk_key, test_config(offline: true, feature_store: store)) store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - state = offline_client.all_flags_state({ key: 'userkey' }) - expect(state.valid?).to be false - expect(state.values_map).to eq({}) + with_client(test_config(feature_store: store, offline: true)) do |offline_client| + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end end end end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index c88c2c00..486d93a1 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -11,14 +11,14 @@ def event_processor(client) end it 'uses NullEventProcessor if send_events is false' do - ensure_close(LDClient.new(sdk_key, test_config(send_events: false))) do |client| + with_client(test_config(send_events: false)) do |client| expect(event_processor(client)).to be_a(LaunchDarkly::NullEventProcessor) end end context "evaluation 
events - variation" do it "unknown flag" do - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default" )) @@ -32,7 +32,7 @@ def event_processor(client) store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: flag[:key], @@ -54,7 +54,7 @@ def event_processor(client) store.upsert(FEATURES, flag) logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + with_client(test_config(feature_store: store, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:error) client.variation(flag[:key], nil, "default") @@ -68,7 +68,7 @@ def event_processor(client) logger = double().as_null_object keyless_user = { key: nil } - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + with_client(test_config(feature_store: store, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.variation(flag[:key], keyless_user, "default") @@ -83,7 +83,7 @@ def event_processor(client) store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: flag[:key], @@ -105,7 +105,7 @@ def event_processor(client) store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: flag[:key], @@ -124,7 +124,7 @@ def event_processor(client) context "evaluation events - variation_detail" do it "unknown flag" do - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default", reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) @@ -139,7 +139,7 @@ def event_processor(client) store = InMemoryFeatureStore.new store.upsert(FEATURES, flag) - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store))) do |client| + with_client(test_config(feature_store: store)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", key: flag[:key], @@ -162,7 +162,7 @@ def event_processor(client) store.upsert(FEATURES, flag) logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + with_client(test_config(feature_store: store, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:error) client.variation_detail(flag[:key], nil, "default") @@ -175,7 
+175,7 @@ def event_processor(client) store.upsert(FEATURES, flag) logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(feature_store: store, logger: logger))) do |client| + with_client(test_config(feature_store: store, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.variation_detail(flag[:key], { key: nil }, "default") @@ -185,7 +185,7 @@ def event_processor(client) context "identify" do it "queues up an identify event" do - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "identify", key: basic_user[:key], user: basic_user)) client.identify(basic_user) @@ -195,7 +195,7 @@ def event_processor(client) it "does not send event, and logs warning, if user is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.identify(nil) @@ -205,7 +205,7 @@ def event_processor(client) it "does not send event, and logs warning, if user key is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.identify({ key: nil }) @@ -215,7 +215,7 @@ def event_processor(client) context "track" do it "queues up an custom event" do - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "custom", key: "custom_event_name", user: basic_user, data: 42)) client.track("custom_event_name", basic_user, 42) @@ -223,7 +223,7 @@ def event_processor(client) end it "can include a metric value" do - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "custom", key: "custom_event_name", user: basic_user, metricValue: 1.5)) client.track("custom_event_name", basic_user, nil, 1.5) @@ -233,7 +233,7 @@ def event_processor(client) it "includes contextKind with anonymous user" do anon_user = { key: 'user-key', anonymous: true } - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "custom", key: "custom_event_name", user: anon_user, metricValue: 2.2, contextKind: "anonymousUser")) client.track("custom_event_name", anon_user, nil, 2.2) @@ -244,7 +244,7 @@ def event_processor(client) numeric_key_user = { key: 33 } sanitized_user = { key: "33" } - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including(user: sanitized_user)) client.track("custom_event_name", numeric_key_user, nil) end @@ -253,7 +253,7 @@ def event_processor(client) it "does not send event, and logs a warning, if user is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| 
expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.track("custom_event_name", nil, nil) @@ -263,7 +263,7 @@ def event_processor(client) it "does not send event, and logs warning, if user key is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.track("custom_event_name", { key: nil }, nil) @@ -275,7 +275,7 @@ def event_processor(client) it "queues up an alias event" do anon_user = { key: "user-key", anonymous: true } - ensure_close(LDClient.new(sdk_key, test_config)) do |client| + with_client(test_config) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "alias", key: basic_user[:key], contextKind: "user", previousKey: anon_user[:key], previousContextKind: "anonymousUser")) client.alias(basic_user, anon_user) @@ -285,7 +285,7 @@ def event_processor(client) it "does not send event, and logs warning, if user is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.alias(nil, nil) @@ -295,7 +295,7 @@ def event_processor(client) it "does not send event, and logs warning, if user key is nil" do logger = double().as_null_object - ensure_close(LDClient.new(sdk_key, test_config(logger: logger))) do |client| + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) client.alias({ key: nil }, { key: nil }) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6b15245d..ae4e948c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -74,7 +74,7 @@ module LaunchDarkly it "passes data set to feature store in correct order on init" do store = CapturingFeatureStore.new data_source_factory = MockDataSource.factory_with_data(dependency_ordering_test_data) - ensure_close(subject.new(sdk_key, test_config(feature_store: store, data_source: data_source_factory))) do |client| + with_client(test_config(feature_store: store, data_source: data_source_factory)) do |client| data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) From 2b544c52e008eb0afe1067a74c58d353d2719f41 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 17:13:59 -0800 Subject: [PATCH 218/292] add use_preconfigured_flag and use_preconfigured_segment to TestData (#173) --- .../test_data/test_data_source.rb | 4 +- lib/ldclient-rb/integrations/test_data.rb | 78 +++++++++++++++-- .../integrations/test_data/flag_builder.rb | 2 +- spec/integrations/test_data_spec.rb | 84 +++++++++++++------ 4 files changed, 132 insertions(+), 36 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb index b201c26e..a2799a7d 100644 --- a/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb @@ -30,8 +30,8 @@ def stop @test_data.closed_instance(self) end - def upsert(new_flag) - @feature_store.upsert(FEATURES, new_flag) + def upsert(kind, item) + @feature_store.upsert(kind, item) end end end diff --git 
a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 3a8f190f..dc9612c8 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -41,6 +41,7 @@ def self.data_source def initialize @flag_builders = Hash.new @current_flags = Hash.new + @current_segments = Hash.new @instances = Array.new @instances_lock = Concurrent::ReadWriteLock.new @lock = Concurrent::ReadWriteLock.new @@ -85,9 +86,9 @@ def call(_, config) def flag(key) existing_builder = @lock.with_read_lock { @flag_builders[key] } if existing_builder.nil? then - FlagBuilder.new(key).boolean_flag + FlagBuilder.new(key).boolean_flag else - existing_builder.clone + existing_builder.clone end end @@ -104,7 +105,7 @@ def flag(key) # unless you call {#update} again. # # @param flag_builder [FlagBuilder] a flag configuration builder - # @return [TestData] self + # @return [TestData] the TestData instance # def update(flag_builder) new_flag = nil @@ -118,17 +119,82 @@ def update(flag_builder) new_flag = flag_builder.build(version+1) @current_flags[flag_key] = new_flag end + update_item(FEATURES, new_flag) + self + end + + # + # Copies a full feature flag data model object into the test data. + # + # It immediately propagates the flag change to any `LDClient` instance(s) that you have already + # configured to use this `TestData`. If no `LDClient` has been started yet, it simply adds + # this flag to the test data which will be provided to any LDClient that you subsequently + # configure. + # + # Use this method if you need to use advanced flag configuration properties that are not supported by + # the simplified {FlagBuilder} API. Otherwise it is recommended to use the regular {flag}/{update} + # mechanism to avoid dependencies on details of the data model. + # + # You cannot make incremental changes with {flag}/{update} to a flag that has been added in this way; + # you can only replace it with an entirely new flag configuration. + # + # @param flag [Hash] the flag configuration + # @return [TestData] the TestData instance + # + def use_preconfigured_flag(flag) + use_preconfigured_item(FEATURES, flag, @current_flags) + end + + # + # Copies a full user segment data model object into the test data. + # + # It immediately propagates the change to any `LDClient` instance(s) that you have already + # configured to use this `TestData`. If no `LDClient` has been started yet, it simply adds + # this segment to the test data which will be provided to any LDClient that you subsequently + # configure. + # + # This method is currently the only way to inject user segment data, since there is no builder + # API for segments. It is mainly intended for the SDK's own tests of user segment functionality, + # since application tests that need to produce a desired evaluation state could do so more easily + # by just setting flag values. + # + # @param segment [Hash] the segment configuration + # @return [TestData] the TestData instance + # + def use_preconfigured_segment(segment) + use_preconfigured_item(SEGMENTS, segment, @current_segments) + end + + private def use_preconfigured_item(kind, item, current) + key = item[:key].to_sym + @lock.with_write_lock do + old_item = current[key] + if !old_item.nil? 
then + item = item.clone + item[:version] = old_item[:version] + 1 + end + current[key] = item + end + update_item(kind, item) + self + end + + private def update_item(kind, item) @instances_lock.with_read_lock do @instances.each do | instance | - instance.upsert(new_flag) + instance.upsert(kind, item) end end - self end # @private def make_init_data - { FEATURES => @current_flags } + @lock.with_read_lock do + { + FEATURES => @current_flags.clone, + SEGMENTS => @current_segments.clone + } + end end # @private diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index 3f3cc36c..79d6247b 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -279,7 +279,7 @@ def build(version) } unless @off_variation.nil? then - res[:off_variation] = @off_variation + res[:offVariation] = @off_variation end unless @fallthrough_variation.nil? then diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb index 8d3ba024..75418bd3 100644 --- a/spec/integrations/test_data_spec.rb +++ b/spec/integrations/test_data_spec.rb @@ -4,23 +4,23 @@ module LaunchDarkly module Integrations describe 'TestData' do it 'is a valid datasource' do - td = LaunchDarkly::Integrations::TestData.data_source - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - expect(config.feature_store.all(LaunchDarkly::FEATURES)).to eql({}) + td = Integrations::TestData.data_source + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + expect(config.feature_store.all(FEATURES)).to eql({}) client.close end it 'initializes the feature store with existing flags' do - td = LaunchDarkly::Integrations::TestData.data_source + td = Integrations::TestData.data_source td.update(td.flag('flag')) - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, - off_variation: 1, + offVariation: 1, on: true, version: 1 }) @@ -28,45 +28,45 @@ module Integrations end it 'updates the feature store with new flags' do - td = LaunchDarkly::Integrations::TestData.data_source + td = Integrations::TestData.data_source td.update(td.flag('flag')) - config = LaunchDarkly::Config.new(send_events: false, data_source: td) - client = LaunchDarkly::LDClient.new('sdkKey', config) - config2 = LaunchDarkly::Config.new(send_events: false, data_source: td) - client2 = LaunchDarkly::LDClient.new('sdkKey', config2) + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + config2 = Config.new(send_events: false, data_source: td) + client2 = LDClient.new('sdkKey', config2) - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, - off_variation: 1, + offVariation: 1, on: true, version: 1 }) - expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ 
key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, - off_variation: 1, + offVariation: 1, on: true, version: 1 }) td.update(td.flag('flag').variation_for_all_users(false)) - expect(config.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 1 }, - off_variation: 1, + offVariation: 1, on: true, version: 2 }) - expect(config2.feature_store.get(LaunchDarkly::FEATURES, 'flag')).to eql({ + expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 1 }, - off_variation: 1, + offVariation: 1, on: true, version: 2 }) @@ -75,12 +75,42 @@ module Integrations client2.close end + it 'can include preconfigured items' do + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: 'my-flag', version: 1000, on: true }) + td.use_preconfigured_segment({ key: 'my-segment', version: 2000 }) + + config = Config.new(send_events: false, data_source: td) + client = LDClient.new('sdkKey', config) + + expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + key: 'my-flag', version: 1000, on: true + }) + expect(config.feature_store.get(SEGMENTS, 'my-segment')).to eql({ + key: 'my-segment', version: 2000 + }) + + td.use_preconfigured_flag({ key: 'my-flag', on: false }) + + expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + key: 'my-flag', version: 1001, on: false + }) + + td.use_preconfigured_segment({ key: 'my-segment', included: [ 'x' ] }) + + expect(config.feature_store.get(SEGMENTS, 'my-segment')).to eql({ + key: 'my-segment', version: 2001, included: [ 'x' ] + }) + + client.close + end + it 'TestData.flag defaults to a boolean flag' do td = TestData.new f = td.flag('flag').build(0) expect(f[:variations]).to eq([true, false]) expect(f[:fallthrough][:variation]).to eq(0) - expect(f[:off_variation]).to eq(1) + expect(f[:offVariation]).to eq(1) end it 'TestData.flag returns a copy of the existing flag if it exists' do @@ -116,7 +146,7 @@ module Integrations it 'can set variation for when targeting is off' do f = TestData::FlagBuilder.new('flag').off_variation(0).build(1) - expect(f[:off_variation]).to eq(0) + expect(f[:offVariation]).to eq(0) end it 'can set a list of variations' do @@ -128,17 +158,17 @@ module Integrations f = TestData::FlagBuilder.new('flag').boolean_flag.build(1) expect(f[:variations]).to eq([true, false]) expect(f[:fallthrough][:variation]).to eq(0) - expect(f[:off_variation]).to eq(1) + expect(f[:offVariation]).to eq(1) end it 'can handle boolean or index variation' do f = TestData::FlagBuilder.new('flag').off_variation(true).build(1) expect(f[:variations]).to eq([true, false]) - expect(f[:off_variation]).to eq(0) + expect(f[:offVariation]).to eq(0) f2 = TestData::FlagBuilder.new('flag').fallthrough_variation(true).build(1) expect(f2[:variations]).to eq([true, false]) - expect(f2[:off_variation]).to eq(1) + expect(f2[:offVariation]).to eq(1) end it 'can set variation for all users' do From c8c86938be96595dbebf91edca567f4a4961faee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 18:10:25 -0800 Subject: [PATCH 219/292] always cache big segment query result even if it's nil --- lib/ldclient-rb/impl/big_segments.rb | 5 +++++ spec/impl/big_segments_spec.rb | 14 ++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index 69f40bb7..c2d82cd8 100644 
--- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -11,6 +11,10 @@ module Impl BigSegmentMembershipResult = Struct.new(:membership, :status) class BigSegmentStoreManager + # use this as a singleton whenever a membership query returns nil; it's safe to reuse it because + # we will never modify the membership properties after they're queried + EMPTY_MEMBERSHIP = {} + def initialize(big_segments_config, logger) @store = big_segments_config.store @stale_after_millis = big_segments_config.stale_after * 1000 @@ -38,6 +42,7 @@ def get_user_membership(user_key) if !membership begin membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key)) + membership = EMPTY_MEMBERSHIP if membership.nil? @cache[user_key] = membership rescue => e LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e) diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb index 640e03dc..5d0ba192 100644 --- a/spec/impl/big_segments_spec.rb +++ b/spec/impl/big_segments_spec.rb @@ -3,6 +3,7 @@ require "concurrent/atomics" +require "mock_components" require "spec_helper" module LaunchDarkly @@ -59,6 +60,19 @@ def with_manager(config) end end + it "can cache a nil result" do + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).once.and_return(nil) + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store)) do |m| + expected_result = BigSegmentMembershipResult.new({}, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + it "with stale status" do expected_membership = { 'key1' => true, 'key2' => true } store = double From 62832779b717e0931dd0360adb29e42ed5ab7d06 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 18:13:30 -0800 Subject: [PATCH 220/292] comments --- spec/impl/big_segments_spec.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb index 5d0ba192..134edf0d 100644 --- a/spec/impl/big_segments_spec.rb +++ b/spec/impl/big_segments_spec.rb @@ -51,6 +51,7 @@ def with_manager(config) store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) expect(store).to receive(:get_membership).with(user_hash).once.and_return(expected_membership) + # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| @@ -64,6 +65,7 @@ def with_manager(config) store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) expect(store).to receive(:get_membership).with(user_hash).once.and_return(nil) + # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| From 88e6b2ad9c76c596ea2ac5c320cbf541f7dd0c18 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 Dec 2021 20:46:02 -0800 Subject: [PATCH 221/292] add test for cache expiration --- spec/impl/big_segments_spec.rb | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb index 134edf0d..89637653 100644 --- 
a/spec/impl/big_segments_spec.rb +++ b/spec/impl/big_segments_spec.rb @@ -75,6 +75,22 @@ def with_manager(config) end end + it "cache can expire" do + expected_membership = { 'key1' => true, 'key2' => true } + store = double + expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) + expect(store).to receive(:get_membership).with(user_hash).twice.and_return(expected_membership) + # the ".twice" on this mock expectation is what verifies that the cached result expired + allow(store).to receive(:stop) + + with_manager(BigSegmentsConfig.new(store: store, user_cache_time: 0.01)) do |m| + expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) + expect(m.get_user_membership(user_key)).to eq(expected_result) + sleep(0.1) + expect(m.get_user_membership(user_key)).to eq(expected_result) + end + end + it "with stale status" do expected_membership = { 'key1' => true, 'key2' => true } store = double From d9c3274f360f1b39de127696f935e81bbe97bb9e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 9 Dec 2021 09:47:13 -0800 Subject: [PATCH 222/292] use TestData in our own tests (#174) * use TestData in our own tests * fix test --- spec/ldclient_evaluation_spec.rb | 169 ++++++++++++++----------------- spec/ldclient_events_spec.rb | 118 ++++++++++----------- spec/ldclient_spec.rb | 7 +- spec/mock_components.rb | 25 ----- 4 files changed, 139 insertions(+), 180 deletions(-) diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index ae87ed68..c63cb882 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -21,53 +21,55 @@ module LaunchDarkly end it "returns the value for an existing feature" do - flag = FlagBuilder.new("flagkey").off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: td)) do |client| expect(client.variation("flagkey", basic_user, "default")).to eq "value" end end it "returns the default value if a feature evaluates to nil" do - flag = FlagBuilder.new("flagkey").on(false).off_variation(nil).build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) - - with_client(test_config(feature_store: store)) do |client| + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil + key: "flagkey", + on: false, + offVariation: nil + }) + + with_client(test_config(data_source: td)) do |client| expect(client.variation("flagkey", basic_user, "default")).to eq "default" end end it "can evaluate a flag that references a segment" do + td = Integrations::TestData.data_source segment = SegmentBuilder.new("segmentkey").included(basic_user[:key]).build - flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( - RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) - ).build - store = InMemoryFeatureStore.new - store.upsert(SEGMENTS, segment) - store.upsert(FEATURES, flag) - - with_client(test_config(feature_store: store)) do |client| + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) + + with_client(test_config(data_source: td)) do |client| 
expect(client.variation("flagkey", basic_user, false)).to be true end end it "can evaluate a flag that references a big segment" do + td = Integrations::TestData.data_source segment = SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build - flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( - RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) - ).build - store = InMemoryFeatureStore.new - store.upsert(SEGMENTS, segment) - store.upsert(FEATURES, flag) + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) segstore = MockBigSegmentStore.new segstore.setup_segment_for_user(basic_user[:key], segment, true) big_seg_config = BigSegmentsConfig.new(store: segstore) - with_client(test_config(feature_store: store, big_segments: big_seg_config)) do |client| + with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| expect(client.variation("flagkey", basic_user, false)).to be true end end @@ -94,24 +96,26 @@ module LaunchDarkly end it "returns a value for an existing feature" do - flag = FlagBuilder.new("key").off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) - - with_client(test_config(feature_store: store)) do |client| - result = client.variation_detail("key", basic_user, "default") + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + with_client(test_config(data_source: td)) do |client| + result = client.variation_detail("flagkey", basic_user, "default") expected = EvaluationDetail.new("value", 0, EvaluationReason::off) expect(result).to eq expected end end it "returns the default value if a feature evaluates to nil" do - empty_feature = { key: "key", on: false, offVariation: nil } - store = InMemoryFeatureStore.new - store.upsert(FEATURES, empty_feature) - - with_client(test_config(feature_store: store)) do |client| - result = client.variation_detail("key", basic_user, "default") + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil + key: "flagkey", + on: false, + offVariation: nil + }) + + with_client(test_config(data_source: td)) do |client| + result = client.variation_detail("flagkey", basic_user, "default") expected = EvaluationDetail.new("default", nil, EvaluationReason::off) expect(result).to eq expected expect(result.default_value?).to be true @@ -119,20 +123,20 @@ module LaunchDarkly end it "includes big segment status in reason when evaluating a flag that references a big segment" do + td = Integrations::TestData.data_source segment = SegmentBuilder.new("segmentkey").unbounded(true).generation(1).build - flag = FlagBuilder.new("flagkey").on(true).variations(true, false).rule( - RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) - ).build - store = InMemoryFeatureStore.new - store.upsert(SEGMENTS, segment) - store.upsert(FEATURES, flag) + td.use_preconfigured_segment(segment) + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").on(true).variations(true, false).rule( + RuleBuilder.new.variation(0).clause(Clauses.match_segment(segment)) + ).build) segstore = MockBigSegmentStore.new segstore.setup_segment_for_user(basic_user[:key], segment, true) segstore.setup_metadata(Time.now) big_seg_config = BigSegmentsConfig.new(store: 
segstore) - with_client(test_config(feature_store: store, big_segments: big_seg_config)) do |client| + with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| result = client.variation_detail("flagkey", basic_user, false) expect(result.value).to be true expect(result.reason.big_segments_status).to eq(BigSegmentsStatus::HEALTHY) @@ -143,42 +147,36 @@ module LaunchDarkly describe "all_flags" do let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + let(:test_data) { + td = Integrations::TestData.data_source + td.use_preconfigured_flag(flag1) + td.use_preconfigured_flag(flag2) + td + } it "returns flag values" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| result = client.all_flags({ key: 'userkey' }) expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end end it "returns empty map for nil user" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| result = client.all_flags(nil) expect(result).to eq({}) end end it "returns empty map for nil user key" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| result = client.all_flags({}) expect(result).to eq({}) end end it "returns empty map if offline" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store, offline: true)) do |offline_client| + with_client(test_config(data_source: test_data, offline: true)) do |offline_client| result = offline_client.all_flags(nil) expect(result).to eq({}) end @@ -188,12 +186,16 @@ module LaunchDarkly context "all_flags_state" do let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + let(:test_data) { + td = Integrations::TestData.data_source + td.use_preconfigured_flag(flag1) + td.use_preconfigured_flag(flag2) + td + } it "returns flags state" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| state = client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be true @@ -222,17 +224,13 @@ module LaunchDarkly end it "can be filtered for only client-side flags" do - flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } - flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } - flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } - flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: 
false }) + td.use_preconfigured_flag({ key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false }) + td.use_preconfigured_flag({ key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true }) + td.use_preconfigured_flag({ key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true }) - store = InMemoryFeatureStore.new - store.init({ FEATURES => { - flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 - }}) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: td)) do |client| state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) expect(state.valid?).to be true @@ -243,14 +241,12 @@ module LaunchDarkly it "can omit details for untracked flags" do future_time = (Time.now.to_f * 1000).to_i + 100000 - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + td = Integrations::TestData.data_source + td.use_preconfigured_flag({ key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false }) + td.use_preconfigured_flag({ key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true }) + td.use_preconfigured_flag({ key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time }) - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: td)) do |client| state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true @@ -283,10 +279,7 @@ module LaunchDarkly end it "returns empty state for nil user" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| state = client.all_flags_state(nil) expect(state.valid?).to be false expect(state.values_map).to eq({}) @@ -294,10 +287,7 @@ module LaunchDarkly end it "returns empty state for nil user key" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: test_data)) do |client| state = client.all_flags_state({}) expect(state.valid?).to be false expect(state.values_map).to eq({}) @@ -305,10 +295,7 @@ module LaunchDarkly end it "returns empty state if offline" do - store = InMemoryFeatureStore.new - store.init({ FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) - - with_client(test_config(feature_store: store, offline: true)) do |offline_client| + with_client(test_config(data_source: test_data, offline: true)) do |offline_client| state = offline_client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be false expect(state.values_map).to eq({}) diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 486d93a1..86eaa77d 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -27,67 +27,64 @@ def event_processor(client) end it 
"known flag" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value"). - track_events(true).debug_events_until_date(1000).build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: td)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", - key: flag[:key], - version: flag[:version], + key: "flagkey", + version: 1, user: basic_user, variation: 0, value: "value", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 + default: "default" )) - client.variation(flag[:key], basic_user, "default") + client.variation("flagkey", basic_user, "default") end end it "does not send event, and logs error, if user is nil" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + logger = double().as_null_object - with_client(test_config(feature_store: store, logger: logger)) do |client| + with_client(test_config(data_source: td, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:error) - client.variation(flag[:key], nil, "default") + client.variation("flagkey", nil, "default") end end it "does not send event, and logs warning, if user key is nil" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + logger = double().as_null_object keyless_user = { key: nil } - with_client(test_config(feature_store: store, logger: logger)) do |client| + with_client(test_config(data_source: td, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) - client.variation(flag[:key], keyless_user, "default") + client.variation("flagkey", keyless_user, "default") end end it "sets trackEvents and reason if trackEvents is set for matched rule" do - flag = FlagBuilder.new("flagkey").version(100).on(true).variations("value"). - rule(RuleBuilder.new.variation(0).id("id").track_events(true). - clause(Clauses.match_user(basic_user))). - build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) - - with_client(test_config(feature_store: store)) do |client| + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value"). + rule(RuleBuilder.new.variation(0).id("id").track_events(true). + clause(Clauses.match_user(basic_user))). 
+ build + ) + + with_client(test_config(data_source: td)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", - key: flag[:key], - version: flag[:version], + key: "flagkey", + version: 100, user: basic_user, variation: 0, value: "value", @@ -95,21 +92,22 @@ def event_processor(client) trackEvents: true, reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') )) - client.variation(flag[:key], basic_user, "default") + client.variation("flagkey", basic_user, "default") end end it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do - flag = FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0). - track_events_fallthrough(true).build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0). + track_events_fallthrough(true).build + ) - with_client(test_config(feature_store: store)) do |client| + with_client(test_config(data_source: td)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", - key: flag[:key], - version: flag[:version], + key: "flagkey", + version: 100, user: basic_user, variation: 0, value: "value", @@ -117,7 +115,7 @@ def event_processor(client) trackEvents: true, reason: LaunchDarkly::EvaluationReason::fallthrough )) - client.variation(flag[:key], basic_user, "default") + client.variation("flagkey", basic_user, "default") end end end @@ -134,51 +132,47 @@ def event_processor(client) end it "known flag" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value"). - track_events(true).debug_events_until_date(1000).build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) - - with_client(test_config(feature_store: store)) do |client| + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + with_client(test_config(data_source: td)) do |client| expect(event_processor(client)).to receive(:add_event).with(hash_including( kind: "feature", - key: flag[:key], - version: flag[:version], + key: "flagkey", + version: 1, user: basic_user, variation: 0, value: "value", default: "default", - trackEvents: true, - debugEventsUntilDate: 1000, reason: LaunchDarkly::EvaluationReason::off )) - client.variation_detail(flag[:key], basic_user, "default") + client.variation_detail("flagkey", basic_user, "default") end end it "does not send event, and logs error, if user is nil" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + logger = double().as_null_object - - with_client(test_config(feature_store: store, logger: logger)) do |client| + + with_client(test_config(data_source: td, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:error) - client.variation_detail(flag[:key], nil, "default") + client.variation_detail("flagkey", nil, "default") end end it "does not send event, and logs warning, if user key is nil" do - flag = FlagBuilder.new("flagkey").version(100).off_with_value("value").build - store = InMemoryFeatureStore.new - store.upsert(FEATURES, flag) + td = 
Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + logger = double().as_null_object - with_client(test_config(feature_store: store, logger: logger)) do |client| + with_client(test_config(data_source: td, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) - client.variation_detail(flag[:key], { key: nil }, "default") + client.variation_detail("flagkey", { key: nil }, "default") end end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index ae4e948c..7929713d 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -73,8 +73,11 @@ module LaunchDarkly it "passes data set to feature store in correct order on init" do store = CapturingFeatureStore.new - data_source_factory = MockDataSource.factory_with_data(dependency_ordering_test_data) - with_client(test_config(feature_store: store, data_source: data_source_factory)) do |client| + td = Integrations::TestData.data_source + dependency_ordering_test_data[FEATURES].each { |key, flag| td.use_preconfigured_flag(flag) } + dependency_ordering_test_data[SEGMENTS].each { |key, segment| td.use_preconfigured_segment(segment) } + + with_client(test_config(feature_store: store, data_source: td)) do |client| data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 488f6b35..07dd851a 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -89,31 +89,6 @@ def setup_segment_for_user(user_key, segment, included) end end - class MockDataSource - def self.factory_with_data(data) - lambda { |sdk_key, config| MockDataSource.new(config.feature_store, data) } - end - - def initialize(store, data) - @store = store - @data = data - end - - def start - @store.init(@data) - ev = Concurrent::Event.new - ev.set - ev - end - - def stop - end - - def initialized? - true - end - end - class SimpleObserver def initialize(fn) @fn = fn From 3e4c8930c7e11607b5e1583c513aefb2a88e8af6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 9 Dec 2021 12:52:01 -0800 Subject: [PATCH 223/292] replace LaunchDarkly::FileDataSource with LaunchDarkly::Integrations::FileData --- lib/ldclient-rb/config.rb | 3 +- lib/ldclient-rb/file_data_source.rb | 309 +----------------- .../impl/integrations/file_data_source.rb | 212 ++++++++++++ lib/ldclient-rb/integrations.rb | 53 +-- lib/ldclient-rb/integrations/consul.rb | 7 + lib/ldclient-rb/integrations/dynamodb.rb | 8 + lib/ldclient-rb/integrations/file_data.rb | 108 ++++++ lib/ldclient-rb/integrations/redis.rb | 8 + lib/ldclient-rb/integrations/test_data.rb | 9 +- .../integrations/util/store_wrapper.rb | 5 + lib/ldclient-rb/interfaces.rb | 3 +- .../file_data_source_spec.rb | 8 +- spec/ldclient_spec.rb | 4 +- 13 files changed, 374 insertions(+), 363 deletions(-) create mode 100644 lib/ldclient-rb/impl/integrations/file_data_source.rb create mode 100644 lib/ldclient-rb/integrations/file_data.rb rename spec/{ => integrations}/file_data_source_spec.rb (96%) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 63c1997e..3cfbf882 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -260,7 +260,8 @@ def offline? # object. 
# # @return [LaunchDarkly::Interfaces::DataSource|lambda] - # @see FileDataSource + # @see LaunchDarkly::Integrations::FileData + # @see LaunchDarkly::Integrations::TestData # attr_reader :data_source diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index f58ddf7c..30440353 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -1,314 +1,23 @@ -require 'concurrent/atomics' -require 'json' -require 'yaml' -require 'pathname' +require "ldclient-rb/integrations/file_data" module LaunchDarkly - # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the - # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' - # gem has been provided by the host app. - # @private - @@have_listen = false - begin - require 'listen' - @@have_listen = true - rescue LoadError - end - - # @private - def self.have_listen? - @@have_listen - end - - # - # Provides a way to use local files as a source of feature flag state. This allows using a - # predetermined feature flag state without an actual LaunchDarkly connection. - # - # Reading flags from a file is only intended for pre-production environments. Production - # environments should always be configured to receive flag updates from LaunchDarkly. - # - # To use this component, call {FileDataSource#factory}, and store its return value in the - # {Config#data_source} property of your LaunchDarkly client configuration. In the options - # to `factory`, set `paths` to the file path(s) of your data file(s): - # - # file_source = FileDataSource.factory(paths: [ myFilePath ]) - # config = LaunchDarkly::Config.new(data_source: file_source) - # - # This will cause the client not to connect to LaunchDarkly to get feature flags. The - # client may still make network connections to send analytics events, unless you have disabled - # this with {Config#send_events} or {Config#offline?}. - # - # Flag data files can be either JSON or YAML. They contain an object with three possible - # properties: - # - # - `flags`: Feature flag definitions. - # - `flagValues`: Simplified feature flags that contain only a value. - # - `segments`: User segment definitions. - # - # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application - # and is subject to change. Rather than trying to construct these objects yourself, it is simpler - # to request existing flags directly from the LaunchDarkly server in JSON format, and use this - # output as the starting point for your file. In Linux you would do this: - # - # ``` - # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all - # ``` # - # The output will look something like this (but with many more properties): + # Deprecated entry point for the file data source feature. # - # { - # "flags": { - # "flag-key-1": { - # "key": "flag-key-1", - # "on": true, - # "variations": [ "a", "b" ] - # } - # }, - # "segments": { - # "segment-key-1": { - # "key": "segment-key-1", - # "includes": [ "user-key-1" ] - # } - # } - # } + # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. # - # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - # set specific flag keys to specific values. 
For that, you can use a much simpler format: - # - # { - # "flagValues": { - # "my-string-flag-key": "value-1", - # "my-boolean-flag-key": true, - # "my-integer-flag-key": 3 - # } - # } - # - # Or, in YAML: - # - # flagValues: - # my-string-flag-key: "value-1" - # my-boolean-flag-key: true - # my-integer-flag-key: 1 - # - # It is also possible to specify both "flags" and "flagValues", if you want some flags - # to have simple values and others to have complex behavior. However, it is an error to use the - # same flag key or segment key more than once, either in a single file or across multiple files. - # - # If the data source encounters any error in any file-- malformed content, a missing file, or a - # duplicate key-- it will not load flags from any of the files. + # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData}. # class FileDataSource # - # Returns a factory for the file data source component. - # - # @param options [Hash] the configuration options - # @option options [Array] :paths The paths of the source files for loading flag data. These - # may be absolute paths or relative to the current working directory. - # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Auto-updating will only - # work if all of the files you specified have valid directory paths at startup time. - # Note that the default implementation of this feature is based on polling the filesystem, - # which may not perform well. If you install the 'listen' gem (not included by default, to - # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be - # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. - # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for - # file modifications - used only if auto_update is true, and if the native file-watching - # mechanism from 'listen' is not being used. The default value is 1 second. - # @return an object that can be stored in {Config#data_source} + # Deprecated entry point for the file data source feature. # - def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } - end - end - - # @private - class FileDataSourceImpl - def initialize(feature_store, logger, options={}) - @feature_store = feature_store - @logger = logger - @paths = options[:paths] || [] - if @paths.is_a? String - @paths = [ @paths ] - end - @auto_update = options[:auto_update] - if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests - # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). - # Therefore, on that platform we'll fall back to file polling instead. - if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") - @use_listen = false - else - @use_listen = true - end - end - @poll_interval = options[:poll_interval] || 1 - @initialized = Concurrent::AtomicBoolean.new(false) - @ready = Concurrent::Event.new - end - - def initialized? - @initialized.value - end - - def start - ready = Concurrent::Event.new - - # We will return immediately regardless of whether the file load succeeded or failed - - # the difference can be detected by checking "initialized?" 
- ready.set - - load_all - - if @auto_update - # If we're going to watch files, then the start event will be set the first time we get - # a successful load. - @listener = start_listener - end - - ready - end - - def stop - @listener.stop if !@listener.nil? - end - - private - - def load_all - all_data = { - FEATURES => {}, - SEGMENTS => {} - } - @paths.each do |path| - begin - load_file(path, all_data) - rescue => exn - Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) - return - end - end - @feature_store.init(all_data) - @initialized.make_true - end - - def load_file(path, all_data) - parsed = parse_content(IO.read(path)) - (parsed[:flags] || {}).each do |key, flag| - add_item(all_data, FEATURES, flag) - end - (parsed[:flagValues] || {}).each do |key, value| - add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) - end - (parsed[:segments] || {}).each do |key, segment| - add_item(all_data, SEGMENTS, segment) - end - end - - def parse_content(content) - # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while - # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least - # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.safe_load(content)) - end - - def symbolize_all_keys(value) - # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and - # the SDK expects all objects to be formatted that way. - if value.is_a?(Hash) - value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h - elsif value.is_a?(Array) - value.map{ |v| symbolize_all_keys(v) } - else - value - end - end - - def add_item(all_data, kind, item) - items = all_data[kind] - raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash - key = item[:key].to_sym - if !items[key].nil? - raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" - end - items[key] = item - end - - def make_flag_with_value(key, value) - { - key: key, - on: true, - fallthrough: { variation: 0 }, - variations: [ value ] - } - end - - def start_listener - resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } - if @use_listen - start_listener_with_listen_gem(resolved_paths) - else - FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) - end - end - - def start_listener_with_listen_gem(resolved_paths) - path_set = resolved_paths.to_set - dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - opts = { latency: @poll_interval } - l = Listen.to(*dir_paths, opts) do |modified, added, removed| - paths = modified + added + removed - if paths.any? { |p| path_set.include?(p) } - load_all - end - end - l.start - l - end - + # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. # - # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData#data_source}. 
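+    #
+    # A minimal migration sketch (the file path below is only a placeholder), showing the
+    # deprecated call next to its equivalent in the new API:
+    #
+    #     # deprecated:
+    #     source = LaunchDarkly::FileDataSource.factory(paths: [ "flags.json" ])
+    #     # preferred:
+    #     source = LaunchDarkly::Integrations::FileData.data_source(paths: [ "flags.json" ])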
# - class FileDataSourcePoller - def initialize(resolved_paths, interval, reloader, logger) - @stopped = Concurrent::AtomicBoolean.new(false) - get_file_times = Proc.new do - ret = {} - resolved_paths.each do |path| - begin - ret[path] = File.mtime(path) - rescue Errno::ENOENT - ret[path] = nil - end - end - ret - end - last_times = get_file_times.call - @thread = Thread.new do - while true - sleep interval - break if @stopped.value - begin - new_times = get_file_times.call - changed = false - last_times.each do |path, old_time| - new_time = new_times[path] - if !new_time.nil? && new_time != old_time - changed = true - break - end - end - reloader.call if changed - rescue => exn - Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) - end - end - end - end - - def stop - @stopped.make_true - @thread.run # wakes it up if it's sleeping - end + def self.factory(options={}) + LaunchDarkly::Integrations::FileData.data_source(options) end end end diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb new file mode 100644 index 00000000..d89e4e95 --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -0,0 +1,212 @@ +require 'ldclient-rb/in_memory_store' +require 'ldclient-rb/util' + +require 'concurrent/atomics' +require 'json' +require 'yaml' +require 'pathname' + +module LaunchDarkly + module Impl + module Integrations + class FileDataSourceImpl + # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the + # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' + # gem has been provided by the host app. + @@have_listen = false + begin + require 'listen' + @@have_listen = true + rescue LoadError + end + + def initialize(feature_store, logger, options={}) + @feature_store = feature_store + @logger = logger + @paths = options[:paths] || [] + if @paths.is_a? String + @paths = [ @paths ] + end + @auto_update = options[:auto_update] + if @auto_update && @@have_listen && !options[:force_polling] # force_polling is used only for tests + # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). + # Therefore, on that platform we'll fall back to file polling instead. + if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") + @use_listen = false + else + @use_listen = true + end + end + @poll_interval = options[:poll_interval] || 1 + @initialized = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + end + + def initialized? + @initialized.value + end + + def start + ready = Concurrent::Event.new + + # We will return immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking "initialized?" + ready.set + + load_all + + if @auto_update + # If we're going to watch files, then the start event will be set the first time we get + # a successful load. + @listener = start_listener + end + + ready + end + + def stop + @listener.stop if !@listener.nil? 
+ end + + private + + def load_all + all_data = { + FEATURES => {}, + SEGMENTS => {} + } + @paths.each do |path| + begin + load_file(path, all_data) + rescue => exn + LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + return + end + end + @feature_store.init(all_data) + @initialized.make_true + end + + def load_file(path, all_data) + parsed = parse_content(IO.read(path)) + (parsed[:flags] || {}).each do |key, flag| + add_item(all_data, FEATURES, flag) + end + (parsed[:flagValues] || {}).each do |key, value| + add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) + end + (parsed[:segments] || {}).each do |key, segment| + add_item(all_data, SEGMENTS, segment) + end + end + + def parse_content(content) + # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while + # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least + # for all the samples of actual flag data that we've tested). + symbolize_all_keys(YAML.safe_load(content)) + end + + def symbolize_all_keys(value) + # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and + # the SDK expects all objects to be formatted that way. + if value.is_a?(Hash) + value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h + elsif value.is_a?(Array) + value.map{ |v| symbolize_all_keys(v) } + else + value + end + end + + def add_item(all_data, kind, item) + items = all_data[kind] + raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash + key = item[:key].to_sym + if !items[key].nil? + raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" + end + items[key] = item + end + + def make_flag_with_value(key, value) + { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + } + end + + def start_listener + resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + if @use_listen + start_listener_with_listen_gem(resolved_paths) + else + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) + end + end + + def start_listener_with_listen_gem(resolved_paths) + path_set = resolved_paths.to_set + dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq + opts = { latency: @poll_interval } + l = Listen.to(*dir_paths, opts) do |modified, added, removed| + paths = modified + added + removed + if paths.any? { |p| path_set.include?(p) } + load_all + end + end + l.start + l + end + + # + # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # + class FileDataSourcePoller + def initialize(resolved_paths, interval, reloader, logger) + @stopped = Concurrent::AtomicBoolean.new(false) + get_file_times = Proc.new do + ret = {} + resolved_paths.each do |path| + begin + ret[path] = File.mtime(path) + rescue Errno::ENOENT + ret[path] = nil + end + end + ret + end + last_times = get_file_times.call + @thread = Thread.new do + while true + sleep interval + break if @stopped.value + begin + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? 
&& new_time != old_time + changed = true + break + end + end + reloader.call if changed + rescue => exn + LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) + end + end + end + end + + def stop + @stopped.make_true + @thread.run # wakes it up if it's sleeping + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index fccea008..2a2ac216 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,57 +1,6 @@ require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" +require "ldclient-rb/integrations/file_data" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/test_data" require "ldclient-rb/integrations/util/store_wrapper" - -module LaunchDarkly - # - # Tools for connecting the LaunchDarkly client to other software. - # - module Integrations - # - # Integration with [Consul](https://www.consul.io/). - # - # Note that in order to use this integration, you must first install the gem `diplomat`. - # - # @since 5.5.0 - # - module Consul - # code is in ldclient-rb/impl/integrations/consul_impl - end - - # - # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). - # - # Note that in order to use this integration, you must first install one of the AWS SDK gems: either - # `aws-sdk-dynamodb`, or the full `aws-sdk`. - # - # @since 5.5.0 - # - module DynamoDB - # code is in ldclient-rb/impl/integrations/dynamodb_impl - end - - # - # Integration with [Redis](https://redis.io/). - # - # Note that in order to use this integration, you must first install the `redis` and `connection-pool` - # gems. - # - # @since 5.5.0 - # - module Redis - # code is in ldclient-rb/impl/integrations/redis_impl - end - - # - # Support code that may be helpful in creating integrations. - # - # @since 5.5.0 - # - module Util - # code is in ldclient-rb/integrations/util/ - end - - end -end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 4f32d5fd..020c31b4 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -3,6 +3,13 @@ module LaunchDarkly module Integrations + # + # Integration with [Consul](https://www.consul.io/). + # + # Note that in order to use this integration, you must first install the gem `diplomat`. + # + # @since 5.5.0 + # module Consul # # Default value for the `prefix` option for {new_feature_store}. diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 2f6c4ba1..229a64af 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -3,6 +3,14 @@ module LaunchDarkly module Integrations + # + # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). + # + # Note that in order to use this integration, you must first install one of the AWS SDK gems: either + # `aws-sdk-dynamodb`, or the full `aws-sdk`. + # + # @since 5.5.0 + # module DynamoDB # # Creates a DynamoDB-backed persistent feature store. 
For more details about how and why you can diff --git a/lib/ldclient-rb/integrations/file_data.rb b/lib/ldclient-rb/integrations/file_data.rb new file mode 100644 index 00000000..370d3aa6 --- /dev/null +++ b/lib/ldclient-rb/integrations/file_data.rb @@ -0,0 +1,108 @@ +require 'ldclient-rb/impl/integrations/file_data_source' + +module LaunchDarkly + module Integrations + # + # Provides a way to use local files as a source of feature flag state. This allows using a + # predetermined feature flag state without an actual LaunchDarkly connection. + # + # Reading flags from a file is only intended for pre-production environments. Production + # environments should always be configured to receive flag updates from LaunchDarkly. + # + # To use this component, call {FileData#data_source}, and store its return value in the + # {Config#data_source} property of your LaunchDarkly client configuration. In the options + # to `data_source`, set `paths` to the file path(s) of your data file(s): + # + # file_source = LaunchDarkly::Integrations::FileData.data_source(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(data_source: file_source) + # + # This will cause the client not to connect to LaunchDarkly to get feature flags. The + # client may still make network connections to send analytics events, unless you have disabled + # this with {Config#send_events} or {Config#offline?}. + # + # Flag data files can be either JSON or YAML. They contain an object with three possible + # properties: + # + # - `flags`: Feature flag definitions. + # - `flagValues`: Simplified feature flags that contain only a value. + # - `segments`: User segment definitions. + # + # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application + # and is subject to change. Rather than trying to construct these objects yourself, it is simpler + # to request existing flags directly from the LaunchDarkly server in JSON format, and use this + # output as the starting point for your file. In Linux you would do this: + # + # ``` + # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all + # ``` + # + # The output will look something like this (but with many more properties): + # + # { + # "flags": { + # "flag-key-1": { + # "key": "flag-key-1", + # "on": true, + # "variations": [ "a", "b" ] + # } + # }, + # "segments": { + # "segment-key-1": { + # "key": "segment-key-1", + # "includes": [ "user-key-1" ] + # } + # } + # } + # + # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + # set specific flag keys to specific values. For that, you can use a much simpler format: + # + # { + # "flagValues": { + # "my-string-flag-key": "value-1", + # "my-boolean-flag-key": true, + # "my-integer-flag-key": 3 + # } + # } + # + # Or, in YAML: + # + # flagValues: + # my-string-flag-key: "value-1" + # my-boolean-flag-key: true + # my-integer-flag-key: 1 + # + # It is also possible to specify both "flags" and "flagValues", if you want some flags + # to have simple values and others to have complex behavior. However, it is an error to use the + # same flag key or segment key more than once, either in a single file or across multiple files. + # + # If the data source encounters any error in any file-- malformed content, a missing file, or a + # duplicate key-- it will not load flags from any of the files. 
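+    #
+    # As an illustrative sketch (the path and interval are placeholders), the auto_update
+    # and poll_interval options described below can be combined with the basic usage above:
+    #
+    #     file_source = LaunchDarkly::Integrations::FileData.data_source(
+    #       paths: [ "./flags.json" ],
+    #       auto_update: true,
+    #       poll_interval: 1)
+    #     config = LaunchDarkly::Config.new(data_source: file_source, send_events: false)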
+ # + module FileData + # + # Returns a factory for the file data source component. + # + # @param options [Hash] the configuration options + # @option options [Array] :paths The paths of the source files for loading flag data. These + # may be absolute paths or relative to the current working directory. + # @option options [Boolean] :auto_update True if the data source should watch for changes to + # the source file(s) and reload flags whenever there is a change. Auto-updating will only + # work if all of the files you specified have valid directory paths at startup time. + # Note that the default implementation of this feature is based on polling the filesystem, + # which may not perform well. If you install the 'listen' gem (not included by default, to + # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be + # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. + # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for + # file modifications - used only if auto_update is true, and if the native file-watching + # mechanism from 'listen' is not being used. The default value is 1 second. + # @return an object that can be stored in {Config#data_source} + # + def self.data_source(options={}) + return lambda { |sdk_key, config| + Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options) } + end + end + end +end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 74af507a..6fed732d 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -2,6 +2,14 @@ module LaunchDarkly module Integrations + # + # Integration with [Redis](https://redis.io/). + # + # Note that in order to use this integration, you must first install the `redis` and `connection-pool` + # gems. + # + # @since 5.5.0 + # module Redis # # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index dc9612c8..8cbcc980 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -1,15 +1,16 @@ -require 'concurrent/atomics' require 'ldclient-rb/impl/integrations/test_data/test_data_source' require 'ldclient-rb/integrations/test_data/flag_builder' +require 'concurrent/atomics' + module LaunchDarkly module Integrations # # A mechanism for providing dynamically updatable feature flag state in a simplified form to an SDK # client in test scenarios. # - # Unlike {FileDataSource}, this mechanism does not use any external resources. It provides only - # the data that the application has put into it using the {#update} method. + # Unlike {LaunchDarkly::Integrations::FileData}, this mechanism does not use any external resources. It + # provides only the data that the application has put into it using the {#update} method. # # @example # td = LaunchDarkly::Integrations::TestData.data_source @@ -29,6 +30,8 @@ module Integrations # If the same `TestData` instance is used to configure multiple `LDClient` instances, # any changes made to the data will propagate to all of the `LDClient`s. # + # @since 6.3.0 + # class TestData # Creates a new instance of the test data source. 
# diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 26318d67..c94ace94 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -4,6 +4,11 @@ module LaunchDarkly module Integrations + # + # Support code that may be helpful in creating integrations. + # + # @since 5.5.0 + # module Util # # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 5a86ee23..b62a90fb 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -121,7 +121,8 @@ def stop # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one - # except for testing purposes. {FileDataSource} provides one such test fixture. + # except for testing purposes. Two such test fixtures are {LaunchDarkly::Integrations::FileData} + # and {LaunchDarkly::Integrations::TestData}. # module DataSource # diff --git a/spec/file_data_source_spec.rb b/spec/integrations/file_data_source_spec.rb similarity index 96% rename from spec/file_data_source_spec.rb rename to spec/integrations/file_data_source_spec.rb index 212d057b..ce756fb6 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/integrations/file_data_source_spec.rb @@ -9,7 +9,7 @@ def []=(key, value) end end -describe LaunchDarkly::FileDataSource do +describe LaunchDarkly::Integrations::FileData do let(:full_flag_1_key) { "flag1" } let(:full_flag_1_value) { "on" } let(:flag_value_1_key) { "flag2" } @@ -114,7 +114,7 @@ def make_temp_file(content) end def with_data_source(options) - factory = LaunchDarkly::FileDataSource.factory(options) + factory = LaunchDarkly::Integrations::FileData.data_source(options) ds = factory.call('', @config) begin yield ds @@ -246,7 +246,7 @@ def test_auto_reload(options) it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) - factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + factory = LaunchDarkly::Integrations::FileData.data_source({ paths: file.path }) config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) @@ -260,7 +260,7 @@ def test_auto_reload(options) it "evaluates full flag with client as expected" do file = make_temp_file(all_properties_json) - factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + factory = LaunchDarkly::Integrations::FileData.data_source({ paths: file.path }) config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 7929713d..ef689deb 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -15,7 +15,7 @@ module LaunchDarkly end it "is not enforced if using file data and send_events is false" do - source = FileDataSource.factory({}) + source = LaunchDarkly::Integrations::FileData.data_source({}) subject.new(nil, Config.new({ data_source: source, send_events: false })) end @@ -38,7 +38,7 @@ module LaunchDarkly end it "is enforced if using file data and send_events is true" do - source = FileDataSource.factory({}) + source = LaunchDarkly::Integrations::FileData.data_source({}) expect { subject.new(nil, Config.new({ data_source: source 
})) }.to raise_error(ArgumentError) From 3d35964ca7904b27a3746bbe3be1991d11e545fa Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 31 Dec 2021 13:09:24 -0800 Subject: [PATCH 224/292] update ruby-eventsource version for recent SSE fixes --- launchdarkly-server-sdk.gemspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index d2be98d1..67125390 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,7 +36,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "2.1.1" + spec.add_runtime_dependency "ld-eventsource", "2.2.0" # Please keep ld-eventsource dependency as an exact version so that bugfixes to # that LD library are always associated with a new SDK version. From 5dadfe0b81b4cf6984876474e27cc5bb9a593f03 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 27 Jan 2022 14:38:51 -0500 Subject: [PATCH 225/292] Bump bundler version (#184) --- .circleci/config.yml | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8ddba394..16683e22 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -49,10 +49,10 @@ jobs: steps: - run: sudo apt-get update -y && sudo apt-get install -y build-essential - run: ruby -v - - run: gem install bundler -v 2.2.10 - - run: bundle _2.2.10_ install + - run: gem install bundler -v 2.2.33 + - run: bundle _2.2.33_ install - run: mkdir ./rspec - - run: bundle _2.2.10_ exec rspec --format documentation --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: bundle _2.2.33_ exec rspec --format documentation --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 67125390..bc4492a6 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -22,7 +22,7 @@ Gem::Specification.new do |spec| spec.required_ruby_version = ">= 2.5.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" - spec.add_development_dependency "bundler", "2.2.10" + spec.add_development_dependency "bundler", "2.2.33" spec.add_development_dependency "rspec", "~> 3.10" spec.add_development_dependency "diplomat", "~> 2.4.2" spec.add_development_dependency "redis", "~> 4.2" From 4a63c452c1f7a9feb5297dbf0571035d69435468 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 27 Jan 2022 20:31:13 -0500 Subject: [PATCH 226/292] Add ability to to set initial reconnect delay (#183) --- lib/ldclient-rb/config.rb | 17 +++++++++++++++++ lib/ldclient-rb/stream.rb | 3 ++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 3cfbf882..ed33e08b 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -21,6 +21,7 @@ class Config # @option opts [Integer] :capacity (10000) See {#capacity}. # @option opts [Float] :flush_interval (30) See {#flush_interval}. # @option opts [Float] :read_timeout (10) See {#read_timeout}. + # @option opts [Float] :initial_reconnect_delay (1) See {#initial_reconnect_delay}. # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. # @option opts [Object] :cache_store See {#cache_store}. 
# @option opts [Object] :feature_store See {#feature_store}. @@ -54,6 +55,7 @@ def initialize(opts = {}) @flush_interval = opts[:flush_interval] || Config.default_flush_interval @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout @read_timeout = opts[:read_timeout] || Config.default_read_timeout + @initial_reconnect_delay = opts[:initial_reconnect_delay] || Config.default_initial_reconnect_delay @feature_store = opts[:feature_store] || Config.default_feature_store @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @@ -180,6 +182,13 @@ def offline? # attr_reader :read_timeout + # + # The initial delay before reconnecting after an error in the SSE client. + # This only applies to the streaming connection. + # @return [Float] + # + attr_reader :initial_reconnect_delay + # # The connect timeout for network connections in seconds. # @return [Float] @@ -395,6 +404,14 @@ def self.default_read_timeout 10 end + # + # The default value for {#initial_reconnect_delay}. + # @return [Float] 1 + # + def self.default_initial_reconnect_delay + 1 + end + # # The default value for {#connect_timeout}. # @return [Float] 10 diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 64275b39..211e6321 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -47,7 +47,8 @@ def start headers: headers, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger, - socket_factory: @config.socket_factory + socket_factory: @config.socket_factory, + reconnect_time: @config.initial_reconnect_delay } log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| From 5e7cd710fffa94ce5060087effd095bc26b4e988 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Thu, 27 Jan 2022 20:37:07 -0500 Subject: [PATCH 227/292] Treat secondary as a built-in attribute (#180) --- lib/ldclient-rb/impl/evaluator_operators.rb | 2 +- spec/impl/evaluator_operators_spec.rb | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 77b0960b..e54368e9 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -89,7 +89,7 @@ def self.user_value(user, attribute) private - BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + BUILTINS = Set[:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index ddf55cc7..5c447e6f 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -105,13 +105,13 @@ end describe "user_value" do - [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous, :some_custom_attr].each do |attr| + [:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous, :some_custom_attr].each do |attr| it "returns nil if property #{attr} is not defined" do expect(subject::user_value({}, attr)).to be nil end end - [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name].each do |attr| + [:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name].each do |attr| it "gets string value of string property #{attr}" do expect(subject::user_value({ attr => 'x' }, attr)).to eq 'x' end From 64b25497edd6c31912d0ab9f0ddcbb003898c355 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 27 Jan 2022 20:38:01 -0500 Subject: [PATCH 228/292] all_flags_state is invalid if store isn't initialized (#182) --- lib/ldclient-rb/ldclient.rb | 9 +++++++++ spec/ldclient_evaluation_spec.rb | 16 ++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a8719773..f0046421 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -338,6 +338,15 @@ def all_flags(user) def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? + if !initialized? + if @store.initialized? + @config.logger.warn { "Called all_flags_state before client initialization; using last known values from data store" } + else + @config.logger.warn { "Called all_flags_state before client initialization. Data store not available; returning empty state" } + return FeatureFlagsState.new(false) + end + end + unless user && !user[:key].nil? 
@config.logger.error { "[LDClient] User and user key must be specified in all_flags_state" } return FeatureFlagsState.new(false) diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index c63cb882..581f3256 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -301,6 +301,22 @@ module LaunchDarkly expect(state.values_map).to eq({}) end end + + it "returns empty state if store is not initialize" do + wait = double + expect(wait).to receive(:wait).at_least(:once) + + source = double + expect(source).to receive(:start).at_least(:once).and_return(wait) + expect(source).to receive(:stop).at_least(:once).and_return(wait) + expect(source).to receive(:initialized?).at_least(:once).and_return(false) + store = LaunchDarkly::InMemoryFeatureStore.new + with_client(test_config(store: store, data_source: source)) do |offline_client| + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end end end end From 275b005d718ddb61eeebe33ccb1ddda9bcb074f7 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 28 Jan 2022 13:35:21 -0500 Subject: [PATCH 229/292] identify should not emit events if user key is "" (#181) --- lib/ldclient-rb/ldclient.rb | 4 ++-- spec/ldclient_events_spec.rb | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f0046421..62b31d81 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -248,8 +248,8 @@ def variation_detail(key, user, default) # @return [void] # def identify(user) - if !user || user[:key].nil? - @config.logger.warn("Identify called with nil user or nil user key!") + if !user || user[:key].nil? || user[:key].empty? + @config.logger.warn("Identify called with nil user or empty user key!") return end sanitize_user(user) diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 86eaa77d..b2afcc13 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -196,13 +196,13 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is nil" do + it "does not send event, and logs warning, if user key is blank" do logger = double().as_null_object with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:add_event) expect(logger).to receive(:warn) - client.identify({ key: nil }) + client.identify({ key: "" }) end end end From 32e74ed14590917739a5eb2733f822a0fd63055d Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Thu, 3 Feb 2022 15:50:59 -0500 Subject: [PATCH 230/292] Account for traffic allocation on all flags (#185) --- .circleci/config.yml | 2 +- lib/ldclient-rb/flags_state.rb | 35 ++++++++++++++-------- lib/ldclient-rb/impl/event_factory.rb | 21 ++++++-------- lib/ldclient-rb/ldclient.rb | 21 ++++++++++---- spec/flags_state_spec.rb | 42 +++++++++++++-------------- 5 files changed, 70 insertions(+), 51 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 16683e22..83bf0999 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -41,7 +41,7 @@ jobs: - when: condition: <> steps: - - run: gem install jruby-openssl # required by bundler, no effect on Ruby MRI + - run: gem install jruby-openssl -v 0.11.0 # required by bundler, no effect on Ruby MRI - run: apt-get update -y && apt-get install -y build-essential - when: condition: diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 496ad61b..50fcec88 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -16,21 +16,32 @@ def initialize(valid) # Used internally to build the state map. # @private - def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) - key = flag[:key] - @flag_values[key] = value + def add_flag(flag_state, with_reasons, details_only_if_tracked) + key = flag_state[:key] + @flag_values[key] = flag_state[:value] meta = {} - with_details = !details_only_if_tracked || flag[:trackEvents] - if !with_details && flag[:debugEventsUntilDate] - with_details = flag[:debugEventsUntilDate] > Impl::Util::current_time_millis + + omit_details = false + if details_only_if_tracked + if !flag_state[:trackEvents] && !flag_state[:trackReason] && !(flag_state[:debugEventsUntilDate] && flag_state[:debugEventsUntilDate] > Impl::Util::current_time_millis) + omit_details = true + end + end + + reason = (!with_reasons and !flag_state[:trackReason]) ? nil : flag_state[:reason] + + if !reason.nil? && !omit_details + meta[:reason] = reason end - if with_details - meta[:version] = flag[:version] - meta[:reason] = reason if !reason.nil? + + if !omit_details + meta[:version] = flag_state[:version] end - meta[:variation] = variation if !variation.nil? - meta[:trackEvents] = true if flag[:trackEvents] - meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + + meta[:variation] = flag_state[:variation] if !flag_state[:variation].nil? + meta[:trackEvents] = true if flag_state[:trackEvents] + meta[:trackReason] = true if flag_state[:trackReason] + meta[:debugEventsUntilDate] = flag_state[:debugEventsUntilDate] if flag_state[:debugEventsUntilDate] @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 691339d7..19b4e474 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -13,7 +13,7 @@ def initialize(with_reasons) end def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) - add_experiment_data = is_experiment(flag, detail.reason) + add_experiment_data = self.class.is_experiment(flag, detail.reason) e = { kind: 'feature', key: flag[:key], @@ -91,17 +91,7 @@ def new_custom_event(event_name, user, data, metric_value) e end - private - - def context_to_context_kind(user) - if !user.nil? 
&& user[:anonymous] - return "anonymousUser" - else - return "user" - end - end - - def is_experiment(flag, reason) + def self.is_experiment(flag, reason) return false if !reason if reason.in_experiment @@ -121,6 +111,13 @@ def is_experiment(flag, reason) false end + private def context_to_context_kind(user) + if !user.nil? && user[:anonymous] + return "anonymousUser" + else + return "user" + end + end end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 62b31d81..573c964b 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -368,14 +368,25 @@ def all_flags_state(user, options={}) next end begin - result = @evaluator.evaluate(f, user, @event_factory_default) - state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, - details_only_if_tracked) + detail = @evaluator.evaluate(f, user, @event_factory_default).detail rescue => exn + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION) : nil, - details_only_if_tracked) end + + requires_experiment_data = EventFactory.is_experiment(f, detail.reason) + flag_state = { + key: f[:key], + value: detail.value, + variation: detail.variation_index, + reason: detail.reason, + version: f[:version], + trackEvents: f[:trackEvents] || requires_experiment_data, + trackReason: requires_experiment_data, + debugEventsUntilDate: f[:debugEventsUntilDate], + } + + state.add_flag(flag_state, with_reasons, details_only_if_tracked) end state diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index bda55b11..323c6c31 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -6,8 +6,8 @@ it "can get flag value" do state = subject.new(true) - flag = { key: 'key' } - state.add_flag(flag, 'value', 1) + flag_state = { key: 'key', value: 'value', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + state.add_flag(flag_state, false, false) expect(state.flag_value('key')).to eq 'value' end @@ -20,21 +20,21 @@ it "can be converted to values map" do state = subject.new(true) - flag1 = { key: 'key1' } - flag2 = { key: 'key2' } - state.add_flag(flag1, 'value1', 0) - state.add_flag(flag2, 'value2', 1) + flag_state1 = { key: 'key1', value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + flag_state2 = { key: 'key2', value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + state.add_flag(flag_state1, false, false) + state.add_flag(flag_state2, false, false) expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end it "can be converted to JSON structure" do state = subject.new(true) - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } - state.add_flag(flag1, 'value1', 0) - state.add_flag(flag2, 'value2', 1) - + flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: 
LaunchDarkly::EvaluationReason.fallthrough(false) } + state.add_flag(flag_state1, false, false) + state.add_flag(flag_state2, false, false) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', @@ -57,11 +57,11 @@ it "can be converted to JSON string" do state = subject.new(true) - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } - state.add_flag(flag1, 'value1', 0) - state.add_flag(flag2, 'value2', 1) - + flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + state.add_flag(flag_state1, false, false) + state.add_flag(flag_state2, false, false) + object = state.as_json str = state.to_json expect(object.to_json).to eq(str) @@ -69,11 +69,11 @@ it "uses our custom serializer with JSON.generate" do state = subject.new(true) - flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } - flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } - state.add_flag(flag1, 'value1', 0) - state.add_flag(flag2, 'value2', 1) - + flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + state.add_flag(flag_state1, false, false) + state.add_flag(flag_state2, false, false) + stringFromToJson = state.to_json stringFromGenerate = JSON.generate(state) expect(stringFromGenerate).to eq(stringFromToJson) From 5af6e9aab4fe91508809af3b9d238e73aec9d938 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Wed, 16 Feb 2022 13:32:40 -0500 Subject: [PATCH 231/292] Add contract tests (#178) --- .circleci/config.yml | 23 +++++-- Makefile | 19 ++++++ contract-tests/Gemfile | 10 +++ contract-tests/README.md | 7 ++ contract-tests/client_entity.rb | 92 ++++++++++++++++++++++++++ contract-tests/service.rb | 112 ++++++++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 4 +- 7 files changed, 259 insertions(+), 8 deletions(-) create mode 100644 Makefile create mode 100644 contract-tests/Gemfile create mode 100644 contract-tests/README.md create mode 100644 contract-tests/client_entity.rb create mode 100644 contract-tests/service.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index 83bf0999..7ec25b1a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,8 +17,8 @@ workflows: name: Ruby 3.0 docker-image: cimg/ruby:3.0 - build-test-linux: - name: JRuby 9.2 - docker-image: jruby:9.2-jdk + name: JRuby 9.3 + docker-image: jruby:9.3-jdk jruby: true jobs: @@ -51,9 +51,20 @@ jobs: - run: ruby -v - run: gem install bundler -v 2.2.33 - run: bundle _2.2.33_ install - - run: mkdir ./rspec - - run: bundle _2.2.33_ exec rspec --format documentation --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: mkdir /tmp/circle-artifacts + - run: bundle _2.2.33_ exec rspec --format documentation --format RspecJunitFormatter -o /tmp/circle-artifacts/rspec.xml spec + + - when: + condition: + not: <> + steps: + - run: make build-contract-tests + - run: + command: make start-contract-test-service + background: true + - run: TEST_HARNESS_PARAMS="-junit /tmp/circle-artifacts/contract-tests-junit.xml" make run-contract-tests + - store_test_results: - path: ./rspec + path: /tmp/circle-artifacts - store_artifacts: - path: ./rspec + path: /tmp/circle-artifacts diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..5b264f57 --- /dev/null +++ b/Makefile @@ -0,0 +1,19 @@ +TEMP_TEST_OUTPUT=/tmp/contract-test-service.log + +build-contract-tests: + @cd contract-tests && bundle _2.2.33_ install + +start-contract-test-service: + @cd contract-tests && bundle _2.2.33_ exec ruby service.rb + +start-contract-test-service-bg: + @echo "Test service output will be captured in $(TEMP_TEST_OUTPUT)" + @make start-contract-test-service >$(TEMP_TEST_OUTPUT) 2>&1 & + +run-contract-tests: + @curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/v1.0.0/downloader/run.sh \ + | VERSION=v1 PARAMS="-url http://localhost:9000 -debug -stop-service-at-end $(TEST_HARNESS_PARAMS)" sh + +contract-tests: build-contract-tests start-contract-test-service-bg run-contract-tests + +.PHONY: build-contract-tests start-contract-test-service run-contract-tests contract-tests diff --git a/contract-tests/Gemfile b/contract-tests/Gemfile new file mode 100644 index 00000000..48b8812f --- /dev/null +++ b/contract-tests/Gemfile @@ -0,0 +1,10 @@ +source 'https://rubygems.org' + +gem 'launchdarkly-server-sdk', path: '..' + +gem 'sinatra', '~> 2.1' +# Sinatra can work with several server frameworks. In JRuby, we have to use glassfish (which +# is only available in JRuby). Otherwise we use thin (which is not available in JRuby). 
+gem 'glassfish', :platforms => :jruby +gem 'thin', :platforms => :ruby +gem 'json' diff --git a/contract-tests/README.md b/contract-tests/README.md new file mode 100644 index 00000000..aa3942b8 --- /dev/null +++ b/contract-tests/README.md @@ -0,0 +1,7 @@ +# SDK contract test service + +This directory contains an implementation of the cross-platform SDK testing protocol defined by https://github.com/launchdarkly/sdk-test-harness. See that project's `README` for details of this protocol, and the kinds of SDK capabilities that are relevant to the contract tests. This code should not need to be updated unless the SDK has added or removed such capabilities. + +To run these tests locally, run `make contract-tests` from the SDK project root directory. This downloads the correct version of the test harness tool automatically. + +Or, to test against an in-progress local version of the test harness, run `make start-contract-test-service` from the SDK project root directory; then, in the root directory of the `sdk-test-harness` project, build the test harness and run it from the command line. diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb new file mode 100644 index 00000000..a9b7ccd5 --- /dev/null +++ b/contract-tests/client_entity.rb @@ -0,0 +1,92 @@ +require 'ld-eventsource' +require 'json' +require 'net/http' + +class ClientEntity + def initialize(log, config) + @log = log + + opts = {} + + opts[:logger] = log + + if config[:streaming] + streaming = config[:streaming] + opts[:stream_uri] = streaming[:baseUri] if !streaming[:baseUri].nil? + opts[:initial_reconnect_delay] = streaming[:initialRetryDelayMs] / 1_000.0 if !streaming[:initialRetryDelayMs].nil? + end + + if config[:events] + events = config[:events] + opts[:events_uri] = events[:baseUri] if events[:baseUri] + opts[:capacity] = events[:capacity] if events[:capacity] + opts[:diagnostic_opt_out] = !events[:enableDiagnostics] + opts[:all_attributes_private] = !!events[:allAttributesPrivate] + opts[:private_attribute_names] = events[:globalPrivateAttributes] + opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) if events.has_key? :flushIntervalMs + opts[:inline_users_in_events] = events[:inlineUsers] || false + else + opts[:send_events] = false + end + + startWaitTimeMs = config[:startWaitTimeMs] || 5_000 + + @client = LaunchDarkly::LDClient.new( + config[:credential], + LaunchDarkly::Config.new(opts), + startWaitTimeMs / 1_000.0) + end + + def initialized? + @client.initialized? 
+ end + + def evaluate(params) + response = {} + + if params[:detail] + detail = @client.variation_detail(params[:flagKey], params[:user], params[:defaultValue]) + response[:value] = detail.value + response[:variationIndex] = detail.variation_index + response[:reason] = detail.reason + else + response[:value] = @client.variation(params[:flagKey], params[:user], params[:defaultValue]) + end + + response + end + + def evaluate_all(params) + opts = {} + opts[:client_side_only] = params[:clientSideOnly] || false + opts[:with_reasons] = params[:withReasons] || false + opts[:details_only_for_tracked_flags] = params[:detailsOnlyForTrackedFlags] || false + + @client.all_flags_state(params[:user], opts) + end + + def track(params) + @client.track(params[:eventKey], params[:user], params[:data], params[:metricValue]) + end + + def identify(params) + @client.identify(params[:user]) + end + + def alias(params) + @client.alias(params[:user], params[:previousUser]) + end + + def flush_events + @client.flush + end + + def log + @log + end + + def close + @client.close + @log.info("Test ended") + end +end diff --git a/contract-tests/service.rb b/contract-tests/service.rb new file mode 100644 index 00000000..54cc0b73 --- /dev/null +++ b/contract-tests/service.rb @@ -0,0 +1,112 @@ +require 'launchdarkly-server-sdk' +require 'json' +require 'logger' +require 'net/http' +require 'sinatra' + +require './client_entity.rb' + +configure :development do + disable :show_exceptions +end + +$log = Logger.new(STDOUT) +$log.formatter = proc {|severity, datetime, progname, msg| + "[GLOBAL] #{datetime.strftime('%Y-%m-%d %H:%M:%S.%3N')} #{severity} #{progname} #{msg}\n" +} + +set :port, 9000 +set :logging, false + +clients = {} +clientCounter = 0 + +get '/' do + { + capabilities: [ + 'server-side', + 'all-flags-with-reasons', + 'all-flags-client-side-only', + 'all-flags-details-only-for-tracked-flags', + ] + }.to_json +end + +delete '/' do + $log.info("Test service has told us to exit") + Thread.new { sleep 1; exit } + return 204 +end + +post '/' do + opts = JSON.parse(request.body.read, :symbolize_names => true) + tag = "[#{opts[:tag]}]" + + clientCounter += 1 + clientId = clientCounter.to_s + + log = Logger.new(STDOUT) + log.formatter = proc {|severity, datetime, progname, msg| + "#{tag} #{datetime.strftime('%Y-%m-%d %H:%M:%S.%3N')} #{severity} #{progname} #{msg}\n" + } + + log.info("Starting client") + log.debug("Parameters: #{opts}") + + client = ClientEntity.new(log, opts[:configuration]) + + if !client.initialized? && opts[:configuration][:initCanFail] == false + client.close() + return [500, nil, "Failed to initialize"] + end + + clientResourceUrl = "/clients/#{clientId}" + clients[clientId] = client + return [201, {'Location' => clientResourceUrl}, nil] +end + +post '/clients/:id' do |clientId| + client = clients[clientId] + return 404 if client.nil? 
+ + params = JSON.parse(request.body.read, :symbolize_names => true) + + client.log.info("Processing request for client #{clientId}") + client.log.debug("Parameters: #{params}") + + case params[:command] + when "evaluate" + response = client.evaluate(params[:evaluate]) + return [200, nil, response.to_json] + when "evaluateAll" + response = {:state => client.evaluate_all(params[:evaluateAll])} + return [200, nil, response.to_json] + when "customEvent" + client.track(params[:customEvent]) + return 201 + when "identifyEvent" + client.identify(params[:identifyEvent]) + return 201 + when "aliasEvent" + client.alias(params[:aliasEvent]) + return 201 + when "flushEvents" + client.flush_events + return 201 + end + + return [400, nil, {:error => "Unknown command requested"}.to_json] +end + +delete '/clients/:id' do |clientId| + client = clients[clientId] + return 404 if client.nil? + clients.delete(clientId) + client.close + + return 204 +end + +error do + env['sinatra.error'].message +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 573c964b..b5e5ead9 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -65,7 +65,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) get_segment = lambda { |key| @store.get(SEGMENTS, key) } get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_user_membership(key) } @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, get_big_segments_membership, @config.logger) - + if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? diagnostic_accumulator = Impl::DiagnosticAccumulator.new(Impl::DiagnosticAccumulator.create_diagnostic_id(sdk_key)) else @@ -178,7 +178,7 @@ def initialized? # Other supported user attributes include IP address, country code, and an arbitrary hash of # custom attributes. For more about the supported user properties and how they work in # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/home/flags/targeting-users). - # + # # The optional `:privateAttributeNames` user property allows you to specify a list of # attribute names that should not be sent back to LaunchDarkly. # [Private attributes](https://docs.launchdarkly.com/home/users/attributes#creating-private-user-attributes) From baca84bec92ca373764fd9367c91703e6da8c835 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 18 Feb 2022 15:19:36 -0500 Subject: [PATCH 232/292] Fix string interpolation in log message (#187) --- lib/ldclient-rb/impl/integrations/dynamodb_impl.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 4085e53d..7244fc9b 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -35,7 +35,7 @@ def initialize(table_name, opts) @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - @logger.info("${description}: using DynamoDB table \"#{table_name}\"") + @logger.info("#{description}: using DynamoDB table \"#{table_name}\"") end def stop From 4a38c404c3fa2a3f7777dafc7ac19454ca6ba468 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Thu, 3 Mar 2022 14:19:28 -0500 Subject: [PATCH 233/292] Default opts to empty hash when creating persistent feature store (#186) --- lib/ldclient-rb/integrations/consul.rb | 2 +- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 020c31b4..b3947047 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -36,7 +36,7 @@ def self.default_prefix # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_feature_store(opts, &block) + def self.new_feature_store(opts = {}) core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 229a64af..29aedcdb 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -46,7 +46,7 @@ module DynamoDB # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_feature_store(table_name, opts) + def self.new_feature_store(table_name, opts = {}) core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 6fed732d..95147286 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -58,7 +58,7 @@ def self.default_prefix # lifecycle to be independent of the SDK client # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_feature_store(opts) + def self.new_feature_store(opts = {}) return RedisFeatureStore.new(opts) end From b9432e9b0dbaee0efa41cba0b22920374de44d81 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 3 Mar 2022 14:43:13 -0500 Subject: [PATCH 234/292] Remove Hakiri badge from README (#188) Hakiri was sunset on January 31st, 2022 at which time our badge stopped working. 
--- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 8125c068..17e3bfc5 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,6 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) -[![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) [![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk) [![GitHub Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk) From f2c2228b7a577fc0611b1535c4edc1598c28581b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Mar 2022 15:30:22 -0700 Subject: [PATCH 235/292] detect http/https proxy env vars when creating HTTP clients --- lib/ldclient-rb/util.rb | 11 ++++++++++- spec/event_sender_spec.rb | 41 ++++++++++++++++++++++++--------------- spec/requestor_spec.rb | 27 +++++++++++++++----------- 3 files changed, 51 insertions(+), 28 deletions(-) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 7bd56959..24bd1b02 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -24,10 +24,19 @@ def self.new_http_client(uri_s, config) if config.socket_factory http_client_options["socket_class"] = config.socket_factory end + proxy = URI.parse(uri_s).find_proxy + if !proxy.nil? + http_client_options["proxy"] = { + proxy_address: proxy.host, + proxy_port: proxy.port, + proxy_username: proxy.user, + proxy_password: proxy.password + } + end return HTTP::Client.new(http_client_options) .timeout({ read: config.read_timeout, - connect: config.connect_timeout + connect: config.connect_timeout, }) .persistent(uri_s) end diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 31bfb6ae..72d19197 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -14,7 +14,11 @@ module Impl let(:fake_data) { '{"things":[]}' } def make_sender(server) - subject.new(sdk_key, Config.new(events_uri: server.base_uri.to_s, logger: $null_log), nil, 0.1) + make_sender_with_events_uri(server.base_uri.to_s) + end + + def make_sender_with_events_uri(events_uri) + subject.new(sdk_key, Config.new(events_uri: events_uri, logger: $null_log), nil, 0.1) end def with_sender_and_server @@ -105,25 +109,30 @@ def with_sender_and_server end it "can use a proxy server" do - with_server do |server| - server.setup_ok_response("/bulk", "") - - with_server(StubProxyServer.new) do |proxy| - begin - ENV["http_proxy"] = proxy.base_uri.to_s + fake_target_uri = "http://request-will-not-really-go-here" + # Instead of a real proxy server, we just create a basic test HTTP server that + # pretends to be a proxy. The proof that the proxy logic is working correctly is + # that the request goes to that server, instead of to fake_target_uri. We can't + # use a real proxy that really forwards requests to another test server, because + # that test server would be at localhost, and proxy environment variables are + # ignored if the target is localhost. 
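The util.rb hunk above relies on Ruby's standard URI#find_proxy, which consults the conventional http_proxy / https_proxy / no_proxy environment variables. A minimal sketch of that mechanism (the proxy address is hypothetical), mirroring how the change maps the result onto the http gem's options:

require "uri"
require "http"

ENV["http_proxy"] = "http://user:secret@proxy.example.com:8080"  # hypothetical proxy

proxy = URI.parse("http://events.launchdarkly.com").find_proxy
http_client_options = {}
if !proxy.nil?
  http_client_options["proxy"] = {
    proxy_address: proxy.host,       # "proxy.example.com"
    proxy_port: proxy.port,          # 8080
    proxy_username: proxy.user,      # "user"
    proxy_password: proxy.password   # "secret"
  }
end
client = HTTP::Client.new(http_client_options)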
+ with_server do |proxy| + proxy.setup_ok_response("/bulk", "") - es = make_sender(server) + begin + ENV["http_proxy"] = proxy.base_uri.to_s - result = es.send_event_data(fake_data, "", false) - - expect(result.success).to be true + es = make_sender_with_events_uri(fake_target_uri) - req, body = server.await_request_with_body - expect(body).to eq fake_data - ensure - ENV["http_proxy"] = nil - end + result = es.send_event_data(fake_data, "", false) + + expect(result.success).to be true + ensure + ENV["http_proxy"] = nil end + + req, body = proxy.await_request_with_body + expect(body).to eq fake_data end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index c224b22a..65ec7ed3 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -189,19 +189,24 @@ def with_requestor(base_uri, opts = {}) end it "can use a proxy server" do + fake_target_uri = "http://request-will-not-really-go-here" + # Instead of a real proxy server, we just create a basic test HTTP server that + # pretends to be a proxy. The proof that the proxy logic is working correctly is + # that the request goes to that server, instead of to fake_target_uri. We can't + # use a real proxy that really forwards requests to another test server, because + # that test server would be at localhost, and proxy environment variables are + # ignored if the target is localhost. expected_data = { flags: { flagkey: { key: "flagkey" } } } - with_server do |server| - server.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) - with_server(StubProxyServer.new) do |proxy| - begin - ENV["http_proxy"] = proxy.base_uri.to_s - with_requestor(server.base_uri.to_s) do |requestor| - data = requestor.request_all_data - expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(expected_data)) - end - ensure - ENV["http_proxy"] = nil + with_server do |proxy| + proxy.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) + begin + ENV["http_proxy"] = proxy.base_uri.to_s + with_requestor(fake_target_uri) do |requestor| + data = requestor.request_all_data + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(expected_data)) end + ensure + ENV["http_proxy"] = nil end end end From aef4ead84683c3d7014039e5948c2c06a3163c2a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Mar 2022 15:33:48 -0700 Subject: [PATCH 236/292] rever accidental change --- lib/ldclient-rb/util.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 24bd1b02..5aac9d1e 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -36,7 +36,7 @@ def self.new_http_client(uri_s, config) return HTTP::Client.new(http_client_options) .timeout({ read: config.read_timeout, - connect: config.connect_timeout, + connect: config.connect_timeout }) .persistent(uri_s) end From 1bd1faecf14fa105c0a42dcf595b400dd881b3f1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 3 May 2022 14:40:00 -0700 Subject: [PATCH 237/292] fix nil safety in test service config --- contract-tests/client_entity.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index a9b7ccd5..699d8e72 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -23,7 +23,7 @@ def initialize(log, config) opts[:diagnostic_opt_out] = !events[:enableDiagnostics] opts[:all_attributes_private] = !!events[:allAttributesPrivate] 
opts[:private_attribute_names] = events[:globalPrivateAttributes] - opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) if events.has_key? :flushIntervalMs + opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) if !events[:flushIntervalMs].nil? opts[:inline_users_in_events] = events[:inlineUsers] || false else opts[:send_events] = false From 7ba54a7bb2dfa8eafe8c20600b26e73065337516 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 5 May 2022 09:12:24 -0400 Subject: [PATCH 238/292] master -> main (#190) --- .github/pull_request_template.md | 2 +- .ldrelease/config.yml | 2 +- README.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 19806760..fc89ce0f 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,7 +1,7 @@ **Requirements** - [ ] I have added test coverage for new or changed functionality -- [ ] I have followed the repository's [pull request submission guidelines](../blob/master/CONTRIBUTING.md#submitting-pull-requests) +- [ ] I have followed the repository's [pull request submission guidelines](../blob/main/CONTRIBUTING.md#submitting-pull-requests) - [ ] I have validated my changes against all supported platform versions **Related issues** diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index 7c44d0e5..937e236d 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -5,7 +5,7 @@ repo: private: ruby-server-sdk-private branches: - - name: master + - name: main - name: 5.x publications: diff --git a/README.md b/README.md index 17e3bfc5..bef2029f 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) -[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) +[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/main.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk) [![GitHub Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk) From 518709c8d457ea4abd02d9d83b07ea119510798c Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Thu, 5 May 2022 09:14:42 -0400 Subject: [PATCH 239/292] master -> main --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bef2029f..87cbc5f1 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) -[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/main.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) +[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/main.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/main) [![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk) [![GitHub 
Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk) From 2152220de6fb81f4402300f69307b162e4b012a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 15 Jun 2022 12:38:55 -0700 Subject: [PATCH 240/292] update ruby-eventsource version for parsing efficiency fix --- launchdarkly-server-sdk.gemspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index bc4492a6..04262469 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,7 +36,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "2.2.0" + spec.add_runtime_dependency "ld-eventsource", "2.2.1" # Please keep ld-eventsource dependency as an exact version so that bugfixes to # that LD library are always associated with a new SDK version. From 654019c022a542b43886d77eb666cbc8b7f75d9a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 29 Jun 2022 16:26:07 -0700 Subject: [PATCH 241/292] miscellaneous optimizations for event processing (#193) --- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/event_summarizer.rb | 55 ---- lib/ldclient-rb/events.rb | 308 +++++++++++++++-------- lib/ldclient-rb/impl/evaluator.rb | 31 ++- lib/ldclient-rb/impl/event_factory.rb | 123 --------- lib/ldclient-rb/impl/event_summarizer.rb | 63 +++++ lib/ldclient-rb/impl/event_types.rb | 90 +++++++ lib/ldclient-rb/ldclient.rb | 100 ++++++-- spec/event_summarizer_spec.rb | 63 ----- spec/events_spec.rb | 295 ++++++++++------------ spec/events_test_util.rb | 19 ++ spec/impl/evaluator_big_segments_spec.rb | 14 +- spec/impl/evaluator_clause_spec.rb | 12 +- spec/impl/evaluator_rule_spec.rb | 34 +-- spec/impl/evaluator_segment_spec.rb | 6 +- spec/impl/evaluator_spec.rb | 91 +++---- spec/impl/evaluator_spec_base.rb | 4 - spec/impl/event_factory_spec.rb | 108 -------- spec/impl/event_summarizer_spec.rb | 84 +++++++ spec/ldclient_events_spec.rb | 122 ++++----- spec/spec_helper.rb | 3 + 21 files changed, 808 insertions(+), 818 deletions(-) delete mode 100644 lib/ldclient-rb/event_summarizer.rb delete mode 100644 lib/ldclient-rb/impl/event_factory.rb create mode 100644 lib/ldclient-rb/impl/event_summarizer.rb create mode 100644 lib/ldclient-rb/impl/event_types.rb delete mode 100644 spec/event_summarizer_spec.rb create mode 100644 spec/events_test_util.rb delete mode 100644 spec/impl/event_factory_spec.rb create mode 100644 spec/impl/event_summarizer_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 9a215686..2bff8c8f 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -21,7 +21,6 @@ module LaunchDarkly require "ldclient-rb/user_filter" require "ldclient-rb/simple_lru_cache" require "ldclient-rb/non_blocking_thread_pool" -require "ldclient-rb/event_summarizer" require "ldclient-rb/events" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/event_summarizer.rb b/lib/ldclient-rb/event_summarizer.rb deleted file mode 100644 index c48a400f..00000000 --- a/lib/ldclient-rb/event_summarizer.rb +++ /dev/null @@ -1,55 +0,0 @@ - -module LaunchDarkly - # @private - EventSummary = Struct.new(:start_date, :end_date, :counters) - - # Manages the state of summarizable information for the EventProcessor, including the - # event counters and user deduplication. 
Note that the methods of this class are - # deliberately not thread-safe; the EventProcessor is responsible for enforcing - # synchronization across both the summarizer and the event queue. - # - # @private - class EventSummarizer - def initialize - clear - end - - # Adds this event to our counters, if it is a type of event we need to count. - def summarize_event(event) - if event[:kind] == "feature" - counter_key = { - key: event[:key], - version: event[:version], - variation: event[:variation] - } - c = @counters[counter_key] - if c.nil? - @counters[counter_key] = { - value: event[:value], - default: event[:default], - count: 1 - } - else - c[:count] = c[:count] + 1 - end - time = event[:creationDate] - if !time.nil? - @start_date = time if @start_date == 0 || time < @start_date - @end_date = time if time > @end_date - end - end - end - - # Returns a snapshot of the current summarized event data, and resets this state. - def snapshot - ret = EventSummary.new(@start_date, @end_date, @counters) - ret - end - - def clear - @start_date = 0 - @end_date = 0 - @counters = {} - end - end -end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 7b77c4db..f2b3e9f9 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,5 +1,7 @@ require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/event_sender" +require "ldclient-rb/impl/event_summarizer" +require "ldclient-rb/impl/event_types" require "ldclient-rb/impl/util" require "concurrent" @@ -26,16 +28,33 @@ # module LaunchDarkly - MAX_FLUSH_WORKERS = 5 - USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, - :avatar, :name ] + module EventProcessorMethods + def record_eval_event( + user, + key, + version = nil, + variation = nil, + value = nil, + reason = nil, + default = nil, + track_events = false, + debug_until = nil, + prereq_of = nil + ) + end - private_constant :MAX_FLUSH_WORKERS - private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS + def record_identify_event(user) + end - # @private - class NullEventProcessor - def add_event(event) + def record_custom_event( + user, + key, + data = nil, + metric_value = nil + ) + end + + def record_alias_event(user, previous_user) end def flush @@ -45,12 +64,16 @@ def stop end end + MAX_FLUSH_WORKERS = 5 + USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, + :avatar, :name ] + + private_constant :MAX_FLUSH_WORKERS + private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS + # @private - class EventMessage - def initialize(event) - @event = event - end - attr_reader :event + class NullEventProcessor + include EventProcessorMethods end # @private @@ -90,6 +113,8 @@ class StopMessage < SynchronousMessage # @private class EventProcessor + include EventProcessorMethods + def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key @logger = config.logger @@ -116,16 +141,46 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test @stopped = Concurrent::AtomicBoolean.new(false) @inbox_full = Concurrent::AtomicBoolean.new(false) - event_sender = test_properties && test_properties.has_key?(:event_sender) ? - test_properties[:event_sender] : + event_sender = (test_properties || {})[:event_sender] || Impl::EventSender.new(sdk_key, config, client ? 
client : Util.new_http_client(config.events_uri, config)) + @timestamp_fn = (test_properties || {})[:timestamp_fn] || proc { Impl::Util.current_time_millis } + EventDispatcher.new(@inbox, sdk_key, config, diagnostic_accumulator, event_sender) end - def add_event(event) - event[:creationDate] = Impl::Util.current_time_millis - post_to_inbox(EventMessage.new(event)) + def record_eval_event( + user, + key, + version = nil, + variation = nil, + value = nil, + reason = nil, + default = nil, + track_events = false, + debug_until = nil, + prereq_of = nil + ) + post_to_inbox(LaunchDarkly::Impl::EvalEvent.new(timestamp, user, key, version, variation, value, reason, + default, track_events, debug_until, prereq_of)) + end + + def record_identify_event(user) + post_to_inbox(LaunchDarkly::Impl::IdentifyEvent.new(timestamp, user)) + end + + def record_custom_event(user, key, data = nil, metric_value = nil) + post_to_inbox(LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value)) + end + + def record_alias_event(user, previous_user) + post_to_inbox(LaunchDarkly::Impl::AliasEvent.new( + timestamp, + user.nil? ? nil : user[:key], + user_to_context_kind(user), + previous_user.nil? ? nil : previous_user[:key], + user_to_context_kind(previous_user) + )) end def flush @@ -155,9 +210,11 @@ def wait_until_inactive sync_msg.wait_for_completion end - private + private def timestamp + @timestamp_fn.call() + end - def post_to_inbox(message) + private def post_to_inbox(message) begin @inbox.push(message, non_block=true) rescue ThreadError @@ -170,6 +227,10 @@ def post_to_inbox(message) end end end + + private def user_to_context_kind(user) + (user.nil? || !user[:anonymous]) ? 'user' : 'anonymousUser' + end end # @private @@ -209,8 +270,6 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) begin message = inbox.pop case message - when EventMessage - dispatch_event(message.event, outbox) when FlushMessage trigger_flush(outbox, flush_workers) when FlushUsersMessage @@ -224,6 +283,8 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) do_shutdown(flush_workers, diagnostic_event_workers) running = false message.completed + else + dispatch_event(message, outbox) end rescue => e Util.log_exception(@config.logger, "Unexpected error in event processor", e) @@ -257,11 +318,10 @@ def dispatch_event(event, outbox) # the event (if tracked) and once for debugging. will_add_full_event = false debug_event = nil - if event[:kind] == "feature" - will_add_full_event = event[:trackEvents] + if event.is_a?(LaunchDarkly::Impl::EvalEvent) + will_add_full_event = event.track_events if should_debug_event(event) - debug_event = event.clone - debug_event[:debug] = true + debug_event = LaunchDarkly::Impl::DebugEvent.new(event) end else will_add_full_event = true @@ -270,12 +330,8 @@ def dispatch_event(event, outbox) # For each user we haven't seen before, we add an index event - unless this is already # an identify event for that user. if !(will_add_full_event && @config.inline_users_in_events) - if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify" - outbox.add_event({ - kind: "index", - creationDate: event[:creationDate], - user: event[:user] - }) + if !event.user.nil? 
&& !notice_user(event.user) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) + outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.user)) end end @@ -295,7 +351,7 @@ def notice_user(user) end def should_debug_event(event) - debug_until = event[:debugEventsUntilDate] + debug_until = event.debug_until if !debug_until.nil? last_past = @last_known_past_time.value debug_until > last_past && debug_until > Impl::Util.current_time_millis @@ -365,12 +421,11 @@ def initialize(capacity, logger) @capacity_exceeded = false @dropped_events = 0 @events = [] - @summarizer = EventSummarizer.new + @summarizer = LaunchDarkly::Impl::EventSummarizer.new end def add_event(event) if @events.length < @capacity - @logger.debug { "[LDClient] Enqueueing event: #{event.to_json}" } @events.push(event) @capacity_exceeded = false else @@ -404,6 +459,15 @@ def clear # @private class EventOutputFormatter + FEATURE_KIND = 'feature' + IDENTIFY_KIND = 'identify' + CUSTOM_KIND = 'custom' + ALIAS_KIND = 'alias' + INDEX_KIND = 'index' + DEBUG_KIND = 'debug' + SUMMARY_KIND = 'summary' + ANONYMOUS_USER_CONTEXT_KIND = 'anonymousUser' + def initialize(config) @inline_users = config.inline_users_in_events @user_filter = UserFilter.new(config) @@ -418,100 +482,130 @@ def make_output_events(events, summary) events_out end - private - - def process_user(event) - filtered = @user_filter.transform_user_props(event[:user]) - Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) - end - - def make_output_event(event) - case event[:kind] - when "feature" - is_debug = event[:debug] + private def make_output_event(event) + case event + + when LaunchDarkly::Impl::EvalEvent out = { - kind: is_debug ? "debug" : "feature", - creationDate: event[:creationDate], - key: event[:key], - value: event[:value] + kind: FEATURE_KIND, + creationDate: event.timestamp, + key: event.key, + value: event.value } - out[:default] = event[:default] if event.has_key?(:default) - out[:variation] = event[:variation] if event.has_key?(:variation) - out[:version] = event[:version] if event.has_key?(:version) - out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) - out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) - if @inline_users || is_debug - out[:user] = process_user(event) - else - out[:userKey] = event[:user][:key] - end - out[:reason] = event[:reason] if !event[:reason].nil? + out[:default] = event.default if !event.default.nil? + out[:variation] = event.variation if !event.variation.nil? + out[:version] = event.version if !event.version.nil? + out[:prereqOf] = event.prereq_of if !event.prereq_of.nil? + set_opt_context_kind(out, event.user) + set_user_or_user_key(out, event.user) + out[:reason] = event.reason if !event.reason.nil? 
out - when "identify" + + when LaunchDarkly::Impl::IdentifyEvent { - kind: "identify", - creationDate: event[:creationDate], - key: event[:user][:key].to_s, - user: process_user(event) + kind: IDENTIFY_KIND, + creationDate: event.timestamp, + key: event.user[:key].to_s, + user: process_user(event.user) } - when "custom" + + when LaunchDarkly::Impl::CustomEvent out = { - kind: "custom", - creationDate: event[:creationDate], - key: event[:key] + kind: CUSTOM_KIND, + creationDate: event.timestamp, + key: event.key } - out[:data] = event[:data] if event.has_key?(:data) - if @inline_users - out[:user] = process_user(event) - else - out[:userKey] = event[:user][:key] - end - out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) - out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) + out[:data] = event.data if !event.data.nil? + set_user_or_user_key(out, event.user) + out[:metricValue] = event.metric_value if !event.metric_value.nil? + set_opt_context_kind(out, event.user) out - when "index" + + when LaunchDarkly::Impl::AliasEvent + { + kind: ALIAS_KIND, + creationDate: event.timestamp, + key: event.key, + contextKind: event.context_kind, + previousKey: event.previous_key, + previousContextKind: event.previous_context_kind + } + + when LaunchDarkly::Impl::IndexEvent { - kind: "index", - creationDate: event[:creationDate], - user: process_user(event) + kind: INDEX_KIND, + creationDate: event.timestamp, + user: process_user(event.user) } + + when LaunchDarkly::Impl::DebugEvent + original = event.eval_event + out = { + kind: DEBUG_KIND, + creationDate: original.timestamp, + key: original.key, + user: process_user(original.user), + value: original.value + } + out[:default] = original.default if !original.default.nil? + out[:variation] = original.variation if !original.variation.nil? + out[:version] = original.version if !original.version.nil? + out[:prereqOf] = original.prereq_of if !original.prereq_of.nil? + set_opt_context_kind(out, original.user) + out[:reason] = original.reason if !original.reason.nil? + out + else - event + nil end end # Transforms the summary data into the format used for event sending. - def make_summary_event(summary) + private def make_summary_event(summary) flags = {} - summary[:counters].each { |ckey, cval| - flag = flags[ckey[:key]] - if flag.nil? - flag = { - default: cval[:default], - counters: [] - } - flags[ckey[:key]] = flag - end - c = { - value: cval[:value], - count: cval[:count] - } - if !ckey[:variation].nil? - c[:variation] = ckey[:variation] - end - if ckey[:version].nil? - c[:unknown] = true - else - c[:version] = ckey[:version] + summary.counters.each do |flagKey, flagInfo| + counters = [] + flagInfo.versions.each do |version, variations| + variations.each do |variation, counter| + c = { + value: counter.value, + count: counter.count + } + c[:variation] = variation if !variation.nil? + if version.nil? + c[:unknown] = true + else + c[:version] = version + end + counters.push(c) + end end - flag[:counters].push(c) - } + flags[flagKey] = { default: flagInfo.default, counters: counters } + end { - kind: "summary", + kind: SUMMARY_KIND, startDate: summary[:start_date], endDate: summary[:end_date], features: flags } end + + private def set_opt_context_kind(out, user) + out[:contextKind] = ANONYMOUS_USER_CONTEXT_KIND if !user.nil? && user[:anonymous] + end + + private def set_user_or_user_key(out, user) + if @inline_users + out[:user] = process_user(user) + else + key = user[:key] + out[:userKey] = key.is_a?(String) ? 
key : key.to_s + end + end + + private def process_user(user) + filtered = @user_filter.transform_user_props(user) + Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) + end end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 9e10c8ef..ed94719e 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -4,6 +4,13 @@ module LaunchDarkly module Impl + # Used internally to record that we evaluated a prerequisite flag. + PrerequisiteEvalRecord = Struct.new( + :prereq_flag, # the prerequisite flag that we evaluated + :prereq_of_flag, # the flag that it was a prerequisite of + :detail # the EvaluationDetail representing the evaluation result + ) + # Encapsulates the feature flag evaluation logic. The Evaluator has no knowledge of the rest of the SDK environment; # if it needs to retrieve flags or segments that are referenced by a flag, it does so through a simple function that # is provided in the constructor. It also produces feature requests as appropriate for any referenced prerequisite @@ -22,7 +29,7 @@ def initialize(get_flag, get_segment, get_big_segments_membership, logger) @get_big_segments_membership = get_big_segments_membership @logger = logger end - + # Used internally to hold an evaluation result and additional state that may be accumulated during an # evaluation. It's simpler and a bit more efficient to represent these as mutable properties rather than # trying to use a pure functional approach, and since we're not exposing this object to any application code @@ -34,7 +41,7 @@ def initialize(get_flag, get_segment, get_big_segments_membership, logger) # evaluation. EvalResult = Struct.new( :detail, # the EvaluationDetail representing the evaluation result - :events, # an array of evaluation events generated by prerequisites, or nil + :prereq_evals, # an array of PrerequisiteEvalRecord instances, or nil :big_segments_status, :big_segments_membership ) @@ -50,17 +57,15 @@ def self.error_result(errorKind, value = nil) # # @param flag [Object] the flag # @param user [Object] the user properties - # @param event_factory [EventFactory] called to construct a feature request event when a prerequisite flag is - # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation # @return [EvalResult] the evaluation result - def evaluate(flag, user, event_factory) + def evaluate(flag, user) result = EvalResult.new if user.nil? || user[:key].nil? result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED) return result end - detail = eval_internal(flag, user, result, event_factory) + detail = eval_internal(flag, user, result) if !result.big_segments_status.nil? # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at # some point and we will want to include the status in the evaluation reason. @@ -80,12 +85,12 @@ def self.make_big_segment_ref(segment) # method is visible for testing private - def eval_internal(flag, user, state, event_factory) + def eval_internal(flag, user, state) if !flag[:on] return get_off_value(flag, EvaluationReason::off) end - prereq_failure_reason = check_prerequisites(flag, user, state, event_factory) + prereq_failure_reason = check_prerequisites(flag, user, state) if !prereq_failure_reason.nil? 
return get_off_value(flag, prereq_failure_reason) end @@ -118,7 +123,7 @@ def eval_internal(flag, user, state, event_factory) return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end - def check_prerequisites(flag, user, state, event_factory) + def check_prerequisites(flag, user, state) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -129,15 +134,15 @@ def check_prerequisites(flag, user, state, event_factory) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, state, event_factory) + prereq_res = eval_internal(prereq_flag, user, state) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end - event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) - state.events = [] if state.events.nil? - state.events.push(event) + prereq_eval = PrerequisiteEvalRecord.new(prereq_flag, flag, prereq_res) + state.prereq_evals = [] if state.prereq_evals.nil? + state.prereq_evals.push(prereq_eval) rescue => exn Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb deleted file mode 100644 index 19b4e474..00000000 --- a/lib/ldclient-rb/impl/event_factory.rb +++ /dev/null @@ -1,123 +0,0 @@ - -module LaunchDarkly - module Impl - # Event constructors are centralized here to avoid mistakes and repetitive logic. - # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons - # in the events (for when variation_detail is called) and one that doesn't. - # - # Note that these methods do not set the "creationDate" property, because in the Ruby client, - # that is done by EventProcessor.add_event(). - class EventFactory - def initialize(with_reasons) - @with_reasons = with_reasons - end - - def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) - add_experiment_data = self.class.is_experiment(flag, detail.reason) - e = { - kind: 'feature', - key: flag[:key], - user: user, - variation: detail.variation_index, - value: detail.value, - default: default_value, - version: flag[:version] - } - # the following properties are handled separately so we don't waste bandwidth on unused keys - e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] - e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil? - e[:reason] = detail.reason if add_experiment_data || @with_reasons - e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] - e - end - - def new_default_event(flag, user, default_value, reason) - e = { - kind: 'feature', - key: flag[:key], - user: user, - value: default_value, - default: default_value, - version: flag[:version] - } - e[:trackEvents] = true if flag[:trackEvents] - e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - e[:reason] = reason if @with_reasons - e[:contextKind] = context_to_context_kind(user) if !user.nil? 
&& user[:anonymous] - e - end - - def new_unknown_flag_event(key, user, default_value, reason) - e = { - kind: 'feature', - key: key, - user: user, - value: default_value, - default: default_value - } - e[:reason] = reason if @with_reasons - e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] - e - end - - def new_identify_event(user) - { - kind: 'identify', - key: user[:key], - user: user - } - end - - def new_alias_event(current_context, previous_context) - { - kind: 'alias', - key: current_context[:key], - contextKind: context_to_context_kind(current_context), - previousKey: previous_context[:key], - previousContextKind: context_to_context_kind(previous_context) - } - end - - def new_custom_event(event_name, user, data, metric_value) - e = { - kind: 'custom', - key: event_name, - user: user - } - e[:data] = data if !data.nil? - e[:metricValue] = metric_value if !metric_value.nil? - e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] - e - end - - def self.is_experiment(flag, reason) - return false if !reason - - if reason.in_experiment - return true - end - - case reason[:kind] - when 'RULE_MATCH' - index = reason[:ruleIndex] - if !index.nil? - rules = flag[:rules] || [] - return index >= 0 && index < rules.length && rules[index][:trackEvents] - end - when 'FALLTHROUGH' - return !!flag[:trackEventsFallthrough] - end - false - end - - private def context_to_context_kind(user) - if !user.nil? && user[:anonymous] - return "anonymousUser" - else - return "user" - end - end - end - end -end diff --git a/lib/ldclient-rb/impl/event_summarizer.rb b/lib/ldclient-rb/impl/event_summarizer.rb new file mode 100644 index 00000000..5c9dcc1a --- /dev/null +++ b/lib/ldclient-rb/impl/event_summarizer.rb @@ -0,0 +1,63 @@ +require "ldclient-rb/impl/event_types" + +module LaunchDarkly + module Impl + EventSummary = Struct.new(:start_date, :end_date, :counters) + + EventSummaryFlagInfo = Struct.new(:default, :versions) + + EventSummaryFlagVariationCounter = Struct.new(:value, :count) + + # Manages the state of summarizable information for the EventProcessor, including the + # event counters and user deduplication. Note that the methods of this class are + # deliberately not thread-safe; the EventProcessor is responsible for enforcing + # synchronization across both the summarizer and the event queue. + class EventSummarizer + class Counter + end + + def initialize + clear + end + + # Adds this event to our counters, if it is a type of event we need to count. + def summarize_event(event) + return if !event.is_a?(LaunchDarkly::Impl::EvalEvent) + + counters_for_flag = @counters[event.key] + if counters_for_flag.nil? + counters_for_flag = EventSummaryFlagInfo.new(event.default, Hash.new) + @counters[event.key] = counters_for_flag + end + counters_for_flag_version = counters_for_flag.versions[event.version] + if counters_for_flag_version.nil? + counters_for_flag_version = Hash.new + counters_for_flag.versions[event.version] = counters_for_flag_version + end + variation_counter = counters_for_flag_version[event.variation] + if variation_counter.nil? + counters_for_flag_version[event.variation] = EventSummaryFlagVariationCounter.new(event.value, 1) + else + variation_counter.count = variation_counter.count + 1 + end + time = event.timestamp + if !time.nil? + @start_date = time if @start_date == 0 || time < @start_date + @end_date = time if time > @end_date + end + end + + # Returns a snapshot of the current summarized event data, and resets this state. 
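As a usage sketch only (not part of the new file, and assuming the SDK with this patch is on the load path), the aggregation above produces counters keyed by flag key, then flag version, then variation index:

require "ldclient-rb/impl/event_summarizer"

summarizer = LaunchDarkly::Impl::EventSummarizer.new
# Two evaluations of the same flag version and variation
e1 = LaunchDarkly::Impl::EvalEvent.new(1000, { key: "u" }, "my-flag", 5, 0, "on", nil, "off")
e2 = LaunchDarkly::Impl::EvalEvent.new(1001, { key: "u" }, "my-flag", 5, 0, "on", nil, "off")
summarizer.summarize_event(e1)
summarizer.summarize_event(e2)

snap = summarizer.snapshot
snap.start_date                    # => 1000
snap.end_date                      # => 1001
info = snap.counters["my-flag"]    # EventSummaryFlagInfo
info.default                       # => "off"
info.versions[5][0].value          # => "on"
info.versions[5][0].count          # => 2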
+ def snapshot + ret = EventSummary.new(@start_date, @end_date, @counters) + ret + end + + def clear + @start_date = 0 + @end_date = 0 + @counters = {} + end + end + end +end diff --git a/lib/ldclient-rb/impl/event_types.rb b/lib/ldclient-rb/impl/event_types.rb new file mode 100644 index 00000000..6ca043ba --- /dev/null +++ b/lib/ldclient-rb/impl/event_types.rb @@ -0,0 +1,90 @@ +module LaunchDarkly + module Impl + class Event + def initialize(timestamp, user) + @timestamp = timestamp + @user = user + end + + attr_reader :timestamp + attr_reader :kind + attr_reader :user + end + + class EvalEvent < Event + def initialize(timestamp, user, key, version = nil, variation = nil, value = nil, reason = nil, default = nil, + track_events = false, debug_until = nil, prereq_of = nil) + super(timestamp, user) + @key = key + @version = version + @variation = variation + @value = value + @reason = reason + @default = default + # avoid setting rarely-used attributes if they have no value - this saves a little space per instance + @track_events = track_events if track_events + @debug_until = debug_until if debug_until + @prereq_of = prereq_of if prereq_of + end + + attr_reader :key + attr_reader :version + attr_reader :variation + attr_reader :value + attr_reader :reason + attr_reader :default + attr_reader :track_events + attr_reader :debug_until + attr_reader :prereq_of + end + + class IdentifyEvent < Event + def initialize(timestamp, user) + super(timestamp, user) + end + end + + class CustomEvent < Event + def initialize(timestamp, user, key, data = nil, metric_value = nil) + super(timestamp, user) + @key = key + @data = data if !data.nil? + @metric_value = metric_value if !metric_value.nil? + end + + attr_reader :key + attr_reader :data + attr_reader :metric_value + end + + class AliasEvent < Event + def initialize(timestamp, key, context_kind, previous_key, previous_context_kind) + super(timestamp, nil) + @key = key + @context_kind = context_kind + @previous_key = previous_key + @previous_context_kind = previous_context_kind + end + + attr_reader :key + attr_reader :context_kind + attr_reader :previous_key + attr_reader :previous_context_kind + end + + class IndexEvent < Event + def initialize(timestamp, user) + super(timestamp, user) + end + end + + class DebugEvent < Event + def initialize(eval_event) + super(eval_event.timestamp, eval_event.user) + @eval_event = eval_event + end + + attr_reader :eval_event + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index b5e5ead9..70dc6210 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,7 +1,6 @@ require "ldclient-rb/impl/big_segments" require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/evaluator" -require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" @@ -46,9 +45,6 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key - @event_factory_default = EventFactory.new(false) - @event_factory_with_reasons = EventFactory.new(true) - # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses # the feature store through the Config object, so we need to make a new Config that uses @@ -202,7 +198,7 @@ def initialized? 
# @return the variation to show the user, or the default value if there's an an error # def variation(key, user, default) - evaluate_internal(key, user, default, @event_factory_default).value + evaluate_internal(key, user, default, false).value end # @@ -229,7 +225,7 @@ def variation(key, user, default) # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) - evaluate_internal(key, user, default, @event_factory_with_reasons) + evaluate_internal(key, user, default, true) end # @@ -253,7 +249,7 @@ def identify(user) return end sanitize_user(user) - @event_processor.add_event(@event_factory_default.new_identify_event(user)) + @event_processor.record_identify_event(user) end # @@ -284,7 +280,7 @@ def track(event_name, user, data = nil, metric_value = nil) return end sanitize_user(user) - @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) + @event_processor.record_custom_event(user, event_name, data, metric_value) end # @@ -301,7 +297,7 @@ def alias(current_context, previous_context) end sanitize_user(current_context) sanitize_user(previous_context) - @event_processor.add_event(@event_factory_default.new_alias_event(current_context, previous_context)) + @event_processor.record_alias_event(current_context, previous_context) end # @@ -368,13 +364,13 @@ def all_flags_state(user, options={}) next end begin - detail = @evaluator.evaluate(f, user, @event_factory_default).detail + detail = @evaluator.evaluate(f, user).detail rescue => exn detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) end - requires_experiment_data = EventFactory.is_experiment(f, detail.reason) + requires_experiment_data = is_experiment(f, detail.reason) flag_state = { key: f[:key], value: detail.value, @@ -430,7 +426,7 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) end # @return [EvaluationDetail] - def evaluate_internal(key, user, default, event_factory) + def evaluate_internal(key, user, default, with_reasons) if @config.offline? return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end @@ -453,7 +449,7 @@ def evaluate_internal(key, user, default, event_factory) else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) - @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) + record_unknown_flag_eval(key, user, default, detail.reason, with_reasons) return detail end end @@ -463,32 +459,94 @@ def evaluate_internal(key, user, default, event_factory) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_FLAG_NOT_FOUND, default) - @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) + record_unknown_flag_eval(key, user, default, detail.reason, with_reasons) return detail end begin - res = @evaluator.evaluate(feature, user, event_factory) - if !res.events.nil? - res.events.each do |event| - @event_processor.add_event(event) + res = @evaluator.evaluate(feature, user) + if !res.prereq_evals.nil? 
+ res.prereq_evals.each do |prereq_eval| + record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, user, prereq_eval.detail, with_reasons) end end detail = res.detail if detail.default_value? detail = EvaluationDetail.new(default, nil, detail.reason) end - @event_processor.add_event(event_factory.new_eval_event(feature, user, detail, default)) + record_flag_eval(feature, user, detail, default, with_reasons) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) - @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) + record_flag_eval_error(feature, user, default, detail.reason, with_reasons) return detail end end - def sanitize_user(user) + private def record_flag_eval(flag, user, detail, default, with_reasons) + add_experiment_data = is_experiment(flag, detail.reason) + @event_processor.record_eval_event( + user, + flag[:key], + flag[:version], + detail.variation_index, + detail.value, + (add_experiment_data || with_reasons) ? detail.reason : nil, + default, + add_experiment_data || flag[:trackEvents] || false, + flag[:debugEventsUntilDate], + nil + ) + end + + private def record_prereq_flag_eval(prereq_flag, prereq_of_flag, user, detail, with_reasons) + add_experiment_data = is_experiment(prereq_flag, detail.reason) + @event_processor.record_eval_event( + user, + prereq_flag[:key], + prereq_flag[:version], + detail.variation_index, + detail.value, + (add_experiment_data || with_reasons) ? detail.reason : nil, + nil, + add_experiment_data || prereq_flag[:trackEvents] || false, + prereq_flag[:debugEventsUntilDate], + prereq_of_flag[:key] + ) + end + + private def record_flag_eval_error(flag, user, default, reason, with_reasons) + @event_processor.record_eval_event(user, flag[:key], flag[:version], nil, default, with_reasons ? reason : nil, default, + flag[:trackEvents], flag[:debugEventsUntilDate], nil) + end + + private def record_unknown_flag_eval(flag_key, user, default, reason, with_reasons) + @event_processor.record_eval_event(user, flag_key, nil, nil, default, with_reasons ? reason : nil, default, + false, nil, nil) + end + + private def is_experiment(flag, reason) + return false if !reason + + if reason.in_experiment + return true + end + + case reason[:kind] + when 'RULE_MATCH' + index = reason[:ruleIndex] + if !index.nil? 
+ rules = flag[:rules] || [] + return index >= 0 && index < rules.length && rules[index][:trackEvents] + end + when 'FALLTHROUGH' + return !!flag[:trackEventsFallthrough] + end + false + end + + private def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s end diff --git a/spec/event_summarizer_spec.rb b/spec/event_summarizer_spec.rb deleted file mode 100644 index 5449e691..00000000 --- a/spec/event_summarizer_spec.rb +++ /dev/null @@ -1,63 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::EventSummarizer do - subject { LaunchDarkly::EventSummarizer } - - let(:user) { { key: "key" } } - - it "does not add identify event to summary" do - es = subject.new - snapshot = es.snapshot - es.summarize_event({ kind: "identify", user: user }) - - expect(es.snapshot).to eq snapshot - end - - it "does not add custom event to summary" do - es = subject.new - snapshot = es.snapshot - es.summarize_event({ kind: "custom", key: "whatever", user: user }) - - expect(es.snapshot).to eq snapshot - end - - it "tracks start and end dates" do - es = subject.new - flag = { key: "key" } - event1 = { kind: "feature", creationDate: 2000, user: user } - event2 = { kind: "feature", creationDate: 1000, user: user } - event3 = { kind: "feature", creationDate: 1500, user: user } - es.summarize_event(event1) - es.summarize_event(event2) - es.summarize_event(event3) - data = es.snapshot - - expect(data.start_date).to be 1000 - expect(data.end_date).to be 2000 - end - - it "counts events" do - es = subject.new - flag1 = { key: "key1", version: 11 } - flag2 = { key: "key2", version: 22 } - event1 = { kind: "feature", key: "key1", version: 11, user: user, variation: 1, value: "value1", default: "default1" } - event2 = { kind: "feature", key: "key1", version: 11, user: user, variation: 2, value: "value2", default: "default1" } - event3 = { kind: "feature", key: "key2", version: 22, user: user, variation: 1, value: "value99", default: "default2" } - event4 = { kind: "feature", key: "key1", version: 11, user: user, variation: 1, value: "value1", default: "default1" } - event5 = { kind: "feature", key: "badkey", user: user, variation: nil, value: "default3", default: "default3" } - [event1, event2, event3, event4, event5].each { |e| es.summarize_event(e) } - data = es.snapshot - - expectedCounters = { - { key: "key1", version: 11, variation: 1 } => - { count: 2, value: "value1", default: "default1" }, - { key: "key1", version: 11, variation: 2 } => - { count: 1, value: "value2", default: "default1" }, - { key: "key2", version: 22, variation: 1 } => - { count: 1, value: "value99", default: "default2" }, - { key: "badkey", version: nil, variation: nil } => - { count: 1, value: "default3", default: "default3" } - } - expect(data.counters).to eq expectedCounters - end -end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index e9a6d6ff..894c3f70 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -1,3 +1,6 @@ +require "ldclient-rb/impl/event_types" + +require "events_test_util" require "http_util" require "spec_helper" require "time" @@ -5,6 +8,7 @@ describe LaunchDarkly::EventProcessor do subject { LaunchDarkly::EventProcessor } + let(:starting_timestamp) { 1000 } let(:default_config_opts) { { diagnostic_opt_out: true, logger: $null_log } } let(:default_config) { LaunchDarkly::Config.new(default_config_opts) } let(:user) { { key: "userkey", name: "Red" } } @@ -16,7 +20,15 @@ def with_processor_and_sender(config) sender = FakeEventSender.new - ep = subject.new("sdk_key", config, nil, nil, { 
event_sender: sender }) + timestamp = starting_timestamp + ep = subject.new("sdk_key", config, nil, nil, { + event_sender: sender, + timestamp_fn: proc { + t = timestamp + timestamp += 1 + t + } + }) begin yield ep, sender ensure @@ -26,59 +38,41 @@ def with_processor_and_sender(config) it "queues identify event" do with_processor_and_sender(default_config) do |ep, sender| - e = { kind: "identify", key: user[:key], user: user } - ep.add_event(e) + ep.record_identify_event(user) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(e) + expect(output).to contain_exactly(eq(identify_event(user))) end end it "filters user in identify event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| - e = { kind: "identify", key: user[:key], user: user } - ep.add_event(e) + ep.record_identify_event(user) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly({ - kind: "identify", - key: user[:key], - creationDate: e[:creationDate], - user: filtered_user - }) + expect(output).to contain_exactly(eq(identify_event(filtered_user))) end end it "stringifies built-in user attributes in identify event" do with_processor_and_sender(default_config) do |ep, sender| - flag = { key: "flagkey", version: 11 } - e = { kind: "identify", key: numeric_user[:key], user: numeric_user } - ep.add_event(e) + ep.record_identify_event(numeric_user) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - kind: "identify", - key: numeric_user[:key].to_s, - creationDate: e[:creationDate], - user: stringified_numeric_user - ) + expect(output).to contain_exactly(eq(identify_event(stringified_numeric_user))) end end it "queues individual feature event with index event" do with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), + eq(index_event(user)), + eq(feature_event(flag, user, 1, 'value')), include(:kind => "summary") ) end @@ -88,16 +82,12 @@ def with_processor_and_sender(config) config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe, filtered_user)), - eq(feature_event(fe, flag, false, nil)), + eq(index_event(filtered_user)), + eq(feature_event(flag, user, 1, 'value')), include(:kind => "summary") ) end @@ -106,16 +96,12 @@ def with_processor_and_sender(config) it "stringifies built-in user attributes in index event" do with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(numeric_user, 'flagkey', 11, 1, 'value', nil, nil, true) 
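      # The positional arguments above follow EventProcessorMethods#record_eval_event as defined
      # earlier in this patch: (user, key, version, variation, value, reason, default,
      # track_events, debug_until, prereq_of); this call therefore records flag "flagkey"
      # version 11, variation index 1, value "value", with no reason or default and
      # track_events set to true.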
output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe, stringified_numeric_user)), - eq(feature_event(fe, flag, false, nil)), + eq(index_event(stringified_numeric_user)), + eq(feature_event(flag, stringified_numeric_user, 1, 'value')), include(:kind => "summary") ) end @@ -125,15 +111,11 @@ def with_processor_and_sender(config) config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, user)), + eq(feature_event(flag, user, 1, 'value', true)), include(:kind => "summary") ) end @@ -143,15 +125,11 @@ def with_processor_and_sender(config) config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(numeric_user, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, stringified_numeric_user)), + eq(feature_event(flag, stringified_numeric_user, 1, 'value', true)), include(:kind => "summary") ) end @@ -161,15 +139,11 @@ def with_processor_and_sender(config) config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, filtered_user)), + eq(feature_event(flag, filtered_user, 1, 'value', true)), include(:kind => "summary") ) end @@ -179,15 +153,11 @@ def with_processor_and_sender(config) config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe, user)), + eq(index_event(user)), include(:kind => "summary") ) end @@ -197,16 +167,12 @@ def with_processor_and_sender(config) with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: future_time - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, future_time) output = flush_and_get_events(ep, sender) expect(output).to 
contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, true, user)), + eq(index_event(user)), + eq(debug_event(flag, user, 1, 'value')), include(:kind => "summary") ) end @@ -216,17 +182,13 @@ def with_processor_and_sender(config) with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true, debugEventsUntilDate: future_time - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true, future_time) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), - eq(feature_event(fe, flag, true, user)), + eq(index_event(user)), + eq(feature_event(flag, user, 1, 'value')), + eq(debug_event(flag, user, 1, 'value')), include(:kind => "summary") ) end @@ -239,18 +201,15 @@ def with_processor_and_sender(config) # Send and flush an event we don't care about, just to set the last server time sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) - ep.add_event({ kind: "identify", user: user }) + + ep.record_identify_event(user) flush_and_get_events(ep, sender) # Now send an event with debug mode on, with a "debug until" time that is further in # the future than the server time, but in the past compared to the client. flag = { key: "flagkey", version: 11 } debug_until = (server_time.to_f * 1000).to_i + 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) # Should get a summary event only, not a full feature event output = flush_and_get_events(ep, sender) @@ -267,18 +226,14 @@ def with_processor_and_sender(config) # Send and flush an event we don't care about, just to set the last server time sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) - ep.add_event({ kind: "identify", user: user }) + ep.record_identify_event(user) flush_and_get_events(ep, sender) # Now send an event with debug mode on, with a "debug until" time that is further in # the future than the server time, but in the past compared to the client. 
flag = { key: "flagkey", version: 11 } debug_until = (server_time.to_f * 1000).to_i - 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - ep.add_event(fe) + ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) # Should get a summary event only, not a full feature event output = flush_and_get_events(ep, sender) @@ -293,22 +248,14 @@ def with_processor_and_sender(config) flag1 = { key: "flagkey1", version: 11 } flag2 = { key: "flagkey2", version: 22 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 1, value: "value", trackEvents: true - } - ep.add_event(fe1) - ep.add_event(fe2) + ep.record_eval_event(user, 'flagkey1', 11, 1, 'value', nil, nil, true) + ep.record_eval_event(user, 'flagkey2', 22, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe1, user)), - eq(feature_event(fe1, flag1, false, nil)), - eq(feature_event(fe2, flag2, false, nil)), + eq(index_event(user)), + eq(feature_event(flag1, user, 1, 'value', false, starting_timestamp)), + eq(feature_event(flag2, user, 1, 'value', false, starting_timestamp + 1)), include(:kind => "summary") ) end @@ -319,24 +266,16 @@ def with_processor_and_sender(config) flag1 = { key: "flagkey1", version: 11 } flag2 = { key: "flagkey2", version: 22 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value1", default: "default1" - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 2, value: "value2", default: "default2" - } - ep.add_event(fe1) - ep.add_event(fe2) + ep.record_eval_event(user, 'flagkey1', 11, 1, 'value1', nil, 'default1', false) + ep.record_eval_event(user, 'flagkey2', 22, 2, 'value2', nil, 'default2', false) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(fe1, user)), + eq(index_event(user)), eq({ kind: "summary", - startDate: fe1[:creationDate], - endDate: fe2[:creationDate], + startDate: starting_timestamp, + endDate: starting_timestamp + 1, features: { flagkey1: { default: "default1", @@ -358,13 +297,12 @@ def with_processor_and_sender(config) it "queues custom event with user" do with_processor_and_sender(default_config) do |ep, sender| - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } - ep.add_event(e) + ep.record_custom_event(user, 'eventkey', { thing: 'stuff' }, 1.5) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(e, user)), - eq(custom_event(e, nil)) + eq(index_event(user)), + eq(custom_event(user, 'eventkey', { thing: 'stuff' }, 1.5)) ) end end @@ -372,12 +310,11 @@ def with_processor_and_sender(config) it "can include inline user in custom event" do config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - ep.add_event(e) + ep.record_custom_event(user, 'eventkey') output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(custom_event(e, user)) + 
eq(custom_event(user, 'eventkey', nil, nil, true)) ) end end @@ -385,12 +322,11 @@ def with_processor_and_sender(config) it "filters user in custom event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - ep.add_event(e) + ep.record_custom_event(user, 'eventkey') output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(custom_event(e, filtered_user)) + eq(custom_event(filtered_user, 'eventkey', nil, nil, true)) ) end end @@ -398,46 +334,49 @@ def with_processor_and_sender(config) it "stringifies built-in user attributes in custom event" do config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| - e = { kind: "custom", key: "eventkey", user: numeric_user } - ep.add_event(e) + ep.record_custom_event(numeric_user, 'eventkey', nil, nil) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(custom_event(e, stringified_numeric_user)) + eq(custom_event(stringified_numeric_user, 'eventkey', nil, nil, true)) ) end end it "queues alias event" do with_processor_and_sender(default_config) do |ep, sender| - e = { kind: "alias", key: "a", contextKind: "user", previousKey: "b", previousContextKind: "user" } - ep.add_event(e) + ep.record_alias_event({ key: 'a' }, { key: 'b', anonymous: true }) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(e) + expect(output).to contain_exactly({ + creationDate: starting_timestamp, + kind: 'alias', + key: 'a', + contextKind: 'user', + previousKey: 'b', + previousContextKind: 'anonymousUser' + }) end end it "treats nil value for custom the same as an empty hash" do with_processor_and_sender(default_config) do |ep, sender| user_with_nil_custom = { key: "userkey", custom: nil } - e = { kind: "identify", key: "userkey", user: user_with_nil_custom } - ep.add_event(e) + ep.record_identify_event(user_with_nil_custom) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(e) + expect(output).to contain_exactly(eq(identify_event(user_with_nil_custom))) end end it "does a final flush when shutting down" do with_processor_and_sender(default_config) do |ep, sender| - e = { kind: "identify", key: user[:key], user: user } - ep.add_event(e) + ep.record_identify_event(user) ep.stop output = sender.analytics_payloads.pop - expect(output).to contain_exactly(e) + expect(output).to contain_exactly(eq(identify_event(user))) end end @@ -452,12 +391,10 @@ def with_processor_and_sender(config) it "stops posting events after unrecoverable error" do with_processor_and_sender(default_config) do |ep, sender| sender.result = LaunchDarkly::Impl::EventSenderResult.new(false, true, nil) - e = { kind: "identify", key: user[:key], user: user } - ep.add_event(e) + e = ep.record_identify_event(user) flush_and_get_events(ep, sender) - e = { kind: "identify", key: user[:key], user: user } - ep.add_event(e) + ep.record_identify_event(user) ep.flush ep.wait_until_inactive expect(sender.analytics_payloads.empty?).to be true @@ -510,9 +447,9 @@ def with_diagnostic_processor_and_sender(config) with_diagnostic_processor_and_sender(config) do |ep, sender| init_event = sender.diagnostic_payloads.pop - ep.add_event({ kind: 'identify', user: user }) - ep.add_event({ kind: 'identify', user: user }) - ep.add_event({ kind: 
'identify', user: user }) + 3.times do + ep.record_identify_event(user) + end flush_and_get_events(ep, sender) periodic_event = sender.diagnostic_payloads.pop @@ -528,8 +465,8 @@ def with_diagnostic_processor_and_sender(config) with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| init_event = sender.diagnostic_payloads.pop - ep.add_event({ kind: 'custom', key: 'event1', user: user }) - ep.add_event({ kind: 'custom', key: 'event2', user: user }) + ep.record_custom_event(user, 'event1') + ep.record_custom_event(user, 'event2') events = flush_and_get_events(ep, sender) periodic_event = sender.diagnostic_payloads.pop @@ -541,44 +478,66 @@ def with_diagnostic_processor_and_sender(config) end end - def index_event(e, user) + def index_event(user, timestamp = starting_timestamp) { kind: "index", - creationDate: e[:creationDate], + creationDate: timestamp, user: user } end - def feature_event(e, flag, debug, inline_user) + def identify_event(user, timestamp = starting_timestamp) + { + kind: "identify", + creationDate: timestamp, + key: user[:key], + user: user + } + end + + def feature_event(flag, user, variation, value, inline_user = false, timestamp = starting_timestamp) out = { - kind: debug ? "debug" : "feature", - creationDate: e[:creationDate], + kind: 'feature', + creationDate: timestamp, key: flag[:key], - variation: e[:variation], + variation: variation, version: flag[:version], - value: e[:value] + value: value } - if inline_user.nil? - out[:userKey] = e[:user][:key] + if inline_user + out[:user] = user else - out[:user] = inline_user + out[:userKey] = user[:key] end out end - def custom_event(e, inline_user) + def debug_event(flag, user, variation, value, timestamp = starting_timestamp) + out = { + kind: 'debug', + creationDate: timestamp, + key: flag[:key], + variation: variation, + version: flag[:version], + value: value, + user: user + } + out + end + + def custom_event(user, key, data, metric_value, inline_user = false, timestamp = starting_timestamp) out = { kind: "custom", - creationDate: e[:creationDate], - key: e[:key] + creationDate: timestamp, + key: key } - out[:data] = e[:data] if e.has_key?(:data) - if inline_user.nil? - out[:userKey] = e[:user][:key] + out[:data] = data if !data.nil? + if inline_user + out[:user] = user else - out[:user] = inline_user + out[:userKey] = user[:key] end - out[:metricValue] = e[:metricValue] if e.has_key?(:metricValue) + out[:metricValue] = metric_value if !metric_value.nil? 
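# (Annotation: as with feature_event above, the expected custom-event payload either inlines
# the full user hash or carries only userKey, depending on whether the test in question
# enables inline_users_in_events.)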
out end diff --git a/spec/events_test_util.rb b/spec/events_test_util.rb new file mode 100644 index 00000000..66b5b97d --- /dev/null +++ b/spec/events_test_util.rb @@ -0,0 +1,19 @@ +require "ldclient-rb/impl/event_types" + +def make_eval_event(timestamp, user, key, version = nil, variation = nil, value = nil, reason = nil, + default = nil, track_events = false, debug_until = nil, prereq_of = nil) + LaunchDarkly::Impl::EvalEvent.new(timestamp, user, key, version, variation, value, reason, + default, track_events, debug_until, prereq_of) +end + +def make_identify_event(timestamp, user) + LaunchDarkly::Impl::IdentifyEvent.new(timestamp, user) +end + +def make_custom_event(timestamp, user, key, data = nil, metric_value = nil) + LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value) +end + +def make_alias_event(timestamp, key, context_kind, previous_key, previous_context_kind) + LaunchDarkly::Impl::AliasEvent.new(timestamp, key, context_kind, previous_key, previous_context_kind) +end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index b8a9e2e4..32db7d79 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -20,7 +20,7 @@ module Impl with_segment(segment). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) end @@ -36,7 +36,7 @@ module Impl with_segment(segment). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) end @@ -53,7 +53,7 @@ module Impl with_big_segment_for_user(user, segment, true). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -73,7 +73,7 @@ module Impl with_big_segment_for_user(user, segment, nil). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -93,7 +93,7 @@ module Impl with_big_segment_for_user(user, segment, false). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -111,7 +111,7 @@ module Impl with_big_segments_status(BigSegmentsStatus::STALE). build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) end @@ -149,7 +149,7 @@ module Impl # The membership deliberately does not include segment1, because we want the first rule to be # a non-match so that it will continue on and check segment2 as well. 
- result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb index a90a5499..2b76505d 100644 --- a/spec/impl/evaluator_clause_spec.rb +++ b/spec/impl/evaluator_clause_spec.rb @@ -10,28 +10,28 @@ module Impl user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false end it "returns false for unknown operator" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'unknown', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false end it "does not stop evaluating rules after clause with unknown operator" do @@ -41,14 +41,14 @@ module Impl clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } rule1 = { clauses: [ clause1 ], variation: 1 } flag = boolean_flag_with_rules([rule0, rule1]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false end end end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 7299decb..6a6b9310 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -11,9 +11,9 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "reuses rule match reason instances if possible" do @@ -22,8 +22,8 @@ module Impl Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached rule match reason user = { key: 'userkey' } detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, user, factory) - result2 = basic_evaluator.evaluate(flag, user, factory) + result1 = basic_evaluator.evaluate(flag, user) + result2 = 
basic_evaluator.evaluate(flag, user) expect(result1.detail.reason.rule_id).to eq 'ruleid' expect(result1.detail.reason).to be result2.detail.reason end @@ -34,9 +34,9 @@ module Impl user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if rule variation is negative" do @@ -45,9 +45,9 @@ module Impl user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if rule has neither variation nor rollout" do @@ -56,9 +56,9 @@ module Impl user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if rule has a rollout with no variations" do @@ -68,16 +68,16 @@ module Impl user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "coerces user key to a string for evaluation" do clause = { attribute: 'key', op: 'in', values: ['999'] } flag = boolean_flag_with_clauses([clause]) user = { key: 999 } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.value).to eq(true) end @@ -88,7 +88,7 @@ module Impl rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end @@ -98,7 +98,7 @@ module Impl rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -108,7 +108,7 @@ module Impl rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -118,7 +118,7 @@ module Impl rollout: { kind: 'experiment', variations: [ { weight: 
100000, variation: 1, untracked: true } ] } } flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 5cd85148..bb526b7c 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -10,7 +10,7 @@ def test_segment_match(segment) clause = make_segment_match_clause(segment) flag = boolean_flag_with_clauses([clause]) e = EvaluatorBuilder.new(logger).with_segment(segment).build - e.evaluate(flag, user, factory).detail.value + e.evaluate(flag, user).detail.value end it "retrieves segment from segment store for segmentMatch operator" do @@ -22,14 +22,14 @@ def test_segment_match(segment) } e = EvaluatorBuilder.new(logger).with_segment(segment).build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - expect(e.evaluate(flag, user, factory).detail.value).to be true + expect(e.evaluate(flag, user).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user, factory).detail.value).to be false + expect(e.evaluate(flag, user).detail.value).to be false end it 'explicitly includes user' do diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 15766866..20b231fb 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -1,3 +1,4 @@ +require "events_test_util" require "spec_helper" require "impl/evaluator_spec_base" @@ -17,9 +18,9 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::off) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns nil if flag is off and off variation is unspecified" do @@ -31,9 +32,9 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if off variation is too high" do @@ -47,9 +48,9 @@ module Impl user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if off variation is negative" do @@ -63,9 +64,9 @@ module Impl user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns off variation if 
prerequisite is not found" do @@ -80,9 +81,9 @@ module Impl user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "reuses prerequisite-failed reason instances if possible" do @@ -97,9 +98,9 @@ module Impl Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason user = { key: 'x' } e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result1 = e.evaluate(flag, user, factory) + result1 = e.evaluate(flag, user) expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') - result2 = e.evaluate(flag, user, factory) + result2 = e.evaluate(flag, user) expect(result2.detail.reason).to be result1.detail.reason end @@ -123,13 +124,13 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' - }] + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))) + ] e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) + expect(result.prereq_evals).to eq(expected_prereqs) end it "returns off variation and event if prerequisite is off" do @@ -153,13 +154,13 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)) + ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) + expect(result.prereq_evals).to eq(expected_prereqs) end it "returns off variation and event if prerequisite is not met" do @@ -181,13 +182,13 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' - }] + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)) + ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) + expect(result.prereq_evals).to eq(expected_prereqs) end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do @@ -209,13 +210,13 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) - events_should_be = [{ - kind: 
'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::fallthrough)) + ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user, factory) + result = e.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) + expect(result.prereq_evals).to eq(expected_prereqs) end it "returns an error if fallthrough variation is too high" do @@ -228,9 +229,9 @@ module Impl } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if fallthrough variation is negative" do @@ -243,9 +244,9 @@ module Impl } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if fallthrough has no variation or rollout" do @@ -258,9 +259,9 @@ module Impl } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "returns an error if fallthrough has a rollout with no variations" do @@ -273,9 +274,9 @@ module Impl } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end it "matches user from targets" do @@ -291,9 +292,9 @@ module Impl } user = { key: 'userkey' } detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) + expect(result.prereq_evals).to eq(nil) end describe "experiment rollout behavior" do @@ -306,7 +307,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -320,7 +321,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user, factory) + result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -334,7 +335,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user, factory) + 
result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index da8662ac..6008c8b9 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -75,10 +75,6 @@ def build end module EvaluatorSpecBase - def factory - EventFactory.new(false) - end - def user { key: "userkey", diff --git a/spec/impl/event_factory_spec.rb b/spec/impl/event_factory_spec.rb deleted file mode 100644 index 9da19de0..00000000 --- a/spec/impl/event_factory_spec.rb +++ /dev/null @@ -1,108 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Impl::EventFactory do - subject { LaunchDarkly::Impl::EventFactory } - - describe "#new_eval_event" do - let(:event_factory_without_reason) { subject.new(false) } - let(:user) { { 'key': 'userA' } } - let(:rule_with_experiment_rollout) { - { id: 'ruleid', - clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - trackEvents: false, - rollout: { kind: 'experiment', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } - } - } - - let(:rule_with_rollout) { - { id: 'ruleid', - trackEvents: false, - clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } - } - } - - let(:fallthrough_with_rollout) { - { rollout: { kind: 'rollout', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ], trackEventsFallthrough: false } } - } - - let(:rule_reason) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid') } - let(:rule_reason_with_experiment) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid', true) } - let(:fallthrough_reason) { LaunchDarkly::EvaluationReason::fallthrough } - let(:fallthrough_reason_with_experiment) { LaunchDarkly::EvaluationReason::fallthrough(true) } - - context "in_experiment is true" do - it "sets the reason and trackevents: true for rules" do - flag = createFlag('rule', rule_with_experiment_rollout) - detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason_with_experiment) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to eql(true) - expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid,true)") - end - - it "sets the reason and trackevents: true for the fallthrough" do - fallthrough_with_rollout[:kind] = 'experiment' - flag = createFlag('fallthrough', fallthrough_with_rollout) - detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason_with_experiment) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to eql(true) - expect(r[:reason].to_s).to eql("FALLTHROUGH(true)") - end - end - - context "in_experiment is false" do - it "sets the reason & trackEvents: true if rule has trackEvents set to true" do - rule_with_rollout[:trackEvents] = true - flag = createFlag('rule', rule_with_rollout) - detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to eql(true) - expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid)") - end - - it "sets the reason & trackEvents: true if fallthrough has trackEventsFallthrough set to true" do - flag = createFlag('fallthrough', fallthrough_with_rollout) - flag[:trackEventsFallthrough] = true - detail = 
LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to eql(true) - expect(r[:reason].to_s).to eql("FALLTHROUGH") - end - - it "doesn't set the reason & trackEvents if rule has trackEvents set to false" do - flag = createFlag('rule', rule_with_rollout) - detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to be_nil - expect(r[:reason]).to be_nil - end - - it "doesn't set the reason & trackEvents if fallthrough has trackEventsFallthrough set to false" do - flag = createFlag('fallthrough', fallthrough_with_rollout) - detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to be_nil - expect(r[:reason]).to be_nil - end - - it "sets trackEvents true and doesn't set the reason if flag[:trackEvents] = true" do - flag = createFlag('fallthrough', fallthrough_with_rollout) - flag[:trackEvents] = true - detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) - r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) - expect(r[:trackEvents]).to eql(true) - expect(r[:reason]).to be_nil - end - end - end - - def createFlag(kind, rule) - if kind == 'rule' - { key: 'feature', on: true, rules: [rule], fallthrough: { variation: 0 }, variations: [ false, true ] } - elsif kind == 'fallthrough' - { key: 'feature', on: true, fallthrough: rule, variations: [ false, true ] } - else - { key: 'feature', on: true, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - end -end \ No newline at end of file diff --git a/spec/impl/event_summarizer_spec.rb b/spec/impl/event_summarizer_spec.rb new file mode 100644 index 00000000..bbd3f2ba --- /dev/null +++ b/spec/impl/event_summarizer_spec.rb @@ -0,0 +1,84 @@ +require "ldclient-rb/impl/event_types" + +require "events_test_util" +require "spec_helper" + +module LaunchDarkly + module Impl + describe EventSummarizer do + subject { EventSummarizer } + + let(:user) { { key: "key" } } + + it "does not add identify event to summary" do + es = subject.new + snapshot = es.snapshot + es.summarize_event({ kind: "identify", user: user }) + + expect(es.snapshot).to eq snapshot + end + + it "does not add custom event to summary" do + es = subject.new + snapshot = es.snapshot + es.summarize_event({ kind: "custom", key: "whatever", user: user }) + + expect(es.snapshot).to eq snapshot + end + + it "tracks start and end dates" do + es = subject.new + flag = { key: "key" } + event1 = make_eval_event(2000, user, 'flag1') + event2 = make_eval_event(1000, user, 'flag1') + event3 = make_eval_event(1500, user, 'flag1') + es.summarize_event(event1) + es.summarize_event(event2) + es.summarize_event(event3) + data = es.snapshot + + expect(data.start_date).to be 1000 + expect(data.end_date).to be 2000 + end + + it "counts events" do + es = subject.new + flag1 = { key: "key1", version: 11 } + flag2 = { key: "key2", version: 22 } + event1 = make_eval_event(0, user, 'key1', 11, 1, 'value1', nil, 'default1') + event2 = make_eval_event(0, user, 'key1', 11, 2, 'value2', nil, 'default1') + event3 = make_eval_event(0, user, 'key2', 22, 1, 'value99', nil, 'default2') + event4 = make_eval_event(0, user, 'key1', 11, 1, 'value99', nil, 'default1') + event5 = make_eval_event(0, user, 'badkey', nil, nil, 'default3', nil, 'default3') + [event1, 
event2, event3, event4, event5].each { |e| es.summarize_event(e) } + data = es.snapshot + + expectedCounters = { + 'key1' => EventSummaryFlagInfo.new( + 'default1', { + 11 => { + 1 => EventSummaryFlagVariationCounter.new('value1', 2), + 2 => EventSummaryFlagVariationCounter.new('value2', 1) + } + } + ), + 'key2' => EventSummaryFlagInfo.new( + 'default2', { + 22 => { + 1 => EventSummaryFlagVariationCounter.new('value99', 1) + } + } + ), + 'badkey' => EventSummaryFlagInfo.new( + 'default3', { + nil => { + nil => EventSummaryFlagVariationCounter.new('default3', 1) + } + } + ) + } + expect(data.counters).to eq expectedCounters + end + end + end +end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index b2afcc13..ba82617b 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -1,5 +1,6 @@ require "ldclient-rb" +require "events_test_util" require "mock_components" require "model_builders" require "spec_helper" @@ -19,9 +20,9 @@ def event_processor(client) context "evaluation events - variation" do it "unknown flag" do with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default" - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'badkey', nil, nil, 'default', nil, 'default', false, nil, nil + ) client.variation("badkey", basic_user, "default") end end @@ -31,15 +32,9 @@ def event_processor(client) td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", - key: "flagkey", - version: 1, - user: basic_user, - variation: 0, - value: "value", - default: "default" - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil + ) client.variation("flagkey", basic_user, "default") end end @@ -51,7 +46,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_eval_event) expect(logger).to receive(:error) client.variation("flagkey", nil, "default") end @@ -65,7 +60,7 @@ def event_processor(client) keyless_user = { key: nil } with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_eval_event) expect(logger).to receive(:warn) client.variation("flagkey", keyless_user, "default") end @@ -81,17 +76,10 @@ def event_processor(client) ) with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", - key: "flagkey", - version: 100, - user: basic_user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::rule_match(0, 'id'), + 'default', true, nil, nil + ) client.variation("flagkey", basic_user, "default") end end @@ -104,17 +92,10 @@ def event_processor(client) ) 
with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", - key: "flagkey", - version: 100, - user: basic_user, - variation: 0, - value: "value", - default: "default", - trackEvents: true, - reason: LaunchDarkly::EvaluationReason::fallthrough - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::fallthrough, + 'default', true, nil, nil + ) client.variation("flagkey", basic_user, "default") end end @@ -123,10 +104,11 @@ def event_processor(client) context "evaluation events - variation_detail" do it "unknown flag" do with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", key: "badkey", user: basic_user, value: "default", default: "default", - reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'badkey', nil, nil, 'default', + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND), + 'default', false, nil, nil + ) client.variation_detail("badkey", basic_user, "default") end end @@ -136,16 +118,10 @@ def event_processor(client) td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "feature", - key: "flagkey", - version: 1, - user: basic_user, - variation: 0, - value: "value", - default: "default", - reason: LaunchDarkly::EvaluationReason::off - )) + expect(event_processor(client)).to receive(:record_eval_event).with( + basic_user, 'flagkey', 1, 0, 'value', LaunchDarkly::EvaluationReason::off, + 'default', false, nil, nil + ) client.variation_detail("flagkey", basic_user, "default") end end @@ -157,7 +133,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_eval_event) expect(logger).to receive(:error) client.variation_detail("flagkey", nil, "default") end @@ -170,7 +146,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_eval_event) expect(logger).to receive(:warn) client.variation_detail("flagkey", { key: nil }, "default") end @@ -180,8 +156,7 @@ def event_processor(client) context "identify" do it "queues up an identify event" do with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "identify", key: basic_user[:key], user: basic_user)) + expect(event_processor(client)).to receive(:record_identify_event).with(basic_user) client.identify(basic_user) end end @@ -190,7 +165,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_identify_event) expect(logger).to receive(:warn) client.identify(nil) end @@ -200,7 +175,7 @@ def event_processor(client) logger = 
double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_identify_event) expect(logger).to receive(:warn) client.identify({ key: "" }) end @@ -210,36 +185,30 @@ def event_processor(client) context "track" do it "queues up an custom event" do with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: basic_user, data: 42)) + expect(event_processor(client)).to receive(:record_custom_event).with( + basic_user, 'custom_event_name', 42, nil + ) client.track("custom_event_name", basic_user, 42) end end it "can include a metric value" do with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: basic_user, metricValue: 1.5)) + expect(event_processor(client)).to receive(:record_custom_event).with( + basic_user, 'custom_event_name', nil, 1.5 + ) client.track("custom_event_name", basic_user, nil, 1.5) end end - it "includes contextKind with anonymous user" do - anon_user = { key: 'user-key', anonymous: true } - - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "custom", key: "custom_event_name", user: anon_user, metricValue: 2.2, contextKind: "anonymousUser")) - client.track("custom_event_name", anon_user, nil, 2.2) - end - end - it "sanitizes the user in the event" do numeric_key_user = { key: 33 } sanitized_user = { key: "33" } with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including(user: sanitized_user)) + expect(event_processor(client)).to receive(:record_custom_event).with( + sanitized_user, 'custom_event_name', nil, nil + ) client.track("custom_event_name", numeric_key_user, nil) end end @@ -248,7 +217,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_custom_event) expect(logger).to receive(:warn) client.track("custom_event_name", nil, nil) end @@ -258,7 +227,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_custom_event) expect(logger).to receive(:warn) client.track("custom_event_name", { key: nil }, nil) end @@ -270,8 +239,7 @@ def event_processor(client) anon_user = { key: "user-key", anonymous: true } with_client(test_config) do |client| - expect(event_processor(client)).to receive(:add_event).with(hash_including( - kind: "alias", key: basic_user[:key], contextKind: "user", previousKey: anon_user[:key], previousContextKind: "anonymousUser")) + expect(event_processor(client)).to receive(:record_alias_event).with(basic_user, anon_user) client.alias(basic_user, anon_user) end end @@ -280,7 +248,7 @@ def event_processor(client) logger = double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_alias_event) expect(logger).to receive(:warn) client.alias(nil, nil) end @@ -290,7 +258,7 @@ def event_processor(client) 
logger = double().as_null_object with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:add_event) + expect(event_processor(client)).not_to receive(:record_alias_event) expect(logger).to receive(:warn) client.alias({ key: nil }, { key: nil }) end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index c54ef444..9b87bc33 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -20,6 +20,9 @@ def ensure_stop(thing) end RSpec.configure do |config| + config.expect_with :rspec do |expectations| + expectations.max_formatted_output_length = 1000 # otherwise rspec tends to abbreviate our failure output and make it unreadable + end config.before(:each) do end end From 94f95f46199fc70b29ce9f6b3cf5e815b994bd9d Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 8 Jul 2022 14:21:39 -0400 Subject: [PATCH 242/292] Drop support for EOL ruby versions (#196) Ruby 2.5 was EOL 2021-04-05 As of June 27th, 2022, the latest jRuby is Ruby 2.6 compatible. --- .circleci/config.yml | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 7ec25b1a..3545fbc6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,9 +4,6 @@ workflows: version: 2 test: jobs: - - build-test-linux: - name: Ruby 2.5 - docker-image: cimg/ruby:2.5 - build-test-linux: name: Ruby 2.6 docker-image: cimg/ruby:2.6 @@ -16,6 +13,9 @@ workflows: - build-test-linux: name: Ruby 3.0 docker-image: cimg/ruby:3.0 + - build-test-linux: + name: Ruby 3.1 + docker-image: cimg/ruby:3.1 - build-test-linux: name: JRuby 9.3 docker-image: jruby:9.3-jdk diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 04262469..56335a4c 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -19,7 +19,7 @@ Gem::Specification.new do |spec| spec.files = FileList["lib/**/*", "README.md", "LICENSE.txt"] spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.require_paths = ["lib"] - spec.required_ruby_version = ">= 2.5.0" + spec.required_ruby_version = ">= 2.6.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" spec.add_development_dependency "bundler", "2.2.33" From 684eddcff4723354020947fd31e6249790d7db41 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Fri, 8 Jul 2022 14:22:08 -0400 Subject: [PATCH 243/292] Remove alias support (#195) --- Makefile | 2 +- contract-tests/client_entity.rb | 4 --- contract-tests/service.rb | 3 -- contract-tests/testharness-suppressions.txt | 4 +++ lib/ldclient-rb/events.rb | 24 ---------------- lib/ldclient-rb/impl/event_types.rb | 15 ---------- lib/ldclient-rb/ldclient.rb | 17 ----------- spec/events_spec.rb | 16 ----------- spec/events_test_util.rb | 4 --- spec/ldclient_events_spec.rb | 31 --------------------- 10 files changed, 5 insertions(+), 115 deletions(-) create mode 100644 contract-tests/testharness-suppressions.txt diff --git a/Makefile b/Makefile index 5b264f57..25811a67 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ start-contract-test-service-bg: run-contract-tests: @curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/v1.0.0/downloader/run.sh \ - | VERSION=v1 PARAMS="-url http://localhost:9000 -debug -stop-service-at-end $(TEST_HARNESS_PARAMS)" sh + | VERSION=v1 PARAMS="-url http://localhost:9000 -debug -stop-service-at-end -skip-from ./contract-tests/testharness-suppressions.txt $(TEST_HARNESS_PARAMS)" sh contract-tests: build-contract-tests start-contract-test-service-bg run-contract-tests diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 699d8e72..9acf61e3 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -73,10 +73,6 @@ def identify(params) @client.identify(params[:user]) end - def alias(params) - @client.alias(params[:user], params[:previousUser]) - end - def flush_events @client.flush end diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 54cc0b73..e737ba92 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -87,9 +87,6 @@ when "identifyEvent" client.identify(params[:identifyEvent]) return 201 - when "aliasEvent" - client.alias(params[:aliasEvent]) - return 201 when "flushEvents" client.flush_events return 201 diff --git a/contract-tests/testharness-suppressions.txt b/contract-tests/testharness-suppressions.txt new file mode 100644 index 00000000..2b6f158b --- /dev/null +++ b/contract-tests/testharness-suppressions.txt @@ -0,0 +1,4 @@ +events/alias events/from non-anonymous to non-anonymous +events/alias events/from non-anonymous to anonymous +events/alias events/from anonymous to non-anonymous +events/alias events/from anonymous to anonymous diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index f2b3e9f9..fec6ece2 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -54,9 +54,6 @@ def record_custom_event( ) end - def record_alias_event(user, previous_user) - end - def flush end @@ -173,16 +170,6 @@ def record_custom_event(user, key, data = nil, metric_value = nil) post_to_inbox(LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value)) end - def record_alias_event(user, previous_user) - post_to_inbox(LaunchDarkly::Impl::AliasEvent.new( - timestamp, - user.nil? ? nil : user[:key], - user_to_context_kind(user), - previous_user.nil? ? 
nil : previous_user[:key], - user_to_context_kind(previous_user) - )) - end - def flush # flush is done asynchronously post_to_inbox(FlushMessage.new) @@ -462,7 +449,6 @@ class EventOutputFormatter FEATURE_KIND = 'feature' IDENTIFY_KIND = 'identify' CUSTOM_KIND = 'custom' - ALIAS_KIND = 'alias' INDEX_KIND = 'index' DEBUG_KIND = 'debug' SUMMARY_KIND = 'summary' @@ -521,16 +507,6 @@ def make_output_events(events, summary) set_opt_context_kind(out, event.user) out - when LaunchDarkly::Impl::AliasEvent - { - kind: ALIAS_KIND, - creationDate: event.timestamp, - key: event.key, - contextKind: event.context_kind, - previousKey: event.previous_key, - previousContextKind: event.previous_context_kind - } - when LaunchDarkly::Impl::IndexEvent { kind: INDEX_KIND, diff --git a/lib/ldclient-rb/impl/event_types.rb b/lib/ldclient-rb/impl/event_types.rb index 6ca043ba..3a30dbb0 100644 --- a/lib/ldclient-rb/impl/event_types.rb +++ b/lib/ldclient-rb/impl/event_types.rb @@ -57,21 +57,6 @@ def initialize(timestamp, user, key, data = nil, metric_value = nil) attr_reader :metric_value end - class AliasEvent < Event - def initialize(timestamp, key, context_kind, previous_key, previous_context_kind) - super(timestamp, nil) - @key = key - @context_kind = context_kind - @previous_key = previous_key - @previous_context_kind = previous_context_kind - end - - attr_reader :key - attr_reader :context_kind - attr_reader :previous_key - attr_reader :previous_context_kind - end - class IndexEvent < Event def initialize(timestamp, user) super(timestamp, user) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 70dc6210..3fd524ab 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -283,23 +283,6 @@ def track(event_name, user, data = nil, metric_value = nil) @event_processor.record_custom_event(user, event_name, data, metric_value) end - # - # Associates a new and old user object for analytics purposes via an alias event. - # - # @param current_context [Hash] The current version of a user. - # @param previous_context [Hash] The previous version of a user. - # @return [void] - # - def alias(current_context, previous_context) - if !current_context || current_context[:key].nil? || !previous_context || previous_context[:key].nil? - @config.logger.warn("Alias called with nil user or nil user key!") - return - end - sanitize_user(current_context) - sanitize_user(previous_context) - @event_processor.record_alias_event(current_context, previous_context) - end - # # Returns all feature flag values for the given user. 
# diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 894c3f70..c1449720 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -343,22 +343,6 @@ def with_processor_and_sender(config) end end - it "queues alias event" do - with_processor_and_sender(default_config) do |ep, sender| - ep.record_alias_event({ key: 'a' }, { key: 'b', anonymous: true }) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly({ - creationDate: starting_timestamp, - kind: 'alias', - key: 'a', - contextKind: 'user', - previousKey: 'b', - previousContextKind: 'anonymousUser' - }) - end - end - it "treats nil value for custom the same as an empty hash" do with_processor_and_sender(default_config) do |ep, sender| user_with_nil_custom = { key: "userkey", custom: nil } diff --git a/spec/events_test_util.rb b/spec/events_test_util.rb index 66b5b97d..45c27795 100644 --- a/spec/events_test_util.rb +++ b/spec/events_test_util.rb @@ -13,7 +13,3 @@ def make_identify_event(timestamp, user) def make_custom_event(timestamp, user, key, data = nil, metric_value = nil) LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value) end - -def make_alias_event(timestamp, key, context_kind, previous_key, previous_context_kind) - LaunchDarkly::Impl::AliasEvent.new(timestamp, key, context_kind, previous_key, previous_context_kind) -end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index ba82617b..4c77787b 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -233,36 +233,5 @@ def event_processor(client) end end end - - context "alias" do - it "queues up an alias event" do - anon_user = { key: "user-key", anonymous: true } - - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_alias_event).with(basic_user, anon_user) - client.alias(basic_user, anon_user) - end - end - - it "does not send event, and logs warning, if user is nil" do - logger = double().as_null_object - - with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:record_alias_event) - expect(logger).to receive(:warn) - client.alias(nil, nil) - end - end - - it "does not send event, and logs warning, if user key is nil" do - logger = double().as_null_object - - with_client(test_config(logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:record_alias_event) - expect(logger).to receive(:warn) - client.alias({ key: nil }, { key: nil }) - end - end - end end end From 2635e0d1af1bd1d80f4e1cd1b67ff3ece3cb1d36 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Mon, 11 Jul 2022 09:50:41 -0400 Subject: [PATCH 244/292] Add polling support for contract test service (#198) --- contract-tests/client_entity.rb | 5 +++++ contract-tests/service.rb | 1 + 2 files changed, 6 insertions(+) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 699d8e72..2882068b 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -14,6 +14,11 @@ def initialize(log, config) streaming = config[:streaming] opts[:stream_uri] = streaming[:baseUri] if !streaming[:baseUri].nil? opts[:initial_reconnect_delay] = streaming[:initialRetryDelayMs] / 1_000.0 if !streaming[:initialRetryDelayMs].nil? + elsif config[:polling] + polling = config[:polling] + opts[:stream] = false + opts[:base_uri] = polling[:baseUri] if !polling[:baseUri].nil? + opts[:poll_interval] = polling[:pollIntervalMs] / 1_000.0 if !polling[:pollIntervalMs].nil? 
end if config[:events] diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 54cc0b73..9534c25d 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -25,6 +25,7 @@ { capabilities: [ 'server-side', + 'server-side-polling', 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', From a656f9dd64070b727a93553c652488bc691fd72b Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Mon, 11 Jul 2022 10:03:21 -0400 Subject: [PATCH 245/292] Update rubocop and enable in CI (#197) Several of the Rubocop cop definitions have been renamed or moved to entirely other gems. This brings the configuration up to date with the latest naming conventions. --- .circleci/config.yml | 7 + .hound.yml | 2 - .rubocop.yml | 398 ++++++++++++++++-- contract-tests/client_entity.rb | 2 +- contract-tests/service.rb | 2 +- lib/ldclient-rb/config.rb | 10 +- lib/ldclient-rb/evaluation_detail.rb | 14 +- lib/ldclient-rb/events.rb | 26 +- lib/ldclient-rb/impl/big_segments.rb | 6 +- lib/ldclient-rb/impl/diagnostic_events.rb | 12 +- lib/ldclient-rb/impl/evaluator.rb | 12 +- lib/ldclient-rb/impl/event_sender.rb | 2 +- .../impl/integrations/consul_impl.rb | 6 +- .../impl/integrations/dynamodb_impl.rb | 40 +- .../impl/integrations/file_data_source.rb | 8 +- .../impl/integrations/redis_impl.rb | 1 + lib/ldclient-rb/impl/util.rb | 2 +- lib/ldclient-rb/in_memory_store.rb | 4 +- lib/ldclient-rb/integrations/file_data.rb | 2 +- lib/ldclient-rb/integrations/test_data.rb | 2 +- .../integrations/test_data/flag_builder.rb | 24 +- .../integrations/util/store_wrapper.rb | 2 +- lib/ldclient-rb/ldclient.rb | 14 +- lib/ldclient-rb/requestor.rb | 4 +- lib/ldclient-rb/stream.rb | 8 +- lib/ldclient-rb/user_filter.rb | 2 +- lib/ldclient-rb/util.rb | 4 +- spec/big_segment_store_spec_base.rb | 26 +- spec/diagnostic_events_spec.rb | 16 +- spec/event_sender_spec.rb | 8 +- spec/events_spec.rb | 36 +- spec/expiring_cache_spec.rb | 6 +- spec/feature_store_spec_base.rb | 14 +- spec/flags_state_spec.rb | 11 +- spec/http_util.rb | 4 +- spec/impl/big_segments_spec.rb | 6 +- spec/impl/evaluator_big_segments_spec.rb | 84 ++-- spec/impl/evaluator_bucketing_spec.rb | 38 +- spec/impl/evaluator_operators_spec.rb | 20 +- spec/impl/evaluator_segment_spec.rb | 14 +- spec/impl/evaluator_spec.rb | 60 +-- spec/impl/evaluator_spec_base.rb | 8 +- spec/impl/event_summarizer_spec.rb | 14 +- spec/impl/repeating_task_spec.rb | 2 +- .../integrations/consul_feature_store_spec.rb | 4 +- spec/integrations/dynamodb_stores_spec.rb | 26 +- spec/integrations/redis_stores_spec.rb | 4 +- spec/integrations/test_data_spec.rb | 14 +- spec/ldclient_end_to_end_spec.rb | 16 +- spec/ldclient_evaluation_spec.rb | 30 +- spec/ldclient_events_spec.rb | 24 +- spec/ldclient_spec.rb | 10 +- spec/mock_components.rb | 2 +- spec/model_builders.rb | 14 +- spec/polling_spec.rb | 6 +- spec/requestor_spec.rb | 6 +- spec/segment_store_spec_base.rb | 2 +- 57 files changed, 746 insertions(+), 395 deletions(-) delete mode 100644 .hound.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index 3545fbc6..7e147b3e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,9 +1,16 @@ version: 2.1 +orbs: + rubocop: hanachin/rubocop@0.0.6 + workflows: version: 2 test: jobs: + - rubocop/rubocop: + after-install-rubocop: + - run: gem install rubocop-performance + - run: gem install rubocop-rails - build-test-linux: name: Ruby 2.6 docker-image: cimg/ruby:2.6 diff --git a/.hound.yml b/.hound.yml deleted file mode 100644 
index 2606b3b5..00000000 --- a/.hound.yml +++ /dev/null @@ -1,2 +0,0 @@ -ruby: - config_file: .rubocop diff --git a/.rubocop.yml b/.rubocop.yml index 85b05f8b..a63d0b3a 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,11 +1,22 @@ +require: + - rubocop-rails + - rubocop-performance + AllCops: - Exclude: - - db/schema.rb + Include: + - lib/**/*.rb + - spec/**/*.rb + - contract-tests/**/*.rb -Style/AccessorMethodName: +Naming/AccessorMethodName: Description: Check the naming of accessor methods for get_/set_. Enabled: false +Style/AccessModifierDeclarations: + Description: 'Access modifiers should be declared to apply to a group of methods or inline before each method, depending on configuration.' + StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#alias-method' + Enabled: false + Style/Alias: Description: 'Use alias_method instead of alias.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#alias-method' @@ -21,16 +32,28 @@ Style/AsciiComments: StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#english-comments' Enabled: false -Style/AsciiIdentifiers: +Naming/AsciiIdentifiers: Description: 'Use only ascii symbols in identifiers.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#english-identifiers' Enabled: false +Naming/VariableName: + Description: 'Makes sure that all variables use the configured style, snake_case or camelCase, for their names.' + Enabled: false + Style/Attr: Description: 'Checks for uses of Module#attr.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#attr' Enabled: false +Metrics/AbcSize: + Description: 'Checks that the ABC size of methods is not higher than the configured maximum.' + Enabled: false + +Metrics/BlockLength: + Description: 'Checks if the length of a block exceeds some maximum value.' + Enabled: false + Metrics/BlockNesting: Description: 'Avoid excessive block nesting' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#three-is-the-number-thou-shalt-count' @@ -128,16 +151,20 @@ Style/EvenOdd: StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#predicate-methods' Enabled: false -Style/FileName: +Naming/FileName: Description: 'Use snake_case for source file names.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#snake-case-files' Enabled: false -Style/FlipFlop: +Lint/FlipFlop: Description: 'Checks for flip flops' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-flip-flops' Enabled: false +Style/FrozenStringLiteralComment: + Description: 'Helps you transition from mutable string literals to frozen string literals.' + Enabled: false + Style/FormatString: Description: 'Enforce the use of Kernel#sprintf, Kernel#format or String#%.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#sprintf' @@ -186,10 +213,10 @@ Style/LineEndConcatenation: line end. Enabled: false -Metrics/LineLength: +Layout/LineLength: Description: 'Limit lines to 150 characters.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#80-character-limits' - Max: 150 + Max: 180 Metrics/MethodLength: Description: 'Avoid methods longer than 10 lines of code.' @@ -242,7 +269,7 @@ Style/OneLineConditional: StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#ternary-operator' Enabled: false -Style/OpMethod: +Naming/BinaryOperatorParameterName: Description: 'When defining binary operators, name the argument other.' 
StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#other-arg' Enabled: false @@ -252,6 +279,9 @@ Metrics/ParameterLists: StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#too-many-params' Enabled: false +Metrics/PerceivedComplexity: + Enabled: false + Style/PercentLiteralDelimiters: Description: 'Use `%`-literal delimiters consistently' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#percent-literal-braces' @@ -262,10 +292,10 @@ Style/PerlBackrefs: StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#no-perl-regexp-last-matchers' Enabled: false -Style/PredicateName: +Naming/PredicateName: Description: 'Check the names of predicate methods.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#bool-methods-qmark' - NamePrefixBlacklist: + ForbiddenPrefixes: - is_ Exclude: - spec/**/* @@ -316,17 +346,20 @@ Style/StringLiterals: Description: 'Checks if uses of quotes match the configured preference.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#consistent-string-literals' EnforcedStyle: double_quotes - Enabled: true + Enabled: false Style/TrailingCommaInArguments: Description: 'Checks for trailing comma in argument lists.' StyleGuide: '#no-trailing-params-comma' Enabled: true -Style/TrailingCommaInLiteral: +Style/TrailingCommaInArrayLiteral: Description: 'Checks for trailing comma in array and hash literals.' - StyleGuide: '#no-trailing-array-commas' - Enabled: true + EnforcedStyleForMultiline: comma + +Style/TrailingCommaInHashLiteral: + Description: 'Checks for trailing comma in array and hash literals.' + EnforcedStyleForMultiline: comma Style/TrivialAccessors: Description: 'Prefer attr_* methods to trivial readers/writers.' @@ -361,11 +394,12 @@ Style/WordArray: Layout/DotPosition: Description: 'Checks the position of the dot in multi-line method calls.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#consistent-multi-line-chains' - EnforcedStyle: trailing + EnforcedStyle: leading Layout/ExtraSpacing: Description: 'Do not use unnecessary spacing.' Enabled: true + AllowBeforeTrailingComments: true Layout/MultilineOperationIndentation: Description: >- @@ -379,6 +413,13 @@ Layout/InitialIndentation: Checks the indentation of the first non-blank non-comment line in a file. Enabled: false +Layout/SpaceInsideArrayLiteralBrackets: + Description: "Checks that brackets used for array literals have or don't have surrounding space depending on configuration." + Enabled: false + +Layout/TrailingWhitespace: + Description: "Ensures all trailing whitespace has been removed" + Enabled: true # Lint @@ -404,7 +445,7 @@ Lint/CircularArgumentReference: Description: "Don't refer to the keyword argument in the default value." Enabled: false -Lint/ConditionPosition: +Layout/ConditionPosition: Description: >- Checks for condition placed in a confusing position relative to the keyword. @@ -415,7 +456,7 @@ Lint/DeprecatedClassMethods: Description: 'Check for deprecated class method calls.' Enabled: false -Lint/DuplicatedKey: +Lint/DuplicateHashKey: Description: 'Check for duplicate keys in hash literals.' Enabled: false @@ -431,18 +472,12 @@ Lint/FormatParameterMismatch: Description: 'The number of parameters to format/sprint must match the fields.' Enabled: false -Lint/HandleExceptions: +Lint/SuppressedException: Description: "Don't suppress exception." 
StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#dont-hide-exceptions' Enabled: false -Lint/InvalidCharacterLiteral: - Description: >- - Checks for invalid character literals with a non-escaped - whitespace character. - Enabled: false - -Lint/LiteralInCondition: +Lint/LiteralAsCondition: Description: 'Checks of literals used in conditions.' Enabled: false @@ -483,7 +518,7 @@ Lint/UnderscorePrefixedVariableName: Description: 'Do not use prefix `_` for a variable that is used.' Enabled: false -Lint/UnneededDisable: +Lint/RedundantCopDisableDirective: Description: >- Checks for rubocop:disable comments that can be removed. Note: this cop is not disabled when disabling all cops. @@ -529,7 +564,7 @@ Performance/ReverseEach: Reference: 'https://github.com/JuanitoFatas/fast-ruby#enumerablereverseeach-vs-enumerablereverse_each-code' Enabled: false -Performance/Sample: +Style/Sample: Description: >- Use `sample` instead of `shuffle.first`, `shuffle.last`, and `shuffle[Fixnum]`. @@ -598,3 +633,310 @@ Rails/TimeZone: Rails/Validation: Description: 'Use validates :attribute, hash of validations.' Enabled: false + +# Disabled temporarily while we bring code base inline +Layout/ArgumentAlignment: + Enabled: false + +Layout/ArrayAlignment: + Enabled: false + +Layout/BlockEndNewline: + Enabled: false + +Layout/CaseIndentation: + Enabled: false + +Layout/ClosingHeredocIndentation: + Enabled: false + +Layout/ClosingParenthesisIndentation: + Enabled: false + +Layout/CommentIndentation: + Enabled: false + +Layout/ElseAlignment: + Enabled: false + +Layout/EmptyLineAfterGuardClause: + Enabled: false + +Layout/EmptyLineBetweenDefs: + Enabled: false + +Layout/EmptyLines: + Enabled: false + +Layout/EmptyLinesAroundBlockBody: + Enabled: false + +Layout/EmptyLinesAroundMethodBody: + Enabled: false + +Layout/EmptyLinesAroundModuleBody: + Enabled: false + +Layout/EndAlignment: + Enabled: false + +Layout/FirstArgumentIndentation: + Enabled: false + +Layout/FirstHashElementIndentation: + Enabled: false + +Layout/HashAlignment: + Enabled: false + +Layout/HeredocIndentation: + Enabled: false + +Layout/IndentationWidth: + Enabled: false + +Layout/LeadingCommentSpace: + Enabled: false + +Layout/LeadingEmptyLines: + Enabled: false + +Layout/MultilineArrayBraceLayout: + Enabled: false + +Layout/MultilineBlockLayout: + Enabled: false + +Layout/MultilineHashBraceLayout: + Enabled: false + +Layout/MultilineMethodCallBraceLayout: + Enabled: false + +Layout/MultilineMethodCallIndentation: + Enabled: false + +Layout/ParameterAlignment: + Enabled: false + +Layout/SpaceAfterComma: + Enabled: false + +Layout/SpaceAroundBlockParameters: + Enabled: false + +Layout/SpaceAroundEqualsInParameterDefault: + Enabled: false + +Layout/SpaceAroundOperators: + Enabled: false + +Layout/SpaceBeforeBlockBraces: + Enabled: false + +Layout/SpaceBeforeComma: + Enabled: false + +Layout/SpaceInsideBlockBraces: + Enabled: false + +Layout/SpaceInsideHashLiteralBraces: + Enabled: false + +Layout/SpaceInsideReferenceBrackets: + Enabled: false + +Layout/TrailingEmptyLines: + Enabled: false + +Lint/ConstantDefinitionInBlock: + Enabled: false + +Lint/IneffectiveAccessModifier: + Enabled: false + +Lint/MissingCopEnableDirective: + Enabled: false + +Lint/RedundantRequireStatement: + Enabled: false + +Lint/StructNewOverride: + Enabled: false + +Lint/UnusedBlockArgument: + Enabled: false + +Lint/UnusedMethodArgument: + Enabled: false + +Lint/UselessAccessModifier: + Enabled: false + +Lint/UselessAssignment: + Enabled: false + +Lint/UselessMethodDefinition: 
+ Enabled: false + +Naming/BlockParameterName: + Enabled: false + +Naming/HeredocDelimiterNaming: + Enabled: false + +Naming/MethodParameterName: + Enabled: false + +Naming/RescuedExceptionsVariableName: + Enabled: false + +Naming/VariableNumber: + Enabled: false + +Rails/SkipsModelValidations: + Enabled: false + +Style/AccessorGrouping: + Enabled: false + +Style/AndOr: + Enabled: false + +Style/BlockDelimiters: + Enabled: false + +Style/CaseLikeIf: + Enabled: false + +Style/CombinableLoops: + Enabled: false + +Style/CommentedKeyword: + Enabled: false + +Style/ConditionalAssignment: + Enabled: false + +Style/DefWithParentheses: + Enabled: false + +Style/EmptyElse: + Enabled: false + +Style/EmptyMethod: + Enabled: false + +Style/ExplicitBlockArgument: + Enabled: false + +Style/For: + Enabled: false + +Style/FormatStringToken: + Enabled: false + +Style/GlobalStdStream: + Enabled: false + +Style/HashEachMethods: + Enabled: false + +Style/HashSyntax: + Enabled: false + +Style/InfiniteLoop: + Enabled: false + +Style/InverseMethods: + Enabled: false + +Style/MethodCallWithoutArgsParentheses: + Enabled: false + +Style/MissingRespondToMissing: + Enabled: false + +Style/MultilineIfThen: + Enabled: false + +Style/MultilineTernaryOperator: + Enabled: false + +Style/MultipleComparison: + Enabled: false + +Style/MutableConstant: + Enabled: false + +Style/NumericPredicate: + Enabled: false + +Style/OptionalBooleanParameter: + Enabled: false + +Style/ParallelAssignment: + Enabled: false + +Style/RedundantAssignment: + Enabled: false + +Style/RedundantBegin: + Enabled: false + +Style/RedundantCondition: + Enabled: false + +Style/RedundantException: + Enabled: false + +Style/RedundantFileExtensionInRequire: + Enabled: false + +Style/RedundantParentheses: + Enabled: false + +Style/RedundantRegexpEscape: + Enabled: false + +Style/RedundantReturn: + Enabled: false + +Style/RedundantSelf: + Enabled: false + +Style/RescueStandardError: + Enabled: false + +Style/SafeNavigation: + Enabled: false + +Style/Semicolon: + Enabled: false + +Style/SlicingWithRange: + Enabled: false + +Style/SoleNestedConditional: + Enabled: false + +Style/StringConcatenation: + Enabled: false + +Style/SymbolArray: + Enabled: false + +Style/SymbolProc: + Enabled: false + +Style/TernaryParentheses: + Enabled: false + +Style/TrailingUnderscoreVariable: + Enabled: false + +Style/WhileUntilDo: + Enabled: false + +Style/ZeroLengthPredicate: + Enabled: false diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 588402ac..9ef9cc5a 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -29,7 +29,7 @@ def initialize(log, config) opts[:all_attributes_private] = !!events[:allAttributesPrivate] opts[:private_attribute_names] = events[:globalPrivateAttributes] opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) if !events[:flushIntervalMs].nil? 
- opts[:inline_users_in_events] = events[:inlineUsers] || false + opts[:inline_users_in_events] = events[:inlineUsers] || false else opts[:send_events] = false end diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 7b546a06..b84881dc 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -29,7 +29,7 @@ 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', - ] + ], }.to_json end diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index ed33e08b..795781c8 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -124,7 +124,7 @@ def stream? def use_ldd? @use_ldd end - + # # Whether the client should be initialized in offline mode. In offline mode, default values are # returned for all flags and no remote network requests are made. @@ -228,7 +228,7 @@ def offline? # @see #all_attributes_private # attr_reader :private_attribute_names - + # # Whether to send events back to LaunchDarkly. This differs from {#offline?} in that it affects # only the sending of client-side events, not streaming or polling for events from the server. @@ -286,7 +286,7 @@ def offline? # @deprecated This is replaced by {#data_source}. attr_reader :update_processor - + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor_factory @@ -426,8 +426,8 @@ def self.default_connect_timeout # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) - Rails.logger - else + Rails.logger + else log = ::Logger.new($stdout) log.level = ::Logger::WARN log diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 676da7a3..cafeff73 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -70,20 +70,20 @@ def ==(other) class EvaluationReason # Value for {#kind} indicating that the flag was off and therefore returned its configured off value. OFF = :OFF - + # Value for {#kind} indicating that the flag was on but the user did not match any targets or rules. FALLTHROUGH = :FALLTHROUGH - + # Value for {#kind} indicating that the user key was specifically targeted for this flag. TARGET_MATCH = :TARGET_MATCH - + # Value for {#kind} indicating that the user matched one of the flag's rules. RULE_MATCH = :RULE_MATCH - + # Value for {#kind} indicating that the flag was considered off because it had at least one # prerequisite flag that either was off or did not return the desired variation. PREREQUISITE_FAILED = :PREREQUISITE_FAILED - + # Value for {#kind} indicating that the flag could not be evaluated, e.g. because it does not exist # or due to an unexpected error. In this case the result value will be the application default value # that the caller passed to the client. Check {#error_kind} for more details on the problem. @@ -178,7 +178,7 @@ def self.target_match def self.rule_match(rule_index, rule_id, in_experiment=false) raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? 
String) # in test data, ID could be nil - + if in_experiment er = new(:RULE_MATCH, rule_index, rule_id, nil, nil, true) else @@ -348,7 +348,7 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_exper ERROR_FLAG_NOT_FOUND => make_error(ERROR_FLAG_NOT_FOUND), ERROR_MALFORMED_FLAG => make_error(ERROR_MALFORMED_FLAG), ERROR_USER_NOT_SPECIFIED => make_error(ERROR_USER_NOT_SPECIFIED), - ERROR_EXCEPTION => make_error(ERROR_EXCEPTION) + ERROR_EXCEPTION => make_error(ERROR_EXCEPTION), } end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index fec6ece2..cd00f294 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -90,7 +90,7 @@ class SynchronousMessage def initialize @reply = Concurrent::Semaphore.new(0) end - + def completed @reply.release end @@ -234,7 +234,7 @@ def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @last_known_past_time = Concurrent::AtomicReference.new(0) @deduplicated_users = 0 @events_in_last_batch = 0 - + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) @@ -352,7 +352,7 @@ def trigger_flush(outbox, flush_workers) return end - payload = outbox.get_payload + payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? count = payload.events.length + (payload.summary.counters.empty? ? 0 : 1) @events_in_last_batch = count @@ -470,13 +470,13 @@ def make_output_events(events, summary) private def make_output_event(event) case event - + when LaunchDarkly::Impl::EvalEvent out = { kind: FEATURE_KIND, creationDate: event.timestamp, key: event.key, - value: event.value + value: event.value, } out[:default] = event.default if !event.default.nil? out[:variation] = event.variation if !event.variation.nil? @@ -492,14 +492,14 @@ def make_output_events(events, summary) kind: IDENTIFY_KIND, creationDate: event.timestamp, key: event.user[:key].to_s, - user: process_user(event.user) + user: process_user(event.user), } - + when LaunchDarkly::Impl::CustomEvent out = { kind: CUSTOM_KIND, creationDate: event.timestamp, - key: event.key + key: event.key, } out[:data] = event.data if !event.data.nil? set_user_or_user_key(out, event.user) @@ -511,9 +511,9 @@ def make_output_events(events, summary) { kind: INDEX_KIND, creationDate: event.timestamp, - user: process_user(event.user) + user: process_user(event.user), } - + when LaunchDarkly::Impl::DebugEvent original = event.eval_event out = { @@ -521,7 +521,7 @@ def make_output_events(events, summary) creationDate: original.timestamp, key: original.key, user: process_user(original.user), - value: original.value + value: original.value, } out[:default] = original.default if !original.default.nil? out[:variation] = original.variation if !original.variation.nil? @@ -545,7 +545,7 @@ def make_output_events(events, summary) variations.each do |variation, counter| c = { value: counter.value, - count: counter.count + count: counter.count, } c[:variation] = variation if !variation.nil? if version.nil? @@ -562,7 +562,7 @@ def make_output_events(events, summary) kind: SUMMARY_KIND, startDate: summary[:start_date], endDate: summary[:end_date], - features: flags + features: flags, } end diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index c2d82cd8..3ec02671 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -65,18 +65,18 @@ def poll_store_and_update_status if !@store.nil? 
begin metadata = @store.get_metadata - new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || is_stale(metadata.last_up_to_date)) + new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || stale?(metadata.last_up_to_date)) rescue => e LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e) end end @last_status = new_status @status_provider.update_status(new_status) - + new_status end - def is_stale(timestamp) + def stale?(timestamp) !timestamp || ((Impl::Util.current_time_millis - timestamp) >= @stale_after_millis) end diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index 13a55756..21123940 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -9,7 +9,7 @@ class DiagnosticAccumulator def self.create_diagnostic_id(sdk_key) { diagnosticId: SecureRandom.uuid, - sdkKeySuffix: sdk_key[-6..-1] || sdk_key + sdkKeySuffix: sdk_key[-6..-1] || sdk_key, } end @@ -31,10 +31,10 @@ def create_init_event(config) id: @id, configuration: DiagnosticAccumulator.make_config_data(config), sdk: DiagnosticAccumulator.make_sdk_data(config), - platform: DiagnosticAccumulator.make_platform_data + platform: DiagnosticAccumulator.make_platform_data, } end - + def record_stream_init(timestamp, failed, duration_millis) @lock.synchronize do @stream_inits.push({ timestamp: timestamp, failed: failed, durationMillis: duration_millis }) @@ -57,7 +57,7 @@ def create_periodic_event_and_reset(dropped_events, deduplicated_users, events_i droppedEvents: dropped_events, deduplicatedUsers: deduplicated_users, eventsInLastBatch: events_in_last_batch, - streamInits: previous_stream_inits + streamInits: previous_stream_inits, } @data_since_date = current_time event @@ -88,7 +88,7 @@ def self.make_config_data(config) def self.make_sdk_data(config) ret = { name: 'ruby-server-sdk', - version: LaunchDarkly::VERSION + version: LaunchDarkly::VERSION, } if config.wrapper_name ret[:wrapperName] = config.wrapper_name @@ -105,7 +105,7 @@ def self.make_platform_data osName: self.normalize_os_name(conf['host_os']), osVersion: 'unknown', # there seems to be no portable way to detect this in Ruby rubyVersion: conf['ruby_version'], - rubyImplementation: Object.constants.include?(:RUBY_ENGINE) ? RUBY_ENGINE : 'unknown' + rubyImplementation: Object.constants.include?(:RUBY_ENGINE) ? RUBY_ENGINE : 'unknown', } end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index ed94719e..7ff4df7b 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -29,7 +29,7 @@ def initialize(get_flag, get_segment, get_big_segments_membership, logger) @get_big_segments_membership = get_big_segments_membership @logger = logger end - + # Used internally to hold an evaluation result and additional state that may be accumulated during an # evaluation. It's simpler and a bit more efficient to represent these as mutable properties rather than # trying to use a pure functional approach, and since we're not exposing this object to any application code @@ -57,14 +57,14 @@ def self.error_result(errorKind, value = nil) # # @param flag [Object] the flag # @param user [Object] the user properties - # @return [EvalResult] the evaluation result + # @return [EvalResult] the evaluation result def evaluate(flag, user) result = EvalResult.new if user.nil? || user[:key].nil? 
result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED) return result end - + detail = eval_internal(flag, user, result) if !result.big_segments_status.nil? # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at @@ -84,7 +84,7 @@ def self.make_big_segment_ref(segment) # method is visible for testing end private - + def eval_internal(flag, user, state) if !flag[:on] return get_off_value(flag, EvaluationReason::off) @@ -103,7 +103,7 @@ def eval_internal(flag, user, state) end end end - + # Check custom rules rules = flag[:rules] || [] rules.each_index do |i| @@ -245,7 +245,7 @@ def segment_rule_match_user(rule, user, segment_key, salt) # If the weight is absent, this rule matches return true if !rule[:weight] - + # All of the clauses are met. See if the user buckets in bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 442af033..5a77a8c1 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -49,7 +49,7 @@ def send_event_data(event_data, description, is_diagnostic) end response = http_client.request("POST", uri, { headers: headers, - body: event_data + body: event_data, }) rescue StandardError => exn @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 2f186dab..f381d578 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -51,10 +51,10 @@ def init_internal(all_data) unused_old_keys.each do |key| ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) end - + # Now set the special key that we check in initialized_internal? ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => inited_key, 'Value' => '' } }) - + ConsulUtil.batch_operations(ops) @logger.info { "Initialized database with #{num_items} items" } @@ -132,7 +132,7 @@ def item_key(kind, key) def kind_key(kind) @prefix + kind[:namespace] + '/' end - + def inited_key @prefix + '$inited' end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 7244fc9b..bb2fd2df 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -16,7 +16,7 @@ class DynamoDBStoreImplBase AWS_SDK_ENABLED = false end end - + PARTITION_KEY = "namespace" SORT_KEY = "key" @@ -24,20 +24,20 @@ def initialize(table_name, opts) if !AWS_SDK_ENABLED raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem") end - + @table_name = table_name @prefix = opts[:prefix] ? (opts[:prefix] + ":") : "" @logger = opts[:logger] || Config.default_logger - + if !opts[:existing_client].nil? @client = opts[:existing_client] else @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - + @logger.info("#{description}: using DynamoDB table \"#{table_name}\"") end - + def stop # AWS client doesn't seem to have a close method end @@ -46,7 +46,7 @@ def stop "DynamoDB" end end - + # # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. 
# @@ -83,7 +83,7 @@ def init_internal(all_data) del_item = make_keys_hash(tuple[0], tuple[1]) requests.push({ delete_request: { key: del_item } }) end - + # Now set the special key that we check in initialized_internal? inited_item = make_keys_hash(inited_key, inited_key) requests.push({ put_request: { item: inited_item } }) @@ -123,11 +123,11 @@ def upsert_internal(kind, new_item) expression_attribute_names: { "#namespace" => PARTITION_KEY, "#key" => SORT_KEY, - "#version" => VERSION_ATTRIBUTE + "#version" => VERSION_ATTRIBUTE, }, expression_attribute_values: { - ":version" => new_item[:version] - } + ":version" => new_item[:version], + }, }) new_item rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException @@ -159,7 +159,7 @@ def inited_key def make_keys_hash(namespace, key) { PARTITION_KEY => namespace, - SORT_KEY => key + SORT_KEY => key, } end @@ -170,16 +170,16 @@ def make_query_for_kind(kind) key_conditions: { PARTITION_KEY => { comparison_operator: "EQ", - attribute_value_list: [ namespace_for_kind(kind) ] - } - } + attribute_value_list: [ namespace_for_kind(kind) ], + }, + }, } end def get_item_by_keys(namespace, key) @client.get_item({ table_name: @table_name, - key: make_keys_hash(namespace, key) + key: make_keys_hash(namespace, key), }) end @@ -190,8 +190,8 @@ def read_existing_keys(kinds) projection_expression: "#namespace, #key", expression_attribute_names: { "#namespace" => PARTITION_KEY, - "#key" => SORT_KEY - } + "#key" => SORT_KEY, + }, }) while true resp = @client.query(req) @@ -210,7 +210,7 @@ def read_existing_keys(kinds) def marshal_item(kind, item) make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item) + ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item), }) end @@ -243,7 +243,7 @@ def get_metadata table_name: @table_name, key: { PARTITION_KEY => key, - SORT_KEY => key + SORT_KEY => key, } ) timestamp = data.item && data.item[ATTR_SYNC_TIME] ? @@ -256,7 +256,7 @@ def get_membership(user_hash) table_name: @table_name, key: { PARTITION_KEY => @prefix + KEY_USER_DATA, - SORT_KEY => user_hash + SORT_KEY => user_hash, }) return nil if !data.item excluded_refs = data.item[ATTR_EXCLUDED] || [] diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb index d89e4e95..a1e4b326 100644 --- a/lib/ldclient-rb/impl/integrations/file_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -48,7 +48,7 @@ def initialized? def start ready = Concurrent::Event.new - + # We will return immediately regardless of whether the file load succeeded or failed - # the difference can be detected by checking "initialized?" ready.set @@ -63,7 +63,7 @@ def start ready end - + def stop @listener.stop if !@listener.nil? end @@ -73,7 +73,7 @@ def stop def load_all all_data = { FEATURES => {}, - SEGMENTS => {} + SEGMENTS => {}, } @paths.each do |path| begin @@ -134,7 +134,7 @@ def make_flag_with_value(key, value) key: key, on: true, fallthrough: { variation: 0 }, - variations: [ value ] + variations: [ value ], } end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 193a50da..9bda5460 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -135,6 +135,7 @@ def upsert_internal(kind, new_item) else final_item = old_item action = new_item[:deleted] ? 
"delete" : "update" + # rubocop:disable Layout/LineLength @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index 5fe93a2b..a3f28d09 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,7 +1,7 @@ module LaunchDarkly module Impl module Util - def self.is_bool(aObject) + def self.bool?(aObject) [true,false].include? aObject end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 576d90c7..dcef4529 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -14,13 +14,13 @@ module LaunchDarkly FEATURES = { namespace: "features", priority: 1, # that is, features should be stored after segments - get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } } + get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }, }.freeze # @private SEGMENTS = { namespace: "segments", - priority: 0 + priority: 0, }.freeze # diff --git a/lib/ldclient-rb/integrations/file_data.rb b/lib/ldclient-rb/integrations/file_data.rb index 370d3aa6..b13128a2 100644 --- a/lib/ldclient-rb/integrations/file_data.rb +++ b/lib/ldclient-rb/integrations/file_data.rb @@ -78,7 +78,7 @@ module Integrations # same flag key or segment key more than once, either in a single file or across multiple files. # # If the data source encounters any error in any file-- malformed content, a missing file, or a - # duplicate key-- it will not load flags from any of the files. + # duplicate key-- it will not load flags from any of the files. # module FileData # diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 8cbcc980..f04d75d4 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -195,7 +195,7 @@ def make_init_data @lock.with_read_lock do { FEATURES => @current_flags.clone, - SEGMENTS => @current_segments.clone + SEGMENTS => @current_segments.clone, } end end diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index 79d6247b..0d3d08ca 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -56,7 +56,7 @@ def on(on) # @return [FlagBuilder] the builder # def fallthrough_variation(variation) - if LaunchDarkly::Impl::Util.is_bool variation then + if LaunchDarkly::Impl::Util.bool? variation then boolean_flag.fallthrough_variation(variation_for_boolean(variation)) else @fallthrough_variation = variation @@ -76,7 +76,7 @@ def fallthrough_variation(variation) # @return [FlagBuilder] the builder # def off_variation(variation) - if LaunchDarkly::Impl::Util.is_bool variation then + if LaunchDarkly::Impl::Util.bool? variation then boolean_flag.off_variation(variation_for_boolean(variation)) else @off_variation = variation @@ -121,7 +121,7 @@ def variations(*variations) # @return [FlagBuilder] the builder # def variation_for_all_users(variation) - if LaunchDarkly::Impl::Util.is_bool variation then + if LaunchDarkly::Impl::Util.bool? 
variation then boolean_flag.variation_for_all_users(variation_for_boolean(variation)) else on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) @@ -158,7 +158,7 @@ def value_for_all_users(value) # @return [FlagBuilder] the builder # def variation_for_user(user_key, variation) - if LaunchDarkly::Impl::Util.is_bool variation then + if LaunchDarkly::Impl::Util.bool? variation then boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) else if @targets.nil? then @@ -261,7 +261,7 @@ def add_rule(rule) # @return [FlagBuilder] the builder # def boolean_flag - if is_boolean_flag then + if boolean_flag? then self else variations(true, false) @@ -287,13 +287,13 @@ def build(version) end unless @targets.nil? then - res[:targets] = @targets.collect do | variation, values | + res[:targets] = @targets.map do | variation, values | { variation: variation, values: values } end end unless @rules.nil? then - res[:rules] = @rules.each_with_index.collect { | rule, i | rule.build(i) } + res[:rules] = @rules.each_with_index.map { | rule, i | rule.build(i) } end res @@ -386,7 +386,7 @@ def and_not_match(attribute, *values) # @return [FlagBuilder] the flag builder with this rule added # def then_return(variation) - if LaunchDarkly::Impl::Util.is_bool variation then + if LaunchDarkly::Impl::Util.bool? variation then @variation = @flag_builder.variation_for_boolean(variation) @flag_builder.boolean_flag.add_rule(self) else @@ -400,7 +400,7 @@ def build(ri) { id: 'rule' + ri.to_s, variation: @variation, - clauses: @clauses.collect(&:to_h) + clauses: @clauses.map(&:to_h), } end end @@ -415,10 +415,10 @@ def variation_for_boolean(variation) TRUE_VARIATION_INDEX = 0 FALSE_VARIATION_INDEX = 1 - def is_boolean_flag + def boolean_flag? @variations.size == 2 && - @variations[TRUE_VARIATION_INDEX] == true && - @variations[FALSE_VARIATION_INDEX] == false + @variations[TRUE_VARIATION_INDEX] == true && + @variations[FALSE_VARIATION_INDEX] == false end def deep_copy_hash(from) diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index c94ace94..4bb22b0e 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -22,7 +22,7 @@ module Util # class CachingStoreWrapper include LaunchDarkly::Interfaces::FeatureStore - + # # Creates a new store wrapper instance. 
# diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 3fd524ab..f74e4dd6 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -353,7 +353,7 @@ def all_flags_state(user, options={}) Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) end - requires_experiment_data = is_experiment(f, detail.reason) + requires_experiment_data = experiment?(f, detail.reason) flag_state = { key: f[:key], value: detail.value, @@ -433,7 +433,7 @@ def evaluate_internal(key, user, default, with_reasons) @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) record_unknown_flag_eval(key, user, default, detail.reason, with_reasons) - return detail + return detail end end @@ -468,7 +468,7 @@ def evaluate_internal(key, user, default, with_reasons) end private def record_flag_eval(flag, user, detail, default, with_reasons) - add_experiment_data = is_experiment(flag, detail.reason) + add_experiment_data = experiment?(flag, detail.reason) @event_processor.record_eval_event( user, flag[:key], @@ -482,9 +482,9 @@ def evaluate_internal(key, user, default, with_reasons) nil ) end - + private def record_prereq_flag_eval(prereq_flag, prereq_of_flag, user, detail, with_reasons) - add_experiment_data = is_experiment(prereq_flag, detail.reason) + add_experiment_data = experiment?(prereq_flag, detail.reason) @event_processor.record_eval_event( user, prereq_flag[:key], @@ -498,7 +498,7 @@ def evaluate_internal(key, user, default, with_reasons) prereq_of_flag[:key] ) end - + private def record_flag_eval_error(flag, user, default, reason, with_reasons) @event_processor.record_eval_event(user, flag[:key], flag[:version], nil, default, with_reasons ? 
reason : nil, default, flag[:trackEvents], flag[:debugEventsUntilDate], nil) @@ -509,7 +509,7 @@ def evaluate_internal(key, user, default, with_reasons) false, nil, nil) end - private def is_experiment(flag, reason) + private def experiment?(flag, reason) return false if !reason if reason.in_experiment diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f13a63db..bf5c2e3d 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -33,7 +33,7 @@ def request_all_data() all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) Impl::Model.make_all_store_data(all_data) end - + def stop begin @http_client.close @@ -57,7 +57,7 @@ def make_request(path) headers["If-None-Match"] = cached.etag end response = @http_client.request("GET", uri, { - headers: headers + headers: headers, }) status = response.status.code # must fully read body for persistent connections diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 211e6321..f99573b5 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -17,7 +17,7 @@ module LaunchDarkly # @private KEY_PATHS = { FEATURES => "/flags/", - SEGMENTS => "/segments/" + SEGMENTS => "/segments/", } # @private @@ -41,14 +41,14 @@ def start return @ready unless @started.make_true @config.logger.info { "[LDClient] Initializing stream connection" } - + headers = Impl::Util.default_http_headers(@sdk_key, @config) opts = { headers: headers, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger, socket_factory: @config.socket_factory, - reconnect_time: @config.initial_reconnect_delay + reconnect_time: @config.initial_reconnect_delay, } log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| @@ -67,7 +67,7 @@ def start end } end - + @ready end diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index b67f6844..c5fc45e7 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -11,7 +11,7 @@ def initialize(config) def transform_user_props(user_props) return nil if user_props.nil? 
- + user_private_attrs = Set.new((user_props[:privateAttributeNames] || []).map(&:to_sym)) filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 5aac9d1e..ede16bd3 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -30,13 +30,13 @@ def self.new_http_client(uri_s, config) proxy_address: proxy.host, proxy_port: proxy.port, proxy_username: proxy.user, - proxy_password: proxy.password + proxy_password: proxy.password, } end return HTTP::Client.new(http_client_options) .timeout({ read: config.read_timeout, - connect: config.connect_timeout + connect: config.connect_timeout, }) .persistent(uri_s) end diff --git a/spec/big_segment_store_spec_base.rb b/spec/big_segment_store_spec_base.rb index 29f344a1..5fb874b5 100644 --- a/spec/big_segment_store_spec_base.rb +++ b/spec/big_segment_store_spec_base.rb @@ -31,7 +31,7 @@ prefix_test_groups = [ ["with default prefix", {}], - ["with specified prefix", { prefix: "testprefix" }] + ["with specified prefix", { prefix: "testprefix" }], ] prefix_test_groups.each do |subgroup_description, prefix_options| context(subgroup_description) do @@ -52,56 +52,56 @@ def with_empty_store expected_timestamp = 1234567890 with_empty_store do |store| store_tester.set_big_segments_metadata(LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(expected_timestamp)) - + actual = store.get_metadata - + expect(actual).not_to be nil expect(actual.last_up_to_date).to eq(expected_timestamp) end end - + it "no value" do with_empty_store do |store| actual = store.get_metadata - + expect(actual).not_to be nil expect(actual.last_up_to_date).to be nil end end end - + context "get_membership" do it "not found" do with_empty_store do |store| membership = store.get_membership(fake_user_hash) membership = {} if membership.nil? 
- + expect(membership).to eq({}) end end - + it "includes only" do with_empty_store do |store| store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], []) - + membership = store.get_membership(fake_user_hash) expect(membership).to eq({ "key1" => true, "key2" => true }) end end - + it "excludes only" do with_empty_store do |store| store_tester.set_big_segments(fake_user_hash, [], ["key1", "key2"]) - + membership = store.get_membership(fake_user_hash) expect(membership).to eq({ "key1" => false, "key2" => false }) end end - + it "includes and excludes" do with_empty_store do |store| store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], ["key2", "key3"]) - + membership = store.get_membership(fake_user_hash) expect(membership).to eq({ "key1" => true, "key2" => true, "key3" => false }) # include of key2 overrides exclude end diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index 7e1bce7f..d8dc752c 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -38,7 +38,7 @@ def expected_default_config userKeysCapacity: Config.default_user_keys_capacity, userKeysFlushIntervalMillis: Config.default_user_keys_flush_interval * 1000, usingProxy: false, - usingRelayDaemon: false + usingRelayDaemon: false, } end @@ -70,7 +70,7 @@ def expected_default_config [ { stream: false }, { streamingDisabled: true } ], [ { user_keys_capacity: 999 }, { userKeysCapacity: 999 } ], [ { user_keys_flush_interval: 999 }, { userKeysFlushIntervalMillis: 999000 } ], - [ { use_ldd: true }, { usingRelayDaemon: true } ] + [ { use_ldd: true }, { usingRelayDaemon: true } ], ] changes_and_expected.each do |config_values, expected_values| config = Config.new(config_values) @@ -95,7 +95,7 @@ def expected_default_config event = default_acc.create_init_event(Config.new) expect(event[:sdk]).to eq ({ name: 'ruby-server-sdk', - version: LaunchDarkly::VERSION + version: LaunchDarkly::VERSION, }) end @@ -105,14 +105,14 @@ def expected_default_config name: 'ruby-server-sdk', version: LaunchDarkly::VERSION, wrapperName: 'my-wrapper', - wrapperVersion: '2.0' + wrapperVersion: '2.0', }) end it "has expected platform data" do event = default_acc.create_init_event(Config.new) expect(event[:platform]).to include ({ - name: 'ruby' + name: 'ruby', }) end end @@ -127,7 +127,7 @@ def expected_default_config droppedEvents: 2, deduplicatedUsers: 3, eventsInLastBatch: 4, - streamInits: [] + streamInits: [], }) expect(event[:creationDate]).not_to be_nil expect(event[:dataSinceDate]).not_to be_nil @@ -149,14 +149,14 @@ def expected_default_config droppedEvents: 2, deduplicatedUsers: 3, eventsInLastBatch: 4, - streamInits: [{ timestamp: 1000, failed: false, durationMillis: 2000 }] + streamInits: [{ timestamp: 1000, failed: false, durationMillis: 2000 }], }) expect(event2).to include ({ dataSinceDate: event1[:creationDate], droppedEvents: 5, deduplicatedUsers: 6, eventsInLastBatch: 7, - streamInits: [] + streamInits: [], }) end end diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 72d19197..f92c8eab 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -44,7 +44,7 @@ def with_sender_and_server "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], "x-launchdarkly-event-schema" => [ "3" ], - "connection" => [ "Keep-Alive" ] + "connection" => [ "Keep-Alive" ], }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] end @@ -101,7 +101,7 @@ def with_sender_and_server "authorization" => [ sdk_key ], 
"content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "connection" => [ "Keep-Alive" ] + "connection" => [ "Keep-Alive" ], }) expect(req.header['x-launchdarkly-event-schema']).to eq [] expect(req.header['x-launchdarkly-payload-id']).to eq [] @@ -125,7 +125,7 @@ def with_sender_and_server es = make_sender_with_events_uri(fake_target_uri) result = es.send_event_data(fake_data, "", false) - + expect(result.success).to be true ensure ENV["http_proxy"] = nil @@ -135,7 +135,7 @@ def with_sender_and_server expect(body).to eq fake_data end end - + [400, 408, 429, 500].each do |status| it "handles recoverable error #{status}" do with_sender_and_server do |es, server| diff --git a/spec/events_spec.rb b/spec/events_spec.rb index c1449720..df84cd1e 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -27,7 +27,7 @@ def with_processor_and_sender(config) t = timestamp timestamp += 1 t - } + }, }) begin yield ep, sender @@ -201,7 +201,7 @@ def with_processor_and_sender(config) # Send and flush an event we don't care about, just to set the last server time sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) - + ep.record_identify_event(user) flush_and_get_events(ep, sender) @@ -280,16 +280,16 @@ def with_processor_and_sender(config) flagkey1: { default: "default1", counters: [ - { version: 11, variation: 1, value: "value1", count: 1 } - ] + { version: 11, variation: 1, value: "value1", count: 1 }, + ], }, flagkey2: { default: "default2", counters: [ - { version: 22, variation: 2, value: "value2", count: 1 } - ] - } - } + { version: 22, variation: 2, value: "value2", count: 1 }, + ], + }, + }, }) ) end @@ -356,7 +356,7 @@ def with_processor_and_sender(config) it "does a final flush when shutting down" do with_processor_and_sender(default_config) do |ep, sender| ep.record_identify_event(user) - + ep.stop output = sender.analytics_payloads.pop @@ -406,7 +406,7 @@ def with_diagnostic_processor_and_sender(config) event = sender.diagnostic_payloads.pop expect(event).to include({ kind: 'diagnostic-init', - id: default_id + id: default_id, }) end end @@ -421,7 +421,7 @@ def with_diagnostic_processor_and_sender(config) droppedEvents: 0, deduplicatedUsers: 0, eventsInLastBatch: 0, - streamInits: [] + streamInits: [], }) end end @@ -440,7 +440,7 @@ def with_diagnostic_processor_and_sender(config) expect(periodic_event).to include({ kind: 'diagnostic', droppedEvents: 1, - eventsInLastBatch: 2 + eventsInLastBatch: 2, }) end end @@ -456,7 +456,7 @@ def with_diagnostic_processor_and_sender(config) periodic_event = sender.diagnostic_payloads.pop expect(periodic_event).to include({ kind: 'diagnostic', - deduplicatedUsers: 1 + deduplicatedUsers: 1, }) end end @@ -466,7 +466,7 @@ def index_event(user, timestamp = starting_timestamp) { kind: "index", creationDate: timestamp, - user: user + user: user, } end @@ -475,7 +475,7 @@ def identify_event(user, timestamp = starting_timestamp) kind: "identify", creationDate: timestamp, key: user[:key], - user: user + user: user, } end @@ -486,7 +486,7 @@ def feature_event(flag, user, variation, value, inline_user = false, timestamp = key: flag[:key], variation: variation, version: flag[:version], - value: value + value: value, } if inline_user out[:user] = user @@ -504,7 +504,7 @@ def debug_event(flag, user, variation, value, timestamp = starting_timestamp) variation: variation, version: flag[:version], value: value, - user: user + user: user, } out end @@ -513,7 +513,7 @@ def custom_event(user, 
key, data, metric_value, inline_user = false, timestamp = out = { kind: "custom", creationDate: timestamp, - key: key + key: key, } out[:data] = data if !data.nil? if inline_user diff --git a/spec/expiring_cache_spec.rb b/spec/expiring_cache_spec.rb index ed021c34..7d757acf 100644 --- a/spec/expiring_cache_spec.rb +++ b/spec/expiring_cache_spec.rb @@ -10,7 +10,7 @@ after(:each) do Timecop.return end - + it "evicts entries based on TTL" do c = subject.new(3, 300) c[:a] = 1 @@ -45,7 +45,7 @@ expect(c[:a]).to be nil expect(c[:b]).to eq 2 - expect(c[:c]).to eq 3 + expect(c[:c]).to eq 3 end it "resets LRU on put" do @@ -57,7 +57,7 @@ expect(c[:a]).to eq 1 expect(c[:b]).to be nil - expect(c[:c]).to eq 3 + expect(c[:c]).to eq 3 end it "resets TTL on put" do diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 78fc8596..11df5969 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -41,7 +41,7 @@ key: $key1, name: "Thing 1", version: 11, - deleted: false + deleted: false, } $unused_key = "no" @@ -111,7 +111,7 @@ def new_version_plus(f, deltaVersion, attrs = {}) key: key2, name: "Thing 2", version: 22, - deleted: false + deleted: false, } with_inited_store([ $thing1, thing2 ]) do |store| expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1, key2.to_sym => thing2 }) @@ -124,7 +124,7 @@ def new_version_plus(f, deltaVersion, attrs = {}) key: key2, name: "Thing 2", version: 22, - deleted: true + deleted: true, } with_inited_store([ $thing1, thing2 ]) do |store| expect(store.all($things_kind)).to eq ({ $key1.to_sym => $thing1 }) @@ -188,7 +188,7 @@ def new_version_plus(f, deltaVersion, attrs = {}) key: "my-fancy-flag", name: "Tęst Feåtūre Flæg😺", version: 1, - deleted: false + deleted: false, } with_inited_store([]) do |store| store.upsert(LaunchDarkly::FEATURES, flag) @@ -208,11 +208,11 @@ def new_version_plus(f, deltaVersion, attrs = {}) caching_test_groups = [ ["with caching", { expiration: 60 }], - ["without caching", { expiration: 0 }] + ["without caching", { expiration: 0 }], ] prefix_test_groups = [ ["with default prefix", {}], - ["with specified prefix", { prefix: "testprefix" }] + ["with specified prefix", { prefix: "testprefix" }], ] caching_test_groups.each do |test_group_description, caching_options| @@ -226,7 +226,7 @@ def new_version_plus(f, deltaVersion, attrs = {}) store_tester = store_tester_class.new(base_options) before(:each) { store_tester.clear_data } - + include_examples "any_feature_store", store_tester it "can detect if another instance has initialized the store" do diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 323c6c31..006fb88f 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -31,6 +31,7 @@ it "can be converted to JSON structure" do state = subject.new(true) flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + # rubocop:disable Layout/LineLength flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } state.add_flag(flag_state1, false, false) state.add_flag(flag_state2, false, false) @@ -42,22 +43,23 @@ '$flagsState' => { 'key1' => { :variation => 0, - :version => 100 + :version => 100, }, 'key2' => { :variation => 1, :version => 200, :trackEvents => true, - :debugEventsUntilDate => 1000 - } + :debugEventsUntilDate => 1000, + }, }, 
- '$valid' => true + '$valid' => true, }) end it "can be converted to JSON string" do state = subject.new(true) flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + # rubocop:disable Layout/LineLength flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } state.add_flag(flag_state1, false, false) state.add_flag(flag_state2, false, false) @@ -70,6 +72,7 @@ it "uses our custom serializer with JSON.generate" do state = subject.new(true) flag_state1 = { key: "key1", version: 100, trackEvents: false, value: 'value1', variation: 0, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } + # rubocop:disable Layout/LineLength flag_state2 = { key: "key2", version: 200, trackEvents: true, debugEventsUntilDate: 1000, value: 'value2', variation: 1, reason: LaunchDarkly::EvaluationReason.fallthrough(false) } state.add_flag(flag_state1, false, false) state.add_flag(flag_state2, false, false) diff --git a/spec/http_util.rb b/spec/http_util.rb index 1a789772..a2aad72b 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -15,7 +15,7 @@ def initialize Port: @port, AccessLog: [], Logger: NullLogger.new, - RequestCallback: method(:record_request) + RequestCallback: method(:record_request), } @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE @@ -100,7 +100,7 @@ def create_server(port, base_opts) res.status = @connect_status end @request_count += 1 - end + end, })) end end diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb index 89637653..2eebf843 100644 --- a/spec/impl/big_segments_spec.rb +++ b/spec/impl/big_segments_spec.rb @@ -124,7 +124,7 @@ def with_manager(config) memberships = { user_hash_1 => { 'seg1': true }, user_hash_2 => { 'seg2': true }, - user_hash_3 => { 'seg3': true } + user_hash_3 => { 'seg3': true }, } queried_users = [] store = double @@ -142,7 +142,7 @@ def with_manager(config) expect(result1).to eq(BigSegmentMembershipResult.new(memberships[user_hash_1], BigSegmentsStatus::HEALTHY)) expect(result2).to eq(BigSegmentMembershipResult.new(memberships[user_hash_2], BigSegmentsStatus::HEALTHY)) expect(result3).to eq(BigSegmentMembershipResult.new(memberships[user_hash_3], BigSegmentsStatus::HEALTHY)) - + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) # Since the capacity is only 2 and user_key_1 was the least recently used, that key should be @@ -158,7 +158,7 @@ def with_manager(config) result1a = m.get_user_membership(user_key_1) expect(result1a).to eq(result1) - + expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3, user_hash_1]) end end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 32db7d79..fca9f95c 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -14,11 +14,11 @@ module Impl included: [ user[:key] ], # included should be ignored for a big segment version: 1, unbounded: true, - generation: 1 + generation: 1, } - e = EvaluatorBuilder.new(logger). - with_segment(segment). 
- build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -30,11 +30,11 @@ module Impl key: 'test', included: [ user[:key] ], # included should be ignored for a big segment version: 1, - unbounded: true + unbounded: true, } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -46,12 +46,12 @@ module Impl key: 'test', version: 1, unbounded: true, - generation: 2 + generation: 2, } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, true) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -65,13 +65,13 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] } - ] + { clauses: [ make_user_matching_clause(user) ] }, + ], } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, nil). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, nil) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -85,13 +85,13 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] } - ] + { clauses: [ make_user_matching_clause(user) ] }, + ], }; - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, false). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, false) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -103,13 +103,13 @@ module Impl key: 'test', version: 1, unbounded: true, - generation: 2 + generation: 2, } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - with_big_segments_status(BigSegmentsStatus::STALE). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, true) + .with_big_segments_status(BigSegmentsStatus::STALE) + .build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -121,13 +121,13 @@ module Impl key: 'segmentkey1', version: 1, unbounded: true, - generation: 2 + generation: 2, } segment2 = { key: 'segmentkey2', version: 1, unbounded: true, - generation: 3 + generation: 3, } flag = { key: 'key', @@ -136,19 +136,19 @@ module Impl variations: [ false, true ], rules: [ { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, - { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} - ] + { variation: 1, clauses: [ make_segment_match_clause(segment2) ]}, + ], } - + queries = [] - e = EvaluatorBuilder.new(logger). - with_segment(segment1).with_segment(segment2). 
- with_big_segment_for_user(user, segment2, true). - record_big_segments_queries(queries). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment1).with_segment(segment2) + .with_big_segment_for_user(user, segment2, true) + .record_big_segments_queries(queries) + .build # The membership deliberately does not include segment1, because we want the first rule to be # a non-match so that it will continue on and check segment2 as well. - + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 98dbd13d..b0a94acb 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -64,8 +64,8 @@ key: "userkey", custom: { stringAttr: "33333", - intAttr: 33333 - } + intAttr: 33333, + }, } stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA", nil) intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA", nil) @@ -78,8 +78,8 @@ user = { key: "userkey", custom: { - floatAttr: 33.5 - } + floatAttr: 33.5, + }, } result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) @@ -90,8 +90,8 @@ user = { key: "userkey", custom: { - boolAttr: true - } + boolAttr: true, + }, } result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) @@ -119,9 +119,9 @@ variations: [ { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value - { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } - ] - } + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) }, + ], + }, } flag = { key: flag_key, salt: salt } @@ -141,9 +141,9 @@ rule = { rollout: { variations: [ - { variation: 0, weight: bucket_value } - ] - } + { variation: 0, weight: bucket_value }, + ], + }, } flag = { key: flag_key, salt: salt } @@ -163,7 +163,7 @@ salt = "salt" seed = 61 - + rule = { rollout: { seed: seed, @@ -171,9 +171,9 @@ variations: [ { variation: 0, weight: 10000, untracked: false }, { variation: 2, weight: 20000, untracked: false }, - { variation: 0, weight: 70000 , untracked: true } - ] - } + { variation: 0, weight: 70000 , untracked: true }, + ], + }, } flag = { key: flag_key, salt: salt } @@ -202,9 +202,9 @@ seed: seed, kind: 'experiment', variations: [ - { variation: 0, weight: bucket_value, untracked: false } - ] - } + { variation: 0, weight: bucket_value, untracked: false }, + ], + }, } flag = { key: flag_key, salt: salt } diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index 5c447e6f..f57f5677 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -40,16 +40,16 @@ [ :contains, "y", "xyz", false ], # mixed strings and numbers - [ :in, "99", 99, false ], + [ :in, "99", 99, false ], [ :in, 99, "99", false ], - [ :contains, "99", 99, false ], - [ :startsWith, "99", 99, false ], - [ :endsWith, "99", 99, false ], - [ :lessThanOrEqual, "99", 99, false ], - [ :lessThanOrEqual, 99, "99", false ], - [ :greaterThanOrEqual, "99", 99, false ], - [ :greaterThanOrEqual, 99, "99", false ], - + [ :contains, "99", 99, false ], + [ :startsWith, "99", 99, false ], + [ :endsWith, "99", 99, false ], + [ :lessThanOrEqual, 
"99", 99, false ], + [ :lessThanOrEqual, 99, "99", false ], + [ :greaterThanOrEqual, "99", 99, false ], + [ :greaterThanOrEqual, 99, "99", false ], + # regex [ :matches, "hello world", "hello.*rld", true ], [ :matches, "hello world", "hello.*orl", true ], @@ -90,7 +90,7 @@ [ :semVerGreaterThan, "2.0", "2.0.1", false ], [ :semVerGreaterThan, "2.0.0-rc.1", "2.0.0-rc.0", true ], [ :semVerLessThan, "2.0.1", "xbad%ver", false ], - [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ] + [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ], ] operatorTests.each do |params| diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index bb526b7c..12908118 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -18,7 +18,7 @@ def test_segment_match(segment) key: 'segkey', included: [ 'userkey' ], version: 1, - deleted: false + deleted: false, } e = EvaluatorBuilder.new(logger).with_segment(segment).build flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) @@ -54,7 +54,7 @@ def test_segment_match(segment) it 'matches user by rule when weight is absent' do segClause = make_user_matching_clause(user, :email) segRule = { - clauses: [ segClause ] + clauses: [ segClause ], } segment = make_segment('segkey') segment[:rules] = [ segRule ] @@ -65,7 +65,7 @@ def test_segment_match(segment) segClause = make_user_matching_clause(user, :email) segRule = { clauses: [ segClause ], - weight: nil + weight: nil, } segment = make_segment('segkey') segment[:rules] = [ segRule ] @@ -76,7 +76,7 @@ def test_segment_match(segment) segClause = make_user_matching_clause(user, :email) segRule = { clauses: [ segClause ], - weight: 100000 + weight: 100000, } segment = make_segment('segkey') segment[:rules] = [ segRule ] @@ -87,7 +87,7 @@ def test_segment_match(segment) segClause = make_user_matching_clause(user, :email) segRule = { clauses: [ segClause ], - weight: 0 + weight: 0, } segment = make_segment('segkey') segment[:rules] = [ segRule ] @@ -98,7 +98,7 @@ def test_segment_match(segment) segClause1 = make_user_matching_clause(user, :email) segClause2 = make_user_matching_clause(user, :name) segRule = { - clauses: [ segClause1, segClause2 ] + clauses: [ segClause1, segClause2 ], } segment = make_segment('segkey') segment[:rules] = [ segRule ] @@ -110,7 +110,7 @@ def test_segment_match(segment) segClause2 = make_user_matching_clause(user, :name) segClause2[:values] = [ 'wrong' ] segRule = { - clauses: [ segClause1, segClause2 ] + clauses: [ segClause1, segClause2 ], } segment = make_segment('segkey') segment[:rules] = [ segRule ] diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 20b231fb..09e85aa9 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -14,7 +14,7 @@ module Impl on: false, offVariation: 1, fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::off) @@ -28,7 +28,7 @@ module Impl key: 'feature', on: false, fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) @@ -43,7 +43,7 @@ module Impl on: false, offVariation: 999, fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, @@ -59,7 +59,7 @@ module Impl on: false, offVariation: -1, fallthrough: { 
variation: 0 }, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, @@ -76,7 +76,7 @@ module Impl prerequisites: [{key: 'badfeature', variation: 1}], fallthrough: { variation: 0 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) @@ -93,7 +93,7 @@ module Impl prerequisites: [{key: 'badfeature', variation: 1}], fallthrough: { variation: 0 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason user = { key: 'x' } @@ -112,7 +112,7 @@ module Impl fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, } flag1 = { key: 'feature1', @@ -120,12 +120,12 @@ module Impl prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist fallthrough: { variation: 0 }, variations: ['d', 'e'], - version: 2 + version: 2, } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))) + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build result = e.evaluate(flag, user) @@ -141,7 +141,7 @@ module Impl fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, } flag1 = { key: 'feature1', @@ -150,12 +150,12 @@ module Impl offVariation: 1, fallthrough: { variation: 0 }, variations: ['d', 'e'], - version: 2 + version: 2, } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)) + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user) @@ -171,19 +171,19 @@ module Impl fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, } flag1 = { key: 'feature1', on: true, fallthrough: { variation: 0 }, variations: ['d', 'e'], - version: 2 + version: 2, } user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)) + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user) @@ -199,19 +199,19 @@ module Impl fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, } flag1 = { key: 'feature1', on: true, fallthrough: { variation: 1 }, variations: ['d', 'e'], - version: 2 + version: 2, } user = { key: 'x' } detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::fallthrough)) + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, 
EvaluationReason::fallthrough)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build result = e.evaluate(flag, user) @@ -225,7 +225,7 @@ module Impl on: true, fallthrough: { variation: 999 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -240,7 +240,7 @@ module Impl on: true, fallthrough: { variation: -1 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -255,7 +255,7 @@ module Impl on: true, fallthrough: { }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -270,7 +270,7 @@ module Impl on: true, fallthrough: { rollout: { variations: [] } }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -284,11 +284,11 @@ module Impl key: 'feature', on: true, targets: [ - { values: [ 'whoever', 'userkey' ], variation: 2 } + { values: [ 'whoever', 'userkey' ], variation: 2 }, ], fallthrough: { variation: 0 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) @@ -302,9 +302,9 @@ module Impl flag = { key: 'feature', on: true, - fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) @@ -316,9 +316,9 @@ module Impl flag = { key: 'feature', on: true, - fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) @@ -330,9 +330,9 @@ module Impl flag = { key: 'feature', on: true, - fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 6008c8b9..543ed992 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -79,7 +79,7 @@ def user { key: "userkey", email: "test@example.com", - name: "Bob" + name: "Bob", } end @@ -104,7 +104,7 @@ def make_user_matching_clause(user, attr = :key) attribute: attr.to_s, op: :in, values: [ user[attr.to_sym] ], - negate: false + negate: false, } end @@ -114,7 +114,7 @@ def make_segment(key) included: [], excluded: 
[], salt: 'abcdef', - version: 1 + version: 1, } end @@ -122,7 +122,7 @@ def make_segment_match_clause(segment) { op: :segmentMatch, values: [ segment[:key] ], - negate: false + negate: false, } end end diff --git a/spec/impl/event_summarizer_spec.rb b/spec/impl/event_summarizer_spec.rb index bbd3f2ba..2d824d91 100644 --- a/spec/impl/event_summarizer_spec.rb +++ b/spec/impl/event_summarizer_spec.rb @@ -58,24 +58,24 @@ module Impl 'default1', { 11 => { 1 => EventSummaryFlagVariationCounter.new('value1', 2), - 2 => EventSummaryFlagVariationCounter.new('value2', 1) - } + 2 => EventSummaryFlagVariationCounter.new('value2', 1), + }, } ), 'key2' => EventSummaryFlagInfo.new( 'default2', { 22 => { - 1 => EventSummaryFlagVariationCounter.new('value99', 1) - } + 1 => EventSummaryFlagVariationCounter.new('value99', 1), + }, } ), 'badkey' => EventSummaryFlagInfo.new( 'default3', { nil => { - nil => EventSummaryFlagVariationCounter.new('default3', 1) - } + nil => EventSummaryFlagVariationCounter.new('default3', 1), + }, } - ) + ), } expect(data.counters).to eq expectedCounters end diff --git a/spec/impl/repeating_task_spec.rb b/spec/impl/repeating_task_spec.rb index ba780d78..268d7320 100644 --- a/spec/impl/repeating_task_spec.rb +++ b/spec/impl/repeating_task_spec.rb @@ -10,7 +10,7 @@ module Impl def null_logger double().as_null_object end - + it "does not start when created" do signal = Concurrent::Event.new task = RepeatingTask.new(0.01, 0, -> { signal.set }, null_logger) diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index e73858fa..356f1679 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -7,7 +7,7 @@ $consul_base_opts = { prefix: $my_prefix, - logger: $null_log + logger: $null_log, } class ConsulStoreTester @@ -28,7 +28,7 @@ def create_feature_store describe "Consul feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' - + include_examples "persistent_feature_store", ConsulStoreTester end diff --git a/spec/integrations/dynamodb_stores_spec.rb b/spec/integrations/dynamodb_stores_spec.rb index 8f7c5c07..bf178dd0 100644 --- a/spec/integrations/dynamodb_stores_spec.rb +++ b/spec/integrations/dynamodb_stores_spec.rb @@ -13,12 +13,12 @@ class DynamoDBStoreTester DYNAMODB_OPTS = { credentials: Aws::Credentials.new("key", "secret"), region: "us-east-1", - endpoint: "http://localhost:8000" + endpoint: "http://localhost:8000", } FEATURE_STORE_BASE_OPTS = { dynamodb_opts: DYNAMODB_OPTS, prefix: 'testprefix', - logger: $null_log + logger: $null_log, } def initialize(options = {}) @@ -44,16 +44,16 @@ def self.create_table_if_necessary table_name: TABLE_NAME, key_schema: [ { attribute_name: "namespace", key_type: "HASH" }, - { attribute_name: "key", key_type: "RANGE" } + { attribute_name: "key", key_type: "RANGE" }, ], attribute_definitions: [ { attribute_name: "namespace", attribute_type: "S" }, - { attribute_name: "key", attribute_type: "S" } + { attribute_name: "key", attribute_type: "S" }, ], provisioned_throughput: { read_capacity_units: 1, - write_capacity_units: 1 - } + write_capacity_units: 1, + }, } client.create_table(req) @@ -68,8 +68,8 @@ def clear_data projection_expression: '#namespace, #key', expression_attribute_names: { '#namespace' => 'namespace', - '#key' => 'key' - } + '#key' => 'key', + }, } while true resp = client.scan(req) @@ -94,7 +94,7 @@ def create_feature_store def create_big_segment_store 
LaunchDarkly::Integrations::DynamoDB::new_big_segment_store(TABLE_NAME, @options) end - + def set_big_segments_metadata(metadata) client = self.class.create_test_client key = @actual_prefix + $DynamoDBBigSegmentStore::KEY_METADATA @@ -103,7 +103,7 @@ def set_big_segments_metadata(metadata) item: { "namespace" => key, "key" => key, - $DynamoDBBigSegmentStore::ATTR_SYNC_TIME => metadata.last_up_to_date + $DynamoDBBigSegmentStore::ATTR_SYNC_TIME => metadata.last_up_to_date, } ) end @@ -112,7 +112,7 @@ def set_big_segments(user_hash, includes, excludes) client = self.class.create_test_client sets = { $DynamoDBBigSegmentStore::ATTR_INCLUDED => Set.new(includes), - $DynamoDBBigSegmentStore::ATTR_EXCLUDED => Set.new(excludes) + $DynamoDBBigSegmentStore::ATTR_EXCLUDED => Set.new(excludes), } sets.each do |attr_name, values| if !values.empty? @@ -120,11 +120,11 @@ def set_big_segments(user_hash, includes, excludes) table_name: TABLE_NAME, key: { "namespace" => @actual_prefix + $DynamoDBBigSegmentStore::KEY_USER_DATA, - "key" => user_hash + "key" => user_hash, }, update_expression: "ADD #{attr_name} :value", expression_attribute_values: { - ":value" => values + ":value" => values, } ) end diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb index 4f26cbb0..ea01d7db 100644 --- a/spec/integrations/redis_stores_spec.rb +++ b/spec/integrations/redis_stores_spec.rb @@ -38,7 +38,7 @@ def create_feature_store def create_big_segment_store LaunchDarkly::Integrations::Redis.new_big_segment_store(@options) end - + def set_big_segments_metadata(metadata) with_redis_test_client do |client| client.set(@actual_prefix + $RedisBigSegmentStore::KEY_LAST_UP_TO_DATE, @@ -85,7 +85,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve flag = { key: "foo", version: 1 } test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger }) - + ensure_stop(tester.create_feature_store) do |store| store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb index 75418bd3..f72f33b9 100644 --- a/spec/integrations/test_data_spec.rb +++ b/spec/integrations/test_data_spec.rb @@ -22,7 +22,7 @@ module Integrations fallthrough: { variation: 0 }, offVariation: 1, on: true, - version: 1 + version: 1, }) client.close end @@ -41,7 +41,7 @@ module Integrations fallthrough: { variation: 0 }, offVariation: 1, on: true, - version: 1 + version: 1, }) expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', @@ -49,7 +49,7 @@ module Integrations fallthrough: { variation: 0 }, offVariation: 1, on: true, - version: 1 + version: 1, }) td.update(td.flag('flag').variation_for_all_users(false)) @@ -60,7 +60,7 @@ module Integrations fallthrough: { variation: 1 }, offVariation: 1, on: true, - version: 2 + version: 2, }) expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ key: 'flag', @@ -68,7 +68,7 @@ module Integrations fallthrough: { variation: 1 }, offVariation: 1, on: true, - version: 2 + version: 2, }) client.close @@ -231,8 +231,8 @@ module Integrations op: 'in', values: ['fr'], negate: true, - } - ] + }, + ], }]) end end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index 19c6c241..a76fa866 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -5,8 +5,8 @@ ALWAYS_TRUE_FLAG = { key: 'flagkey', version: 1, on: false, 
offVariation: 1, variations: [ false, true ] } DATA_WITH_ALWAYS_TRUE_FLAG = { - flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, - segments: {} + flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, + segments: {}, } PUT_EVENT_WITH_ALWAYS_TRUE_FLAG = "event: put\ndata:{\"data\":#{DATA_WITH_ALWAYS_TRUE_FLAG.to_json}}\n\n'" @@ -18,7 +18,7 @@ module LaunchDarkly it "starts in polling mode" do with_server do |poll_server| poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") - + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be true expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be true @@ -29,7 +29,7 @@ module LaunchDarkly it "fails in polling mode with 401 error" do with_server do |poll_server| poll_server.setup_status_response("/sdk/latest-all", 401) - + with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be false expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be false @@ -40,7 +40,7 @@ module LaunchDarkly it "sends event without diagnostics" do with_server do |events_server| events_server.setup_ok_response("/bulk", "") - + config = test_config( send_events: true, events_uri: events_server.base_uri.to_s, @@ -64,7 +64,7 @@ module LaunchDarkly with_server do |events_server| events_server.setup_ok_response("/bulk", "") events_server.setup_ok_response("/diagnostic", "") - + config = test_config( send_events: true, events_uri: events_server.base_uri.to_s @@ -90,7 +90,7 @@ module LaunchDarkly with_server do |events_server| events_server.setup_ok_response("/bulk", "") poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - + config = test_config( stream: false, data_source: nil, @@ -100,7 +100,7 @@ module LaunchDarkly diagnostic_opt_out: true, socket_factory: SocketFactoryFromHash.new({ "fake-polling-server" => poll_server.port, - "fake-events-server" => events_server.port + "fake-events-server" => events_server.port, }) ) with_client(config) do |client| diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index 581f3256..6af21ba6 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -23,7 +23,7 @@ module LaunchDarkly it "returns the value for an existing feature" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - + with_client(test_config(data_source: td)) do |client| expect(client.variation("flagkey", basic_user, "default")).to eq "value" end @@ -34,7 +34,7 @@ module LaunchDarkly td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil key: "flagkey", on: false, - offVariation: nil + offVariation: nil, }) with_client(test_config(data_source: td)) do |client| @@ -98,7 +98,7 @@ module LaunchDarkly it "returns a value for an existing feature" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) - + with_client(test_config(data_source: td)) do |client| result = client.variation_detail("flagkey", basic_user, "default") expected = EvaluationDetail.new("value", 0, EvaluationReason::off) @@ -111,7 +111,7 @@ module LaunchDarkly td.use_preconfigured_flag({ # TestData normally won't construct a flag with offVariation: nil key: 
"flagkey", on: false, - offVariation: nil + offVariation: nil, }) with_client(test_config(data_source: td)) do |client| @@ -201,7 +201,7 @@ module LaunchDarkly values = state.values_map expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - + result = state.as_json expect(result).to eq({ 'key1' => 'value1', @@ -209,16 +209,16 @@ module LaunchDarkly '$flagsState' => { 'key1' => { :variation => 0, - :version => 100 + :version => 100, }, 'key2' => { :variation => 1, :version => 200, :trackEvents => true, - :debugEventsUntilDate => 1000 - } + :debugEventsUntilDate => 1000, + }, }, - '$valid' => true + '$valid' => true, }) end end @@ -252,7 +252,7 @@ module LaunchDarkly values = state.values_map expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) - + result = state.as_json expect(result).to eq({ 'key1' => 'value1', @@ -260,20 +260,20 @@ module LaunchDarkly 'key3' => 'value3', '$flagsState' => { 'key1' => { - :variation => 0 + :variation => 0, }, 'key2' => { :variation => 1, :version => 200, - :trackEvents => true + :trackEvents => true, }, 'key3' => { :variation => 1, :version => 300, - :debugEventsUntilDate => future_time - } + :debugEventsUntilDate => future_time, + }, }, - '$valid' => true + '$valid' => true, }) end end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 4c77787b..8a4cca59 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -16,7 +16,7 @@ def event_processor(client) expect(event_processor(client)).to be_a(LaunchDarkly::NullEventProcessor) end end - + context "evaluation events - variation" do it "unknown flag" do with_client(test_config) do |client| @@ -30,7 +30,7 @@ def event_processor(client) it "known flag" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - + with_client(test_config(data_source: td)) do |client| expect(event_processor(client)).to receive(:record_eval_event).with( basic_user, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil @@ -44,7 +44,7 @@ def event_processor(client) td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) logger = double().as_null_object - + with_client(test_config(data_source: td, logger: logger)) do |client| expect(event_processor(client)).not_to receive(:record_eval_event) expect(logger).to receive(:error) @@ -69,10 +69,10 @@ def event_processor(client) it "sets trackEvents and reason if trackEvents is set for matched rule" do td = Integrations::TestData.data_source td.use_preconfigured_flag( - FlagBuilder.new("flagkey").version(100).on(true).variations("value"). - rule(RuleBuilder.new.variation(0).id("id").track_events(true). - clause(Clauses.match_user(basic_user))). - build + FlagBuilder.new("flagkey").version(100).on(true).variations("value") + .rule(RuleBuilder.new.variation(0).id("id").track_events(true) + .clause(Clauses.match_user(basic_user))) + .build ) with_client(test_config(data_source: td)) do |client| @@ -87,8 +87,8 @@ def event_processor(client) it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do td = Integrations::TestData.data_source td.use_preconfigured_flag( - FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0). 
- track_events_fallthrough(true).build + FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0) + .track_events_fallthrough(true).build ) with_client(test_config(data_source: td)) do |client| @@ -153,7 +153,7 @@ def event_processor(client) end end - context "identify" do + context "identify" do it "queues up an identify event" do with_client(test_config) do |client| expect(event_processor(client)).to receive(:record_identify_event).with(basic_user) @@ -173,7 +173,7 @@ def event_processor(client) it "does not send event, and logs warning, if user key is blank" do logger = double().as_null_object - + with_client(test_config(logger: logger)) do |client| expect(event_processor(client)).not_to receive(:record_identify_event) expect(logger).to receive(:warn) @@ -182,7 +182,7 @@ def event_processor(client) end end - context "track" do + context "track" do it "queues up an custom event" do with_client(test_config) do |client| expect(event_processor(client)).to receive(:record_custom_event).with( diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index ef689deb..ad56b800 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -63,11 +63,11 @@ module LaunchDarkly c: { key: "c" }, d: { key: "d" }, e: { key: "e" }, - f: { key: "f" } + f: { key: "f" }, }, SEGMENTS => { - o: { key: "o" } - } + o: { key: "o" }, + }, } } @@ -76,12 +76,12 @@ module LaunchDarkly td = Integrations::TestData.data_source dependency_ordering_test_data[FEATURES].each { |key, flag| td.use_preconfigured_flag(flag) } dependency_ordering_test_data[SEGMENTS].each { |key, segment| td.use_preconfigured_segment(segment) } - + with_client(test_config(feature_store: store, data_source: td)) do |client| data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) - + # Segments should always come first expect(data.keys[0]).to be(SEGMENTS) expect(data.values[0].count).to eq(dependency_ordering_test_data[SEGMENTS].count) diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 07dd851a..a4bcc7b0 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -20,7 +20,7 @@ def base_config { data_source: null_data, send_events: false, - logger: null_logger + logger: null_logger, } end diff --git a/spec/model_builders.rb b/spec/model_builders.rb index a7c0bd6e..f8dd312c 100644 --- a/spec/model_builders.rb +++ b/spec/model_builders.rb @@ -5,7 +5,7 @@ def initialize(key) key: key, version: 1, variations: [ false ], - rules: [] + rules: [], } end @@ -27,7 +27,7 @@ def on(value) @flag[:on] = value self end - + def rule(r) @flag[:rules].append(r.build) self @@ -71,7 +71,7 @@ def initialize() @rule = { id: "", variation: 0, - clauses: [] + clauses: [], } end @@ -106,14 +106,14 @@ def initialize(key) key: key, version: 1, included: [], - excluded: [] + excluded: [], } end def build @segment.clone end - + def included(*keys) @segment[:included] = keys self @@ -140,7 +140,7 @@ def self.match_segment(segment) { "attribute": "", "op": "segmentMatch", - "values": [ segment.is_a?(Hash) ? segment[:key] : segment ] + "values": [ segment.is_a?(Hash) ? 
segment[:key] : segment ], } end @@ -148,7 +148,7 @@ def self.match_user(user) { "attribute": "key", "op": "in", - "values": [ user[:key] ] + "values": [ user[:key] ], } end end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index ca36364c..c8f801c2 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -20,11 +20,11 @@ def with_processor(store) segment = { key: 'segkey', version: 1 } all_data = { LaunchDarkly::FEATURES => { - flagkey: flag + flagkey: flag, }, LaunchDarkly::SEGMENTS => { - segkey: segment - } + segkey: segment, + }, } it 'puts feature data in store' do diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 65ec7ed3..934a34bc 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -23,7 +23,7 @@ def with_requestor(base_uri, opts = {}) expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-all" expect(server.requests[0].header).to include({ "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], }) end end @@ -79,12 +79,12 @@ def with_requestor(base_uri, opts = {}) requestor.request_all_data() expect(server.requests.count).to eq 1 expect(server.requests[0].header).to include({ - "x-launchdarkly-wrapper" => [ "MyWrapper/1.0" ] + "x-launchdarkly-wrapper" => [ "MyWrapper/1.0" ], }) end end end - + it "can reuse cached data" do etag = "xyz" expected_data = { flags: { x: { key: "x" } } } diff --git a/spec/segment_store_spec_base.rb b/spec/segment_store_spec_base.rb index 02ecd448..c3ddf82a 100644 --- a/spec/segment_store_spec_base.rb +++ b/spec/segment_store_spec_base.rb @@ -7,7 +7,7 @@ key: "test-segment", version: 11, salt: "718ea30a918a4eba8734b57ab1a93227", - rules: [] + rules: [], } } let(:key0) { segment0[:key].to_sym } From 6d9e42cf04959cf8541448abc0594afaa1390a1e Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Mon, 11 Jul 2022 14:30:32 -0400 Subject: [PATCH 246/292] Add windows tests in circleci (#199) At some point in the past, we were experimenting with using Azure to verify Window builds. Now that CircleCI supports Windows, we should keep everything on a single CI provider. 
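In short, the new job relies on CircleCI's Windows orb. The shape of it, abridged from the configuration added below (the full version also provisions DynamoDB, Consul, and Redis before running the suite), is:

    orbs:
      win: circleci/windows@4.1.1

    jobs:
      build-test-windows:
        executor:
          name: win/default
        steps:
          - checkout
          - run: gem install bundler -v 2.2.33
          - run: bundle _2.2.33_ install
          - run: bundle _2.2.33_ exec rspec --format documentation spec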
--- .circleci/config.yml | 65 ++++++++++++++++++++++++++++++++++++++++++++ azure-pipelines.yml | 51 ---------------------------------- 2 files changed, 65 insertions(+), 51 deletions(-) delete mode 100644 azure-pipelines.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index 7ec25b1a..0e28431e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,9 +1,13 @@ version: 2.1 +orbs: + win: circleci/windows@4.1.1 + workflows: version: 2 test: jobs: + - build-test-windows - build-test-linux: name: Ruby 2.5 docker-image: cimg/ruby:2.5 @@ -22,6 +26,67 @@ workflows: jruby: true jobs: + build-test-windows: + executor: + name: win/default + + steps: + - checkout + + - run: + name: "Setup DynamoDB" + command: | + iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip + mkdir dynamo + Expand-Archive -Path dynamo.zip -DestinationPath dynamo + - run: + name: "Run DynamoDB" + background: true + working_directory: dynamo + command: javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar + + - run: + name: "Setup Consul" + command: | + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + mkdir consul + Expand-Archive -Path consul.zip -DestinationPath consul + sc.exe create "Consul" binPath="C:/Users/circleci/project/consul/consul.exe agent -dev" + - run: + name: "Run Consul" + background: true + working_directory: consul + command: sc.exe start "Consul" + + - run: + name: "Setup Redis" + command: | + iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip + mkdir redis + Expand-Archive -Path redis.zip -DestinationPath redis + cd redis + ./redis-server --service-install + - run: + name: "Run Redis" + background: true + working_directory: redis + command: | + ./redis-server --service-start + + - run: ruby -v + - run: choco install msys2 --allow-downgrade -y --version 20200903.0.0 + - run: ridk.cmd exec pacman -S --noconfirm --needed base-devel mingw-w64-x86_64-toolchain + + - run: gem install bundler -v 2.2.33 + - run: bundle _2.2.33_ install + - run: mkdir /tmp/circle-artifacts + - run: bundle _2.2.33_ exec rspec --format documentation --format RspecJunitFormatter -o /tmp/circle-artifacts/rspec.xml spec + + - store_test_results: + path: /tmp/circle-artifacts + - store_artifacts: + path: /tmp/circle-artifacts + build-test-linux: parameters: docker-image: diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 88296f02..00000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,51 +0,0 @@ -jobs: - - job: build - pool: - vmImage: 'vs2017-win2016' - steps: - - task: PowerShell@2 - displayName: 'Setup Dynamo' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip - mkdir dynamo - Expand-Archive -Path dynamo.zip -DestinationPath dynamo - cd dynamo - javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar - - task: PowerShell@2 - displayName: 'Setup Consul' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip - mkdir consul - Expand-Archive -Path consul.zip -DestinationPath consul - cd consul - sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" - sc.exe start "Consul" - - 
task: PowerShell@2 - displayName: 'Setup Redis' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip - mkdir redis - Expand-Archive -Path redis.zip -DestinationPath redis - cd redis - ./redis-server --service-install - ./redis-server --service-start - - task: PowerShell@2 - displayName: 'Setup SDK and Test' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - ruby -v - gem install bundler - bundle install - mkdir rspec - bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From 4c48d05f3db80c1993e597ceb47bcf02ea7de82d Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Mon, 1 Aug 2022 10:33:42 -0400 Subject: [PATCH 247/292] Add application info support (#194) --- contract-tests/client_entity.rb | 7 ++++ contract-tests/service.rb | 1 + lib/ldclient-rb/config.rb | 20 ++++++++++++ lib/ldclient-rb/impl/util.rb | 58 +++++++++++++++++++++++++++++++++ spec/config_spec.rb | 33 +++++++++++++++++++ spec/event_sender_spec.rb | 7 ++-- spec/requestor_spec.rb | 7 ++-- 7 files changed, 127 insertions(+), 6 deletions(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 2882068b..1f5f0fe2 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -34,6 +34,13 @@ def initialize(log, config) opts[:send_events] = false end + if config[:tags] + opts[:application] = { + :id => config[:tags][:applicationId], + :version => config[:tags][:applicationVersion], + } + end + startWaitTimeMs = config[:startWaitTimeMs] || 5_000 @client = LaunchDarkly::LDClient.new( diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 9534c25d..68b00288 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -29,6 +29,7 @@ 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', + 'tags', ] }.to_json end diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index ed33e08b..15e302ea 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -44,6 +44,7 @@ class Config # @option opts [String] :wrapper_version See {#wrapper_version}. # @option opts [#open] :socket_factory See {#socket_factory}. # @option opts [BigSegmentsConfig] :big_segments See {#big_segments}. + # @option opts [Hash] :application See {#application} # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -77,6 +78,7 @@ def initialize(opts = {}) @wrapper_version = opts[:wrapper_version] @socket_factory = opts[:socket_factory] @big_segments = opts[:big_segments] || BigSegmentsConfig.new(store: nil) + @application = LaunchDarkly::Impl::Util.validate_application_info(opts[:application] || {}, @logger) end # @@ -284,6 +286,24 @@ def offline? # attr_reader :big_segments + # + # An object that allows configuration of application metadata. + # + # Application metadata may be used in LaunchDarkly analytics or other product features, but does not affect feature flag evaluations. + # + # If you want to set non-default values for any of these fields, provide the appropriately configured hash to the {Config} object. 
+ # + # @example Configuring application information + # opts[:application] = { + # id: "MY APPLICATION ID", + # version: "MY APPLICATION VERSION" + # } + # config = LDConfig.new(opts) + # + # @return [Hash] + # + attr_reader :application + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index 5fe93a2b..165ce885 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -15,8 +15,66 @@ def self.default_http_headers(sdk_key, config) ret["X-LaunchDarkly-Wrapper"] = config.wrapper_name + (config.wrapper_version ? "/" + config.wrapper_version : "") end + + app_value = application_header_value config.application + ret["X-LaunchDarkly-Tags"] = app_value unless app_value.nil? || app_value.empty? + ret end + + # + # Generate an HTTP Header value containing the application meta information (@see #application). + # + # @return [String] + # + def self.application_header_value(application) + parts = [] + unless application[:id].empty? + parts << "application-id/#{application[:id]}" + end + + unless application[:version].empty? + parts << "application-version/#{application[:version]}" + end + + parts.join(" ") + end + + # + # @param value [String] + # @param name [Symbol] + # @param logger [Logger] + # @return [String] + # + def self.validate_application_value(value, name, logger) + value = value.to_s + + return "" if value.empty? + + if value.length > 64 + logger.warn { "Value of application[#{name}] was longer than 64 characters and was discarded" } + return "" + end + + if value.match(/[^a-zA-Z0-9._-]/) + logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" } + return "" + end + + value + end + + # + # @param app [Hash] + # @param logger [Logger] + # @return [Hash] + # + def self.validate_application_info(app, logger) + { + id: validate_application_value(app[:id], :id, logger), + version: validate_application_value(app[:version], :version, logger), + } + end end end end diff --git a/spec/config_spec.rb b/spec/config_spec.rb index 30dcb8f8..2b66e8b9 100644 --- a/spec/config_spec.rb +++ b/spec/config_spec.rb @@ -60,4 +60,37 @@ expect(subject.new(poll_interval: 29).poll_interval).to eq 30 end end + + describe ".application" do + it "can be set and read" do + app = { id: "my-id", version: "abcdef" } + expect(subject.new(application: app).application).to eq app + end + + it "can handle non-string values" do + expect(subject.new(application: { id: 1, version: 2 }).application).to eq ({ id: "1", version: "2" }) + end + + it "will ignore invalid keys" do + expect(subject.new(application: { invalid: 1, hashKey: 2 }).application).to eq ({ id: "", version: "" }) + end + + it "will drop invalid values" do + [" ", "@", ":", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-a"]. 
each do |value| + expect(subject.new(logger: $null_log, application: { id: value, version: value }).application).to eq ({ id: "", version: "" }) + end + end + + it "will generate correct header tag value" do + [ + { :id => "id", :version => "version", :expected => "application-id/id application-version/version" }, + { :id => "id", :version => "", :expected => "application-id/id" }, + { :id => "", :version => "version", :expected => "application-version/version" }, + { :id => "", :version => "", :expected => "" } + ].each do |test_case| + config = subject.new(application: { id: test_case[:id], version: test_case[:version] }) + expect(LaunchDarkly::Impl::Util.application_header_value(config.application)).to eq test_case[:expected] + end + end + end end diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 72d19197..2b7fe38b 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -18,7 +18,7 @@ def make_sender(server) end def make_sender_with_events_uri(events_uri) - subject.new(sdk_key, Config.new(events_uri: events_uri, logger: $null_log), nil, 0.1) + subject.new(sdk_key, Config.new(events_uri: events_uri, logger: $null_log, application: {id: "id", version: "version"}), nil, 0.1) end def with_sender_and_server @@ -44,6 +44,7 @@ def with_sender_and_server "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], "x-launchdarkly-event-schema" => [ "3" ], + "x-launchdarkly-tags" => [ "application-id/id application-version/version" ], "connection" => [ "Keep-Alive" ] }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] @@ -125,7 +126,7 @@ def with_sender_and_server es = make_sender_with_events_uri(fake_target_uri) result = es.send_event_data(fake_data, "", false) - + expect(result.success).to be true ensure ENV["http_proxy"] = nil @@ -135,7 +136,7 @@ def with_sender_and_server expect(body).to eq fake_data end end - + [400, 408, 429, 500].each do |status| it "handles recoverable error #{status}" do with_sender_and_server do |es, server| diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 65ec7ed3..0851b4aa 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -5,7 +5,7 @@ describe LaunchDarkly::Requestor do def with_requestor(base_uri, opts = {}) - r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri }.merge(opts))) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri, application: {id: "id", version: "version"} }.merge(opts))) begin yield r ensure @@ -23,7 +23,8 @@ def with_requestor(base_uri, opts = {}) expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-all" expect(server.requests[0].header).to include({ "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-tags" => [ "application-id/id application-version/version" ], }) end end @@ -84,7 +85,7 @@ def with_requestor(base_uri, opts = {}) end end end - + it "can reuse cached data" do etag = "xyz" expected_data = { flags: { x: { key: "x" } } } From cba083cec8d21e1d615b1fb6cbc1ea42b6f8d817 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 5 Aug 2022 13:47:00 -0700 Subject: [PATCH 248/292] reuse EvaluationDetail instances by precomputing results --- lib/ldclient-rb/impl/evaluator.rb | 59 ++-- lib/ldclient-rb/impl/evaluator_helpers.rb | 53 ++++ .../impl/model/preprocessed_data.rb | 177 +++++++++++ 
lib/ldclient-rb/impl/model/serialization.rb | 44 +-- lib/ldclient-rb/requestor.rb | 4 +- lib/ldclient-rb/stream.rb | 4 +- spec/impl/evaluator_big_segments_spec.rb | 284 +++++++++--------- spec/impl/evaluator_clause_spec.rb | 82 ++--- spec/impl/evaluator_rule_spec.rb | 232 +++++++------- spec/impl/evaluator_segment_spec.rb | 202 ++++++------- spec/impl/evaluator_spec.rb | 241 +++++++++++---- spec/impl/evaluator_spec_base.rb | 23 +- spec/impl/model/preprocessed_data_spec.rb | 45 +++ spec/impl/model/serialization_spec.rb | 13 +- spec/model_builders.rb | 46 ++- spec/requestor_spec.rb | 13 +- spec/stream_spec.rb | 11 +- 17 files changed, 985 insertions(+), 548 deletions(-) create mode 100644 lib/ldclient-rb/impl/evaluator_helpers.rb create mode 100644 lib/ldclient-rb/impl/model/preprocessed_data.rb create mode 100644 spec/impl/model/preprocessed_data_spec.rb diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index ed94719e..e8c9567d 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -1,5 +1,6 @@ require "ldclient-rb/evaluation_detail" require "ldclient-rb/impl/evaluator_bucketing" +require "ldclient-rb/impl/evaluator_helpers" require "ldclient-rb/impl/evaluator_operators" module LaunchDarkly @@ -87,19 +88,17 @@ def self.make_big_segment_ref(segment) # method is visible for testing def eval_internal(flag, user, state) if !flag[:on] - return get_off_value(flag, EvaluationReason::off) + return EvaluatorHelpers.off_result(flag) end - prereq_failure_reason = check_prerequisites(flag, user, state) - if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason) - end + prereq_failure_result = check_prerequisites(flag, user, state) + return prereq_failure_result if !prereq_failure_result.nil? # Check user target matches (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], EvaluationReason::target_match) + return EvaluatorHelpers.target_match_result(target, flag) end end end @@ -111,13 +110,15 @@ def eval_internal(flag, user, state) if rule_match_user(rule, user, state) reason = rule[:_reason] # try to use cached reason for this rule reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? - return get_value_for_variation_or_rollout(flag, rule, user, reason) + return get_value_for_variation_or_rollout(flag, rule, user, reason, + EvaluatorHelpers.rule_precomputed_results(rule)) end end # Check the fallthrough rule if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, EvaluationReason::fallthrough) + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, EvaluationReason::fallthrough, + EvaluatorHelpers.fallthrough_precomputed_results(flag)) end return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) @@ -149,8 +150,7 @@ def check_prerequisites(flag, user, state) end end if !prereq_ok - reason = prerequisite[:_reason] # try to use cached reason - return reason.nil? ? 
EvaluationReason::prerequisite_failed(prereq_key) : reason + return EvaluatorHelpers.prerequisite_failed_result(prerequisite, flag) end end nil @@ -253,35 +253,26 @@ def segment_rule_match_user(rule, user, segment_key, salt) end private - - def get_variation(flag, index, reason) - if index < 0 || index >= flag[:variations].length - @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") - return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) - end - EvaluationDetail.new(flag[:variations][index], index, reason) - end - - def get_off_value(flag, reason) - if flag[:offVariation].nil? # off variation unspecified - return default value - return EvaluationDetail.new(nil, nil, reason) - end - get_variation(flag, flag[:offVariation], reason) - end - - def get_value_for_variation_or_rollout(flag, vr, user, reason) + + def get_value_for_variation_or_rollout(flag, vr, user, reason, precomputed_results) index, in_experiment = EvaluatorBucketing.variation_index_for_user(flag, vr, user) - #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set - if in_experiment && reason.kind == :FALLTHROUGH - reason = EvaluationReason::fallthrough(in_experiment) - elsif in_experiment && reason.kind == :RULE_MATCH - reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) - end if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end - return get_variation(flag, index, reason) + if precomputed_results + return precomputed_results.for_variation(index, in_experiment) + else + #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set + if in_experiment + if reason.kind == :FALLTHROUGH + reason = EvaluationReason::fallthrough(in_experiment) + elsif reason.kind == :RULE_MATCH + reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) + end + end + return EvaluatorHelpers.evaluation_detail_for_variation(flag, index, reason) + end end end end diff --git a/lib/ldclient-rb/impl/evaluator_helpers.rb b/lib/ldclient-rb/impl/evaluator_helpers.rb new file mode 100644 index 00000000..9629a6aa --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator_helpers.rb @@ -0,0 +1,53 @@ +require "ldclient-rb/evaluation_detail" + +# This file contains any pieces of low-level evaluation logic that don't need to be inside the Evaluator +# class, because they don't depend on any SDK state outside of their input parameters. + +module LaunchDarkly + module Impl + module EvaluatorHelpers + def self.off_result(flag, logger = nil) + pre = flag[:_preprocessed] + pre ? pre.off_result : evaluation_detail_for_off_variation(flag, EvaluationReason::off, logger) + end + + def self.target_match_result(target, flag, logger = nil) + pre = target[:_preprocessed] + pre ? pre.match_result : evaluation_detail_for_variation( + flag, target[:variation], EvaluationReason::target_match, logger) + end + + def self.prerequisite_failed_result(prereq, flag, logger = nil) + pre = prereq[:_preprocessed] + pre ? pre.failed_result : evaluation_detail_for_off_variation( + flag, EvaluationReason::prerequisite_failed(prereq[:key]), logger + ) + end + + def self.fallthrough_precomputed_results(flag) + pre = flag[:_preprocessed] + pre ? 
pre.fallthrough_factory : nil + end + + def self.rule_precomputed_results(rule) + pre = rule[:_preprocessed] + pre ? pre.all_match_results : nil + end + + def self.evaluation_detail_for_off_variation(flag, reason, logger = nil) + index = flag[:offVariation] + index.nil? ? EvaluationDetail.new(nil, nil, reason) : evaluation_detail_for_variation(flag, index, reason, logger) + end + + def self.evaluation_detail_for_variation(flag, index, reason, logger = nil) + vars = flag[:variations] || [] + if index < 0 || index >= vars.length + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") unless logger.nil? + EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) + else + EvaluationDetail.new(vars[index], index, reason) + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb new file mode 100644 index 00000000..3118ddba --- /dev/null +++ b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -0,0 +1,177 @@ +require "ldclient-rb/impl/evaluator_helpers" + +module LaunchDarkly + module Impl + module DataModelPreprocessing + # + # Container for a precomputed result that includes a specific variation index and value, an + # evaluation reason, and optionally an alternate evaluation reason that corresponds to the + # "in experiment" state. + # + class EvalResultsForSingleVariation + def initialize(value, variation_index, regular_reason, in_experiment_reason = nil) + @regular_result = EvaluationDetail.new(value, variation_index, regular_reason) + @in_experiment_result = in_experiment_reason ? + EvaluationDetail.new(value, variation_index, in_experiment_reason) : + @regular_result + end + + # @param in_experiment [Boolean] indicates whether we want the result to include + # "inExperiment: true" in the reason or not + # @return [EvaluationDetail] + def get_result(in_experiment = false) + in_experiment ? @in_experiment_result : @regular_result + end + end + + # + # Container for a set of precomputed results, one for each possible flag variation. + # + class EvalResultFactoryMultiVariations + def initialize(variation_factories) + @factories = variation_factories + end + + # @param index [Integer] the variation index + # @param in_experiment [Boolean] indicates whether we want the result to include + # "inExperiment: true" in the reason or not + def for_variation(index, in_experiment) + if index < 0 || index >= @factories.length + EvaluationDetail.new(nil, nil, EvaluationReason.error(EvaluationReason::ERROR_MALFORMED_FLAG)) + else + @factories[index].get_result(in_experiment) + end + end + end + + # Base class for all of the preprocessed data classes we embed in our data model. Using this class + # ensures that none of its properties will be included in JSON representations. It also overrides + # == to say that it is always equal with another instance of the same class; equality tests on + # this class are only ever done in test code, and we want the contents of these classes to be + # ignored in test code unless we are looking at specific attributes. 
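# A minimal illustrative sketch of the JSON-suppression behavior described in the comment above,
# assuming the standard `json` gem semantics in which Hash#to_json delegates to each value's
# #to_json; the FlagPreprocessed.new(nil, nil) value is used only for illustration:
#
#   require "json"
#   flag = { key: "f", _preprocessed: FlagPreprocessed.new(nil, nil) }
#   flag.to_json   # => {"key":"f","_preprocessed":null}
#
# Because to_json returns the literal "null", a re-serialized flag carries only a harmless
# "_preprocessed":null placeholder, which the preprocessed_data spec later in this patch strips
# before comparing against the original flag JSON.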
+ class PreprocessedDataBase + def as_json(*) + nil + end + + def to_json(*a) + "null" + end + + def ==(other) + other.class == self.class + end + end + + class FlagPreprocessed < PreprocessedDataBase + def initialize(off_result, fallthrough_factory) + @off_result = off_result + @fallthrough_factory = fallthrough_factory + end + + # @return [EvalResultsForSingleVariation] + attr_reader :off_result + # @return [EvalResultFactoryMultiVariations] + attr_reader :fallthrough_factory + end + + class PrerequisitePreprocessed < PreprocessedDataBase + def initialize(failed_result) + @failed_result = failed_result + end + + # @return [EvalResultsForSingleVariation] + attr_reader :failed_result + end + + class TargetPreprocessed < PreprocessedDataBase + def initialize(match_result) + @match_result = match_result + end + + # @return [EvalResultsForSingleVariation] + attr_reader :match_result + end + + class FlagRulePreprocessed < PreprocessedDataBase + def initialize(all_match_results) + @all_match_results = all_match_results + end + + # @return [EvalResultsForSingleVariation] + attr_reader :all_match_results + end + + class Preprocessor + def initialize(logger = nil) + @logger = logger + end + + def preprocess_item!(kind, item) + if kind.eql? FEATURES + preprocess_flag!(item) + elsif kind.eql? SEGMENTS + preprocess_segment!(item) + end + end + + def preprocess_all_items!(kind, items_map) + return items_map if !items_map + items_map.each do |key, item| + preprocess_item!(kind, item) + end + end + + def preprocess_flag!(flag) + flag[:_preprocessed] = FlagPreprocessed.new( + EvaluatorHelpers.off_result(flag), + precompute_multi_variation_results(flag, EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) + ) + (flag[:prerequisites] || []).each do |prereq| + preprocess_prerequisite!(prereq, flag) + end + (flag[:targets] || []).each do |target| + preprocess_target!(target, flag) + end + rules = flag[:rules] + (rules || []).each_index do |index| + preprocess_flag_rule!(rules[index], index, flag) + end + end + + def preprocess_segment!(segment) + # nothing to do for segments currently + end + + private def preprocess_prerequisite!(prereq, flag) + prereq[:_preprocessed] = PrerequisitePreprocessed.new( + EvaluatorHelpers.prerequisite_failed_result(prereq, flag, @logger) + ) + end + + private def preprocess_target!(target, flag) + target[:_preprocessed] = TargetPreprocessed.new( + EvaluatorHelpers.target_match_result(target, flag, @logger) + ) + end + + private def preprocess_flag_rule!(rule, index, flag) + match_reason = EvaluationReason::rule_match(index, rule[:id]) + match_reason_in_experiment = EvaluationReason::rule_match(index, rule[:id], true) + rule[:_preprocessed] = FlagRulePreprocessed.new( + precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment) + ) + end + + private def precompute_multi_variation_results(flag, regular_reason, in_experiment_reason) + factories = [] + vars = flag[:variations] || [] + vars.each_index do |index| + factories << EvalResultsForSingleVariation.new(vars[index], index, regular_reason, in_experiment_reason) + end + EvalResultFactoryMultiVariations.new(factories) + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index fcf8b135..1d306f46 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -1,13 +1,14 @@ +require "ldclient-rb/impl/model/preprocessed_data" module LaunchDarkly module Impl module 
Model # Abstraction of deserializing a feature flag or segment that was read from a data store or # received from LaunchDarkly. - def self.deserialize(kind, json) + def self.deserialize(kind, json, logger = nil) return nil if json.nil? item = JSON.parse(json, symbolize_names: true) - postprocess_item_after_deserializing!(kind, item) + DataModelPreprocessing::Preprocessor.new(logger).preprocess_item!(kind, item) item end @@ -18,45 +19,14 @@ def self.serialize(kind, item) end # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. - def self.make_all_store_data(received_data) + def self.make_all_store_data(received_data, logger = nil) + preprocessor = DataModelPreprocessing::Preprocessor.new(logger) flags = received_data[:flags] - postprocess_items_after_deserializing!(FEATURES, flags) + preprocessor.preprocess_all_items!(FEATURES, flags) segments = received_data[:segments] - postprocess_items_after_deserializing!(SEGMENTS, segments) + preprocessor.preprocess_all_items!(SEGMENTS, segments) { FEATURES => flags, SEGMENTS => segments } end - - # Called after we have deserialized a model item from JSON (because we received it from LaunchDarkly, - # or read it from a persistent data store). This allows us to precompute some derived attributes that - # will never change during the lifetime of that item. - def self.postprocess_item_after_deserializing!(kind, item) - return if !item - # Currently we are special-casing this for FEATURES; eventually it will be handled by delegating - # to the "kind" object or the item class. - if kind.eql? FEATURES - # For feature flags, we precompute all possible parameterized EvaluationReason instances. - prereqs = item[:prerequisites] - if !prereqs.nil? - prereqs.each do |prereq| - prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) - end - end - rules = item[:rules] - if !rules.nil? 
- rules.each_index do |i| - rule = rules[i] - rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) - end - end - end - end - - def self.postprocess_items_after_deserializing!(kind, items_map) - return items_map if !items_map - items_map.each do |key, item| - postprocess_item_after_deserializing!(kind, item) - end - end end end end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f13a63db..7d3c4cb9 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -31,7 +31,7 @@ def initialize(sdk_key, config) def request_all_data() all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) - Impl::Model.make_all_store_data(all_data) + Impl::Model.make_all_store_data(all_data, @config.logger) end def stop @@ -44,7 +44,7 @@ def stop private def request_single_item(kind, path) - Impl::Model.deserialize(kind, make_request(path)) + Impl::Model.deserialize(kind, make_request(path), @config.logger) end def make_request(path) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 211e6321..5ab3eea8 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -86,7 +86,7 @@ def process_message(message) @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) - all_data = Impl::Model.make_all_store_data(message[:data]) + all_data = Impl::Model.make_all_store_data(message[:data], @config.logger) @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @@ -97,7 +97,7 @@ def process_message(message) key = key_for_path(kind, data[:path]) if key data = data[:data] - Impl::Model.postprocess_item_after_deserializing!(kind, data) + Impl::DataModelPreprocessing::Preprocessor.new(@config.logger).preprocess_item!(kind, data) @feature_store.upsert(kind, data) break end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 32db7d79..36767567 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -5,155 +5,155 @@ module LaunchDarkly module Impl - describe "Evaluator (big segments)", :evaluator_spec_base => true do - subject { Evaluator } + evaluator_tests_with_and_without_preprocessing "Evaluator (big segments)" do |desc, factory| + describe "#{desc} - evaluate", :evaluator_spec_base => true do + it "segment is not matched if there is no way to query it" do + segment = factory.segment({ + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true, + generation: 1 + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end - it "segment is not matched if there is no way to query it" do - segment = { - key: 'test', - included: [ user[:key] ], # included should be ignored for a big segment - version: 1, - unbounded: true, - generation: 1 - } - e = EvaluatorBuilder.new(logger). - with_segment(segment). 
- build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be false - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) - end + it "segment with no generation is not matched" do + segment = factory.segment({ + key: 'test', + included: [ user[:key] ], # included should be ignored for a big segment + version: 1, + unbounded: true + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) + end - it "segment with no generation is not matched" do - segment = { - key: 'test', - included: [ user[:key] ], # included should be ignored for a big segment - version: 1, - unbounded: true - } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be false - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) - end + it "matched with include" do + segment = factory.segment({ + key: 'test', + version: 1, + unbounded: true, + generation: 2 + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end - it "matched with include" do - segment = { - key: 'test', - version: 1, - unbounded: true, - generation: 2 - } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be true - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - end + it "matched with rule" do + segment = factory.segment({ + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, nil). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end - it "matched with rule" do - segment = { - key: 'test', - version: 1, - unbounded: true, - generation: 2, - rules: [ - { clauses: [ make_user_matching_clause(user) ] } - ] - } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, nil). 
- build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be true - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - end + it "unmatched by exclude regardless of rule" do + segment = factory.segment({ + key: 'test', + version: 1, + unbounded: true, + generation: 2, + rules: [ + { clauses: [ make_user_matching_clause(user) ] } + ] + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, false). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be false + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + end - it "unmatched by exclude regardless of rule" do - segment = { - key: 'test', - version: 1, - unbounded: true, - generation: 2, - rules: [ - { clauses: [ make_user_matching_clause(user) ] } - ] - }; - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, false). - build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be false - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - end - - it "status is returned from provider" do - segment = { - key: 'test', - version: 1, - unbounded: true, - generation: 2 - } - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - with_big_segments_status(BigSegmentsStatus::STALE). - build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - result = e.evaluate(flag, user) - expect(result.detail.value).to be true - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) - end + it "status is returned from provider" do + segment = factory.segment({ + key: 'test', + version: 1, + unbounded: true, + generation: 2 + }) + e = EvaluatorBuilder.new(logger). + with_segment(segment). + with_big_segment_for_user(user, segment, true). + with_big_segments_status(BigSegmentsStatus::STALE). + build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + result = e.evaluate(flag, user) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) + end - it "queries state only once per user even if flag references multiple segments" do - segment1 = { - key: 'segmentkey1', - version: 1, - unbounded: true, - generation: 2 - } - segment2 = { - key: 'segmentkey2', - version: 1, - unbounded: true, - generation: 3 - } - flag = { - key: 'key', - on: true, - fallthrough: { variation: 0 }, - variations: [ false, true ], - rules: [ - { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, - { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} - ] - } - - queries = [] - e = EvaluatorBuilder.new(logger). - with_segment(segment1).with_segment(segment2). - with_big_segment_for_user(user, segment2, true). - record_big_segments_queries(queries). - build - # The membership deliberately does not include segment1, because we want the first rule to be - # a non-match so that it will continue on and check segment2 as well. 
- - result = e.evaluate(flag, user) - expect(result.detail.value).to be true - expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) + it "queries state only once per user even if flag references multiple segments" do + segment1 = factory.segment({ + key: 'segmentkey1', + version: 1, + unbounded: true, + generation: 2 + }) + segment2 = factory.segment({ + key: 'segmentkey2', + version: 1, + unbounded: true, + generation: 3 + }) + flag = factory.flag({ + key: 'key', + on: true, + fallthrough: { variation: 0 }, + variations: [ false, true ], + rules: [ + { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, + { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} + ] + }) + + queries = [] + e = EvaluatorBuilder.new(logger). + with_segment(segment1).with_segment(segment2). + with_big_segment_for_user(user, segment2, true). + record_big_segments_queries(queries). + build + # The membership deliberately does not include segment1, because we want the first rule to be + # a non-match so that it will continue on and check segment2 as well. + + result = e.evaluate(flag, user) + expect(result.detail.value).to be true + expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - expect(queries).to eq([ user[:key] ]) + expect(queries).to eq([ user[:key] ]) + end end end end diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb index 2b76505d..facf68de 100644 --- a/spec/impl/evaluator_clause_spec.rb +++ b/spec/impl/evaluator_clause_spec.rb @@ -3,52 +3,52 @@ module LaunchDarkly module Impl - describe "Evaluator (clauses)", :evaluator_spec_base => true do - subject { Evaluator } + evaluator_tests_with_and_without_preprocessing "Evaluator (clauses)" do |desc, factory| + describe "#{desc} - evaluate", :evaluator_spec_base => true do + it "can match built-in attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = factory.boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + end - it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true - end - - it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true - end + it "can match custom attribute" do + user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = factory.boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + end - it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false - end + it "returns false for missing attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = factory.boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + end - it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } - clause = { 
attribute: 'name', op: 'unknown', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false - end + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = factory.boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + end - it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } - clause0 = { attribute: 'name', op: 'unknown', values: [4] } - rule0 = { clauses: [ clause0 ], variation: 1 } - clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } - rule1 = { clauses: [ clause1 ], variation: 1 } - flag = boolean_flag_with_rules([rule0, rule1]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true - end + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = factory.boolean_flag_with_rules([rule0, rule1]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + end - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } + flag = factory.boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + end end end end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 6a6b9310..68e724cd 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -3,124 +3,150 @@ module LaunchDarkly module Impl - describe "Evaluator (rules)", :evaluator_spec_base => true do - subject { Evaluator } - - it "matches user from rules" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result = basic_evaluator.evaluate(flag, user) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end - - it "reuses rule match reason instances if possible" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached rule match reason - user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) - expect(result1.detail.reason.rule_id).to eq 'ruleid' - expect(result1.detail.reason).to be result2.detail.reason - end - - it "returns an error if rule variation is too high" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = 
EvaluationDetail.new(nil, nil, - EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end - - it "returns an error if rule variation is negative" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end + evaluator_tests_with_and_without_preprocessing "Evaluator (rules)" do |desc, factory| + describe "#{desc} - evaluate", :evaluator_spec_base => true do + it "matches user from rules" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end - it "returns an error if rule has neither variation nor rollout" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end + if factory.with_preprocessing + it "reuses rule match result detail instances" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail.reason.rule_id).to eq 'ruleid' + expect(result1.detail).to be result2.detail + end + end - it "returns an error if rule has a rollout with no variations" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [] } } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end - it "coerces user key to a string for evaluation" do - clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = basic_evaluator.evaluate(flag, user) - 
expect(result.detail.value).to eq(true) - end + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user) - expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) - end + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end - describe "experiment rollout behavior" do - it "sets the in_experiment value if rollout kind is experiment " do + it "returns an error if rule has a rollout with no variations" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } + rollout: { variations: [] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user) - expect(result.detail.reason.to_json).to include('"inExperiment":true') - expect(result.detail.reason.in_experiment).to eq(true) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) end - it "does not set the in_experiment value if rollout kind is not experiment " do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = factory.boolean_flag_with_clauses([clause]) + user = { key: 999 } result = basic_evaluator.evaluate(flag, user) - expect(result.detail.reason.to_json).to_not include('"inExperiment":true') - expect(result.detail.reason.in_experiment).to eq(nil) + expect(result.detail.value).to eq(true) end - it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works 
correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } - flag = boolean_flag_with_rules([rule]) + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = factory.boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user) - expect(result.detail.reason.to_json).to_not include('"inExperiment":true') - expect(result.detail.reason.in_experiment).to eq(nil) + expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) + end + + describe "rule experiment/rollout behavior" do + it "evaluates rollout for rule" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end + + if factory.with_preprocessing + it "reuses rule rollout result detail instance" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) + end + end + + it "sets the in_experiment value if rollout kind is experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user) + expect(result.detail.reason.to_json).to include('"inExperiment":true') + expect(result.detail.reason.in_experiment).to eq(true) + end + + it "does not set the in_experiment value if rollout kind is not experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user) + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + + it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } + flag = factory.boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user) + expect(result.detail.reason.to_json).to_not 
include('"inExperiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end end end end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index bb526b7c..70d86546 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -3,118 +3,118 @@ module LaunchDarkly module Impl - describe "Evaluator (segments)", :evaluator_spec_base => true do - subject { Evaluator } + evaluator_tests_with_and_without_preprocessing "Evaluator (segments)" do |desc, factory| + describe "#{desc} - evaluate", :evaluator_spec_base => true do + def test_segment_match(factory, segment) + clause = make_segment_match_clause(segment) + flag = factory.boolean_flag_with_clauses([clause]) + e = EvaluatorBuilder.new(logger).with_segment(segment).build + e.evaluate(flag, user).detail.value + end - def test_segment_match(segment) - clause = make_segment_match_clause(segment) - flag = boolean_flag_with_clauses([clause]) - e = EvaluatorBuilder.new(logger).with_segment(segment).build - e.evaluate(flag, user).detail.value - end - - it "retrieves segment from segment store for segmentMatch operator" do - segment = { - key: 'segkey', - included: [ 'userkey' ], - version: 1, - deleted: false - } - e = EvaluatorBuilder.new(logger).with_segment(segment).build - flag = boolean_flag_with_clauses([make_segment_match_clause(segment)]) - expect(e.evaluate(flag, user).detail.value).to be true - end + it "retrieves segment from segment store for segmentMatch operator" do + segment = { + key: 'segkey', + included: [ 'userkey' ], + version: 1, + deleted: false + } + e = EvaluatorBuilder.new(logger).with_segment(segment).build + flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + expect(e.evaluate(flag, user).detail.value).to be true + end - it "falls through with no errors if referenced segment is not found" do - e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user).detail.value).to be false - end + it "falls through with no errors if referenced segment is not found" do + e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = factory.boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user).detail.value).to be false + end - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(factory, segment)).to be true + end - it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be false - end + it 'explicitly excludes user' do + segment = make_segment('segkey') + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(factory, segment)).to be false + end - it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end + it 'both includes and excludes user; include takes priority' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + segment[:excluded] = [ 
user[:key] ] + expect(test_segment_match(factory, segment)).to be true + end - it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end + it 'matches user by rule when weight is absent' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be true + end - it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: nil - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end + it 'matches user by rule when weight is nil' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: nil + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be true + end - it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 100000 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end + it 'matches user with full rollout' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 100000 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be true + end - it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 0 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end + it "doesn't match user with zero rollout" do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 0 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be false + end - it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end + it "matches user with multiple clauses" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be true + end - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segClause2[:values] = [ 'wrong' ] - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false + it "doesn't match user with multiple clauses if a clause doesn't match" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 
= make_user_matching_clause(user, :name) + segClause2[:values] = [ 'wrong' ] + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(factory, segment)).to be false + end end end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 20b231fb..7ac31728 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -1,21 +1,20 @@ require "events_test_util" +require "model_builders" require "spec_helper" require "impl/evaluator_spec_base" module LaunchDarkly module Impl - describe "Evaluator (general)", :evaluator_spec_base => true do - subject { Evaluator } - - describe "evaluate" do + evaluator_tests_with_and_without_preprocessing "Evaluator (general)" do |desc, factory| + describe "#{desc} - evaluate", :evaluator_spec_base => true do it "returns off variation if flag is off" do - flag = { + flag = factory.flag({ key: 'feature', on: false, offVariation: 1, fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'] - } + }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user) @@ -24,12 +23,12 @@ module Impl end it "returns nil if flag is off and off variation is unspecified" do - flag = { + flag = factory.flag({ key: 'feature', on: false, fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'] - } + }) user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user) @@ -37,14 +36,32 @@ module Impl expect(result.prereq_evals).to eq(nil) end + if factory.with_preprocessing + it "reuses off result detail instance" do + flag = factory.flag({ + key: 'feature', + on: false, + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + }) + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, EvaluationReason::off) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) + end + end + it "returns an error if off variation is too high" do - flag = { + flag = factory.flag({ key: 'feature', on: false, offVariation: 999, fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'] - } + }) user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -54,13 +71,13 @@ module Impl end it "returns an error if off variation is negative" do - flag = { + flag = factory.flag({ key: 'feature', on: false, offVariation: -1, fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'] - } + }) user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -70,14 +87,14 @@ module Impl end it "returns off variation if prerequisite is not found" do - flag = { + flag = factory.flag({ key: 'feature0', on: true, prerequisites: [{key: 'badfeature', variation: 1}], fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build @@ -86,26 +103,27 @@ module Impl expect(result.prereq_evals).to eq(nil) end - it "reuses prerequisite-failed reason instances if possible" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - 
fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason - user = { key: 'x' } - e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result1 = e.evaluate(flag, user) - expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') - result2 = e.evaluate(flag, user) - expect(result2.detail.reason).to be result1.detail.reason + if factory.with_preprocessing + it "reuses prerequisite-failed result detail instances" do + flag = factory.flag({ + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + }) + user = { key: 'x' } + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build + result1 = e.evaluate(flag, user) + expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') + result2 = e.evaluate(flag, user) + expect(result2.detail).to be result1.detail + end end it "returns off variation and event if prerequisite of a prerequisite is not found" do - flag = { + flag = factory.flag({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -113,15 +131,15 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], version: 1 - } - flag1 = { + }) + flag1 = factory.flag({ key: 'feature1', on: true, prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist fallthrough: { variation: 0 }, variations: ['d', 'e'], version: 2 - } + }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ @@ -134,7 +152,7 @@ module Impl end it "returns off variation and event if prerequisite is off" do - flag = { + flag = factory.flag({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -142,8 +160,8 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], version: 1 - } - flag1 = { + }) + flag1 = factory.flag({ key: 'feature1', on: false, # note that even though it returns the desired variation, it is still off and therefore not a match @@ -151,7 +169,7 @@ module Impl fallthrough: { variation: 0 }, variations: ['d', 'e'], version: 2 - } + }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ @@ -164,7 +182,7 @@ module Impl end it "returns off variation and event if prerequisite is not met" do - flag = { + flag = factory.flag({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -172,14 +190,14 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], version: 1 - } - flag1 = { + }) + flag1 = factory.flag({ key: 'feature1', on: true, fallthrough: { variation: 0 }, variations: ['d', 'e'], version: 2 - } + }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ @@ -192,7 +210,7 @@ module Impl end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do - flag = { + flag = factory.flag({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -200,14 +218,14 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], version: 1 - } - flag1 = { + }) + flag1 = factory.flag({ key: 'feature1', on: true, fallthrough: { variation: 1 }, variations: ['d', 'e'], version: 2 - } + }) user = { key: 'x' } detail = 
EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) expected_prereqs = [ @@ -219,14 +237,55 @@ module Impl expect(result.prereq_evals).to eq(expected_prereqs) end + it "returns fallthrough variation if flag is on and no rules match" do + flag = factory.flag({ + key: 'feature0', + on: true, + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1, + rules: [ + { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] } + ] + }) + user = { key: 'x' } + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end + + if factory.with_preprocessing + it "reuses fallthrough variation result detail instance" do + flag = factory.flag({ + key: 'feature0', + on: true, + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1, + rules: [ + { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] } + ] + }) + user = { key: 'x' } + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) + end + end + it "returns an error if fallthrough variation is too high" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { variation: 999 }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user) @@ -235,13 +294,13 @@ module Impl end it "returns an error if fallthrough variation is negative" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { variation: -1 }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user) @@ -250,13 +309,13 @@ module Impl end it "returns an error if fallthrough has no variation or rollout" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user) @@ -265,13 +324,13 @@ module Impl end it "returns an error if fallthrough has a rollout with no variations" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { rollout: { variations: [] } }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user) @@ -280,7 +339,7 @@ module Impl end it "matches user from targets" do - flag = { + flag = factory.flag({ key: 'feature', on: true, targets: [ @@ -289,7 +348,7 @@ module Impl fallthrough: { variation: 0 }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) result = basic_evaluator.evaluate(flag, user) @@ -297,15 +356,71 @@ module Impl expect(result.prereq_evals).to eq(nil) end - describe "experiment rollout 
behavior" do + if factory.with_preprocessing + it "reuses target-match result detail instances" do + flag = factory.flag({ + key: 'feature', + on: true, + targets: [ + { values: [ 'whoever', 'userkey' ], variation: 2 } + ], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + }) + user = { key: 'userkey' } + detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) + end + end + + describe "fallthrough experiment/rollout behavior" do + it "evaluates rollout for fallthrough" do + flag = factory.flag({ + key: 'feature0', + on: true, + fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + }) + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) + result = basic_evaluator.evaluate(flag, user) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end + + if factory.with_preprocessing + it "reuses fallthrough rollout result detail instance" do + flag = factory.flag({ + key: 'feature0', + on: true, + fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + }) + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) + result1 = basic_evaluator.evaluate(flag, user) + result2 = basic_evaluator.evaluate(flag, user) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) + end + end + it "sets the in_experiment value if rollout kind is experiment and untracked false" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to include('"inExperiment":true') @@ -313,13 +428,13 @@ module Impl end it "does not set the in_experiment value if rollout kind is not experiment" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') @@ -327,13 +442,13 @@ module Impl end it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do - flag = { + flag = factory.flag({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, offVariation: 1, variations: ['a', 'b', 'c'] - } + }) user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 6008c8b9..fc1f0414 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -1,7 +1,22 @@ require "ldclient-rb/impl/big_segments" +require "model_builders" require "spec_helper" +def 
evaluator_tests_with_and_without_preprocessing(desc_base) + # In the evaluator tests, we are really testing two sets of evaluation logic: one where preprocessed + # results are not available, and one where they are. In normal usage, flags always get preprocessed and + # we expect evaluations to almost always be able to reuse a preprocessed result-- but we still want to + # verify that the evaluator works even if preprocessing hasn't happened, since a flag is just a Hash and + # so we can't do any type-level enforcement to constrain its state. The DataItemFactory abstraction + # controls whether flags/segments created in these tests do or do not have preprocessing applied. + [true, false].each do |with_preprocessing| + pre_desc = with_preprocessing ? "with preprocessing" : "without preprocessing" + desc = "#{desc_base} - #{pre_desc}" + yield desc, DataItemFactory.new(with_preprocessing) + end +end + module LaunchDarkly module Impl class EvaluatorBuilder @@ -91,14 +106,6 @@ def basic_evaluator EvaluatorBuilder.new(logger).build end - def boolean_flag_with_rules(rules) - { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - - def boolean_flag_with_clauses(clauses) - boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) - end - def make_user_matching_clause(user, attr = :key) { attribute: attr.to_s, diff --git a/spec/impl/model/preprocessed_data_spec.rb b/spec/impl/model/preprocessed_data_spec.rb new file mode 100644 index 00000000..c805a3d2 --- /dev/null +++ b/spec/impl/model/preprocessed_data_spec.rb @@ -0,0 +1,45 @@ +require "model_builders" +require "spec_helper" + +def strip_preprocessed_nulls(json) + # currently we can't avoid emitting these null properties - we just don't want to see anything other than null there + json.gsub('"_preprocessed":null,', '').gsub(',"_preprocessed":null', '') +end + +module LaunchDarkly + module Impl + module DataModelPreprocessing + describe "preprocessed data is not emitted in JSON" do + it "for flag" do + original_flag = { + key: 'flagkey', + version: 1, + on: true, + offVariation: 0, + variations: [true, false], + fallthroughVariation: 1, + prerequisites: [ + { key: 'a', variation: 0 } + ], + targets: [ + { variation: 0, values: ['a'] } + ], + rules: [ + { + variation: 0, + clauses: [ + { attribute: 'key', op: 'in', values: ['a'] } + ] + } + ] + } + flag = clone_json_object(original_flag) + Preprocessor.new().preprocess_flag!(flag) + json = Model.serialize(FEATURES, flag) + parsed = JSON.parse(strip_preprocessed_nulls(json), symbolize_names: true) + expect(parsed).to eq(original_flag) + end + end + end + end +end diff --git a/spec/impl/model/serialization_spec.rb b/spec/impl/model/serialization_spec.rb index 0a26bcd5..0d6fa4de 100644 --- a/spec/impl/model/serialization_spec.rb +++ b/spec/impl/model/serialization_spec.rb @@ -1,9 +1,12 @@ +require "model_builders" require "spec_helper" module LaunchDarkly module Impl module Model describe "model serialization" do + factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic + it "serializes flag" do flag = { key: "flagkey", version: 1 } json = Model.serialize(FEATURES, flag) @@ -24,16 +27,18 @@ module Model it "deserializes flag with no rules or prerequisites" do flag_in = { key: "flagkey", version: 1 } - json = Model.serialize(FEATURES, flag_in) + flag_preprocessed = factory.flag(flag_in) + json = Model.serialize(FEATURES, flag_preprocessed) flag_out = Model.deserialize(FEATURES, json) - 
expect(flag_out).to eq flag_in + expect(flag_out).to eq flag_preprocessed end it "deserializes segment" do segment_in = { key: "segkey", version: 1 } - json = Model.serialize(SEGMENTS, segment_in) + segment_preprocessed = factory.segment(segment_in) + json = Model.serialize(SEGMENTS, segment_preprocessed) segment_out = Model.deserialize(SEGMENTS, json) - expect(segment_out).to eq segment_in + expect(segment_out).to eq factory.segment(segment_preprocessed) end end end diff --git a/spec/model_builders.rb b/spec/model_builders.rb index a7c0bd6e..366155da 100644 --- a/spec/model_builders.rb +++ b/spec/model_builders.rb @@ -1,3 +1,45 @@ +require "ldclient-rb/impl/model/preprocessed_data" +require "json" + +def clone_json_object(o) + JSON.parse(o.to_json, symbolize_names: true) +end + +class DataItemFactory + def initialize(with_preprocessing) + @with_preprocessing = with_preprocessing + end + + def flag(flag_data) + @with_preprocessing ? preprocessed_flag(flag_data) : flag_data + end + + def segment(segment_data) + @with_preprocessing ? preprocessed_segment(segment_data) : segment_data + end + + def boolean_flag_with_rules(rules) + flag({ key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] }) + end + + def boolean_flag_with_clauses(clauses) + flag(boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }])) + end + + attr_reader :with_preprocessing + + private def preprocessed_flag(o) + ret = clone_json_object(o) + LaunchDarkly::Impl::DataModelPreprocessing::Preprocessor.new().preprocess_flag!(ret) + ret + end + + private def preprocessed_segment(o) + ret = clone_json_object(o) + LaunchDarkly::Impl::DataModelPreprocessing::Preprocessor.new().preprocess_segment!(ret) + ret + end +end class FlagBuilder def initialize(key) @@ -10,7 +52,7 @@ def initialize(key) end def build - @flag.clone + DataItemFactory.new(true).flag(@flag) end def version(value) @@ -111,7 +153,7 @@ def initialize(key) end def build - @segment.clone + DataItemFactory.new(true).segment(@segment) end def included(*keys) diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 0851b4aa..f9f40fa0 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,9 +1,12 @@ require "http_util" +require "model_builders" require "spec_helper" $sdk_key = "secret" describe LaunchDarkly::Requestor do + factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic + def with_requestor(base_uri, opts = {}) r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri, application: {id: "id", version: "version"} }.merge(opts))) begin @@ -31,7 +34,7 @@ def with_requestor(base_uri, opts = {}) end it "parses response" do - expected_data = { flags: { x: { key: "x" } } } + expected_data = { flags: { x: factory.flag({ key: "x" }) } } with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_ok_response("/", expected_data.to_json) @@ -88,7 +91,7 @@ def with_requestor(base_uri, opts = {}) it "can reuse cached data" do etag = "xyz" - expected_data = { flags: { x: { key: "x" } } } + expected_data = { flags: { x: factory.flag({ key: "x" }) } } with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_response("/") do |req, res| @@ -113,8 +116,8 @@ def with_requestor(base_uri, opts = {}) it "replaces cached data with new data" do etag1 = "abc" etag2 = "xyz" - expected_data1 = { flags: { x: { key: "x" } } } - expected_data2 = { flags: { y: { key: "y" } } } + 
expected_data1 = { flags: { x: factory.flag({ key: "x" }) } } + expected_data2 = { flags: { y: factory.flag({ key: "y" }) } } with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_response("/") do |req, res| @@ -197,7 +200,7 @@ def with_requestor(base_uri, opts = {}) # use a real proxy that really forwards requests to another test server, because # that test server would be at localhost, and proxy environment variables are # ignored if the target is localhost. - expected_data = { flags: { flagkey: { key: "flagkey" } } } + expected_data = { flags: { flagkey: factory.flag({ key: "flagkey" }) } } with_server do |proxy| proxy.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) begin diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 39c678c4..4f2d7b85 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1,7 +1,10 @@ require "ld-eventsource" +require "model_builders" require "spec_helper" describe LaunchDarkly::StreamProcessor do + factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic + subject { LaunchDarkly::StreamProcessor } let(:config) { LaunchDarkly::Config.new } let(:processor) { subject.new("sdk_key", config) } @@ -15,16 +18,16 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(factory.flag(key: "asdf")) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(factory.segment(key: "segkey")) end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(factory.flag(key: "asdf", version: 1)) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(factory.segment(key: "asdf", version: 1)) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) From e98c61a42ae92794f3ec56ca535131c346b36081 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Sep 2022 14:42:03 -0700 Subject: [PATCH 249/292] rubocop reformatting --- contract-tests/service.rb | 2 +- lib/ldclient-rb/impl/event_sender.rb | 2 +- .../impl/model/preprocessed_data.rb | 14 ++-- lib/ldclient-rb/impl/util.rb | 2 +- spec/config_spec.rb | 4 +- spec/event_sender_spec.rb | 2 +- spec/impl/evaluator_big_segments_spec.rb | 68 +++++++++---------- spec/impl/evaluator_spec.rb | 20 +++--- spec/impl/model/preprocessed_data_spec.rb | 12 ++-- 9 files changed, 63 insertions(+), 63 deletions(-) diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 2ed9d3b6..edca9063 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -30,7 +30,7 @@ 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', 'tags', - ] + ], }.to_json end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 5a77a8c1..cc5da055 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ 
b/lib/ldclient-rb/impl/event_sender.rb @@ -33,7 +33,7 @@ def send_event_data(event_data, description, is_diagnostic) begin http_client = @http_client_pool.acquire() response = nil - (0..1).each do |attempt| + 2.times do |attempt| if attempt > 0 @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } sleep(@retry_interval) diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb index 3118ddba..10044112 100644 --- a/lib/ldclient-rb/impl/model/preprocessed_data.rb +++ b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -41,7 +41,7 @@ def for_variation(index, in_experiment) else @factories[index].get_result(in_experiment) end - end + end end # Base class for all of the preprocessed data classes we embed in our data model. Using this class @@ -53,7 +53,7 @@ class PreprocessedDataBase def as_json(*) nil end - + def to_json(*a) "null" end @@ -138,23 +138,23 @@ def preprocess_flag!(flag) preprocess_flag_rule!(rules[index], index, flag) end end - + def preprocess_segment!(segment) # nothing to do for segments currently end - + private def preprocess_prerequisite!(prereq, flag) prereq[:_preprocessed] = PrerequisitePreprocessed.new( EvaluatorHelpers.prerequisite_failed_result(prereq, flag, @logger) ) end - + private def preprocess_target!(target, flag) target[:_preprocessed] = TargetPreprocessed.new( EvaluatorHelpers.target_match_result(target, flag, @logger) ) end - + private def preprocess_flag_rule!(rule, index, flag) match_reason = EvaluationReason::rule_match(index, rule[:id]) match_reason_in_experiment = EvaluationReason::rule_match(index, rule[:id], true) @@ -162,7 +162,7 @@ def preprocess_segment!(segment) precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment) ) end - + private def precompute_multi_variation_results(flag, regular_reason, in_experiment_reason) factories = [] vars = flag[:variations] || [] diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index fa42c80a..6c9801bb 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -56,7 +56,7 @@ def self.validate_application_value(value, name, logger) return "" end - if value.match(/[^a-zA-Z0-9._-]/) + if /[^a-zA-Z0-9._-]/.match?(value) logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" } return "" end diff --git a/spec/config_spec.rb b/spec/config_spec.rb index 2b66e8b9..692e9257 100644 --- a/spec/config_spec.rb +++ b/spec/config_spec.rb @@ -76,7 +76,7 @@ end it "will drop invalid values" do - [" ", "@", ":", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-a"]. 
each do |value| + [" ", "@", ":", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-a"].each do |value| expect(subject.new(logger: $null_log, application: { id: value, version: value }).application).to eq ({ id: "", version: "" }) end end @@ -86,7 +86,7 @@ { :id => "id", :version => "version", :expected => "application-id/id application-version/version" }, { :id => "id", :version => "", :expected => "application-id/id" }, { :id => "", :version => "version", :expected => "application-version/version" }, - { :id => "", :version => "", :expected => "" } + { :id => "", :version => "", :expected => "" }, ].each do |test_case| config = subject.new(application: { id: test_case[:id], version: test_case[:version] }) expect(LaunchDarkly::Impl::Util.application_header_value(config.application)).to eq test_case[:expected] diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 04c75848..6b7e323f 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -45,7 +45,7 @@ def with_sender_and_server "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], "x-launchdarkly-event-schema" => [ "3" ], "x-launchdarkly-tags" => [ "application-id/id application-version/version" ], - "connection" => [ "Keep-Alive" ] + "connection" => [ "Keep-Alive" ], }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 26ee8378..5b3c552b 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -13,11 +13,11 @@ module Impl included: [ user[:key] ], # included should be ignored for a big segment version: 1, unbounded: true, - generation: 1 + generation: 1, }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -31,9 +31,9 @@ module Impl version: 1, unbounded: true, }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -47,10 +47,10 @@ module Impl unbounded: true, generation: 2, }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, true) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -64,13 +64,13 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] } + { clauses: [ make_user_matching_clause(user) ] }, ], }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, nil). 
- build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, nil) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -84,13 +84,13 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] } + { clauses: [ make_user_matching_clause(user) ] }, ], }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, false). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, false) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be false @@ -104,11 +104,11 @@ module Impl unbounded: true, generation: 2, }) - e = EvaluatorBuilder.new(logger). - with_segment(segment). - with_big_segment_for_user(user, segment, true). - with_big_segments_status(BigSegmentsStatus::STALE). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment) + .with_big_segment_for_user(user, segment, true) + .with_big_segments_status(BigSegmentsStatus::STALE) + .build flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) result = e.evaluate(flag, user) expect(result.detail.value).to be true @@ -135,19 +135,19 @@ module Impl variations: [ false, true ], rules: [ { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, - { variation: 1, clauses: [ make_segment_match_clause(segment2) ]} + { variation: 1, clauses: [ make_segment_match_clause(segment2) ]}, ], }) - + queries = [] - e = EvaluatorBuilder.new(logger). - with_segment(segment1).with_segment(segment2). - with_big_segment_for_user(user, segment2, true). - record_big_segments_queries(queries). - build + e = EvaluatorBuilder.new(logger) + .with_segment(segment1).with_segment(segment2) + .with_big_segment_for_user(user, segment2, true) + .record_big_segments_queries(queries) + .build # The membership deliberately does not include segment1, because we want the first rule to be # a non-match so that it will continue on and check segment2 as well. 
- + result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 1a2b855e..8dfdde05 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -246,8 +246,8 @@ module Impl variations: ['a', 'b', 'c'], version: 1, rules: [ - { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] } - ] + { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, + ], }) user = { key: 'x' } detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) @@ -266,8 +266,8 @@ module Impl variations: ['a', 'b', 'c'], version: 1, rules: [ - { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] } - ] + { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, + ], }) user = { key: 'x' } detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) @@ -362,11 +362,11 @@ module Impl key: 'feature', on: true, targets: [ - { values: [ 'whoever', 'userkey' ], variation: 2 } + { values: [ 'whoever', 'userkey' ], variation: 2 }, ], fallthrough: { variation: 0 }, offVariation: 1, - variations: ['a', 'b', 'c'] + variations: ['a', 'b', 'c'], }) user = { key: 'userkey' } detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) @@ -382,10 +382,10 @@ module Impl flag = factory.flag({ key: 'feature0', on: true, - fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) @@ -399,10 +399,10 @@ module Impl flag = factory.flag({ key: 'feature0', on: true, - fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, offVariation: 1, variations: ['a', 'b', 'c'], - version: 1 + version: 1, }) user = { key: 'x' } detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) diff --git a/spec/impl/model/preprocessed_data_spec.rb b/spec/impl/model/preprocessed_data_spec.rb index c805a3d2..7b2c9fa7 100644 --- a/spec/impl/model/preprocessed_data_spec.rb +++ b/spec/impl/model/preprocessed_data_spec.rb @@ -19,19 +19,19 @@ module DataModelPreprocessing variations: [true, false], fallthroughVariation: 1, prerequisites: [ - { key: 'a', variation: 0 } + { key: 'a', variation: 0 }, ], targets: [ - { variation: 0, values: ['a'] } + { variation: 0, values: ['a'] }, ], rules: [ { variation: 0, clauses: [ - { attribute: 'key', op: 'in', values: ['a'] } - ] - } - ] + { attribute: 'key', op: 'in', values: ['a'] }, + ], + }, + ], } flag = clone_json_object(original_flag) Preprocessor.new().preprocess_flag!(flag) From 5be3532e660b99bd453318b953e77bbb61f72764 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Sep 2022 15:07:03 -0700 Subject: [PATCH 250/292] add super constructor calls --- lib/ldclient-rb/impl/model/preprocessed_data.rb | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb index 10044112..358f6567 100644 --- a/lib/ldclient-rb/impl/model/preprocessed_data.rb +++ 
b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -65,6 +65,7 @@ def ==(other) class FlagPreprocessed < PreprocessedDataBase def initialize(off_result, fallthrough_factory) + super @off_result = off_result @fallthrough_factory = fallthrough_factory end @@ -77,6 +78,7 @@ def initialize(off_result, fallthrough_factory) class PrerequisitePreprocessed < PreprocessedDataBase def initialize(failed_result) + super @failed_result = failed_result end @@ -86,6 +88,7 @@ def initialize(failed_result) class TargetPreprocessed < PreprocessedDataBase def initialize(match_result) + super @match_result = match_result end @@ -95,6 +98,7 @@ def initialize(match_result) class FlagRulePreprocessed < PreprocessedDataBase def initialize(all_match_results) + super @all_match_results = all_match_results end From 00e8a2551945e140b288647ed52320b94209bbfd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Sep 2022 15:12:13 -0700 Subject: [PATCH 251/292] disable rubocop Rails rules and fix some remaining syntax offenses --- .circleci/config.yml | 1 - .rubocop.yml | 56 ---------------------------- contract-tests/service.rb | 2 +- lib/ldclient-rb/impl/event_sender.rb | 2 +- lib/ldclient-rb/impl/util.rb | 2 +- spec/config_spec.rb | 4 +- spec/event_sender_spec.rb | 2 +- 7 files changed, 6 insertions(+), 63 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 144716da..b1cc31db 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -11,7 +11,6 @@ workflows: - rubocop/rubocop: after-install-rubocop: - run: gem install rubocop-performance - - run: gem install rubocop-rails - build-test-windows - build-test-linux: name: Ruby 2.6 diff --git a/.rubocop.yml b/.rubocop.yml index a63d0b3a..d5a11033 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,5 +1,4 @@ require: - - rubocop-rails - rubocop-performance AllCops: @@ -113,10 +112,6 @@ Metrics/CyclomaticComplexity: of test cases needed to validate a method. Enabled: false -Rails/Delegate: - Description: 'Prefer delegate method for delegations.' - Enabled: false - Style/PreferredHashMethods: Description: 'Checks for use of deprecated Hash methods.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#hash-key' @@ -586,54 +581,6 @@ Performance/StringReplacement: Reference: 'https://github.com/JuanitoFatas/fast-ruby#stringgsub-vs-stringtr-code' Enabled: false -# Rails - -Rails/ActionFilter: - Description: 'Enforces consistent use of action filter methods.' - Enabled: false - -Rails/Date: - Description: >- - Checks the correct usage of date aware methods, - such as Date.today, Date.current etc. - Enabled: false - -Rails/FindBy: - Description: 'Prefer find_by over where.first.' - Enabled: false - -Rails/FindEach: - Description: 'Prefer all.find_each over all.find.' - Enabled: false - -Rails/HasAndBelongsToMany: - Description: 'Prefer has_many :through to has_and_belongs_to_many.' - Enabled: false - -Rails/Output: - Description: 'Checks for calls to puts, print, etc.' - Enabled: false - -Rails/ReadWriteAttribute: - Description: >- - Checks for read_attribute(:attr) and - write_attribute(:attr, val). - Enabled: false - -Rails/ScopeArgs: - Description: 'Checks the arguments of ActiveRecord scopes.' - Enabled: false - -Rails/TimeZone: - Description: 'Checks the correct usage of time zone aware methods.' - StyleGuide: 'https://github.com/bbatsov/rails-style-guide#time' - Reference: 'http://danilenko.org/2012/7/6/rails_timezones' - Enabled: false - -Rails/Validation: - Description: 'Use validates :attribute, hash of validations.' 
- Enabled: false - # Disabled temporarily while we bring code base inline Layout/ArgumentAlignment: Enabled: false @@ -794,9 +741,6 @@ Naming/RescuedExceptionsVariableName: Naming/VariableNumber: Enabled: false -Rails/SkipsModelValidations: - Enabled: false - Style/AccessorGrouping: Enabled: false diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 2ed9d3b6..edca9063 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -30,7 +30,7 @@ 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', 'tags', - ] + ], }.to_json end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 5a77a8c1..cc5da055 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -33,7 +33,7 @@ def send_event_data(event_data, description, is_diagnostic) begin http_client = @http_client_pool.acquire() response = nil - (0..1).each do |attempt| + 2.times do |attempt| if attempt > 0 @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } sleep(@retry_interval) diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index fa42c80a..6c9801bb 100644 --- a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -56,7 +56,7 @@ def self.validate_application_value(value, name, logger) return "" end - if value.match(/[^a-zA-Z0-9._-]/) + if /[^a-zA-Z0-9._-]/.match?(value) logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" } return "" end diff --git a/spec/config_spec.rb b/spec/config_spec.rb index 2b66e8b9..692e9257 100644 --- a/spec/config_spec.rb +++ b/spec/config_spec.rb @@ -76,7 +76,7 @@ end it "will drop invalid values" do - [" ", "@", ":", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-a"]. 
each do |value| + [" ", "@", ":", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789._-a"].each do |value| expect(subject.new(logger: $null_log, application: { id: value, version: value }).application).to eq ({ id: "", version: "" }) end end @@ -86,7 +86,7 @@ { :id => "id", :version => "version", :expected => "application-id/id application-version/version" }, { :id => "id", :version => "", :expected => "application-id/id" }, { :id => "", :version => "version", :expected => "application-version/version" }, - { :id => "", :version => "", :expected => "" } + { :id => "", :version => "", :expected => "" }, ].each do |test_case| config = subject.new(application: { id: test_case[:id], version: test_case[:version] }) expect(LaunchDarkly::Impl::Util.application_header_value(config.application)).to eq test_case[:expected] diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 04c75848..6b7e323f 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -45,7 +45,7 @@ def with_sender_and_server "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], "x-launchdarkly-event-schema" => [ "3" ], "x-launchdarkly-tags" => [ "application-id/id application-version/version" ], - "connection" => [ "Keep-Alive" ] + "connection" => [ "Keep-Alive" ], }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] end From 87586d4770a829dd4d5196c1de18ba761b5a2b90 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Sep 2022 15:43:22 -0700 Subject: [PATCH 252/292] fix super calls --- lib/ldclient-rb/impl/model/preprocessed_data.rb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb index 358f6567..c70c3ce8 100644 --- a/lib/ldclient-rb/impl/model/preprocessed_data.rb +++ b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -65,7 +65,7 @@ def ==(other) class FlagPreprocessed < PreprocessedDataBase def initialize(off_result, fallthrough_factory) - super + super() @off_result = off_result @fallthrough_factory = fallthrough_factory end @@ -78,7 +78,7 @@ def initialize(off_result, fallthrough_factory) class PrerequisitePreprocessed < PreprocessedDataBase def initialize(failed_result) - super + super() @failed_result = failed_result end @@ -88,7 +88,7 @@ def initialize(failed_result) class TargetPreprocessed < PreprocessedDataBase def initialize(match_result) - super + super() @match_result = match_result end @@ -98,7 +98,7 @@ def initialize(match_result) class FlagRulePreprocessed < PreprocessedDataBase def initialize(all_match_results) - super + super() @all_match_results = all_match_results end From 97321930c140cea149159564b98b26ac4f6ed6bd Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Fri, 30 Sep 2022 12:49:51 -0400 Subject: [PATCH 253/292] Add big segment support to contract tests (#201) --- contract-tests/big_segment_store_fixture.rb | 24 +++++++++++++++++++++ contract-tests/client_entity.rb | 24 +++++++++++++++++++++ contract-tests/service.rb | 6 +++++- 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 contract-tests/big_segment_store_fixture.rb diff --git a/contract-tests/big_segment_store_fixture.rb b/contract-tests/big_segment_store_fixture.rb new file mode 100644 index 00000000..8f228e9a --- /dev/null +++ b/contract-tests/big_segment_store_fixture.rb @@ -0,0 +1,24 @@ +require 'http' + +class BigSegmentStoreFixture + def initialize(uri) + @uri = uri + end + + def get_metadata + response = HTTP.post("#{@uri}/getMetadata") + json = response.parse(:json) + LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(json['lastUpToDate']) + end + + def get_membership(user_hash) + response = HTTP.post("#{@uri}/getMembership", :json => {:userHash => user_hash}) + json = response.parse(:json) + + return json['values'] + end + + def stop + HTTP.delete("#{@uri}") + end +end diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 1f5f0fe2..0a1dd471 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -1,6 +1,8 @@ require 'ld-eventsource' require 'json' require 'net/http' +require 'launchdarkly-server-sdk' +require './big_segment_store_fixture' class ClientEntity def initialize(log, config) @@ -34,6 +36,23 @@ def initialize(log, config) opts[:send_events] = false end + if config[:bigSegments] + big_segments = config[:bigSegments] + + store = BigSegmentStoreFixture.new(config[:bigSegments][:callbackUri]) + user_cache_time = big_segments[:userCacheTimeMs].nil? ? nil : big_segments[:userCacheTimeMs] / 1_000 + status_poll_interval_ms = big_segments[:statusPollIntervalMs].nil? ? nil : big_segments[:statusPollIntervalMs] / 1_000 + stale_after_ms = big_segments[:staleAfterMs].nil? ? nil : big_segments[:staleAfterMs] / 1_000 + + opts[:big_segments] = LaunchDarkly::BigSegmentsConfig.new( + store: store, + user_cache_size: big_segments[:userCacheSize], + user_cache_time: user_cache_time, + status_poll_interval: status_poll_interval_ms, + stale_after: stale_after_ms + ) + end + if config[:tags] opts[:application] = { :id => config[:tags][:applicationId], @@ -93,6 +112,11 @@ def flush_events @client.flush end + def get_big_segment_store_status + status = @client.big_segment_store_status_provider.status + { available: status.available, stale: status.stale } + end + def log @log end diff --git a/contract-tests/service.rb b/contract-tests/service.rb index 68b00288..5774ed12 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -4,7 +4,7 @@ require 'net/http' require 'sinatra' -require './client_entity.rb' +require './client_entity' configure :development do disable :show_exceptions @@ -26,6 +26,7 @@ capabilities: [ 'server-side', 'server-side-polling', + 'big-segments', 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', @@ -95,6 +96,9 @@ when "flushEvents" client.flush_events return 201 + when "getBigSegmentStoreStatus" + status = client.get_big_segment_store_status + return [200, nil, status.to_json] end return [400, nil, {:error => "Unknown command requested"}.to_json] From eac8ae6f0a7194b10292bed2cc165f3ad851aa74 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Mon, 10 Oct 2022 11:21:12 -0400 Subject: [PATCH 254/292] Initial creation of LDContext (#206) This introduces the initial structure and usage of the LDContext class. Instances of this class are expected to be created through static factory methods: ```ruby LaunchDarkly::LDContext.create({...}) LaunchDarkly::LDContext.create_multi([...]) ``` This class is not completed yet. Rather, this initial commit is focused on the creation patterns and the most basic operations. Subsequent commits will continue fleshing out this class and its operation. The `get_value` method will see significant changes as we introduce attribute reference support. Its current more simplistic implementation exists only to serve some interim unit tests. --- lib/ldclient-rb.rb | 1 + lib/ldclient-rb/context.rb | 221 ++++++++++++++++++++++++++++++++ lib/ldclient-rb/impl/context.rb | 23 ++++ spec/context_spec.rb | 166 ++++++++++++++++++++++++ spec/impl/context_spec.rb | 31 +++++ 5 files changed, 442 insertions(+) create mode 100644 lib/ldclient-rb/context.rb create mode 100644 lib/ldclient-rb/impl/context.rb create mode 100644 spec/context_spec.rb create mode 100644 spec/impl/context_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 2bff8c8f..2826b453 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -15,6 +15,7 @@ module LaunchDarkly require "ldclient-rb/memoized_value" require "ldclient-rb/in_memory_store" require "ldclient-rb/config" +require "ldclient-rb/context" require "ldclient-rb/newrelic" require "ldclient-rb/stream" require "ldclient-rb/polling" diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb new file mode 100644 index 00000000..1b682f76 --- /dev/null +++ b/lib/ldclient-rb/context.rb @@ -0,0 +1,221 @@ +require 'set' +require 'ldclient-rb/impl/context' + +module LaunchDarkly + # LDContext is a collection of attributes that can be referenced in flag + # evaluations and analytics events. + # + # (TKTK - some conceptual text here, and/or a link to a docs page) + # + # To create an LDContext of a single kind, such as a user, you may use + # {LDContext#create} or {LDContext#with_key}. + # + # To create an LDContext with multiple kinds, use {LDContext#create_multi}. + # + # Each factory method will always return an LDContext. However, that + # LDContext may be invalid. You can check the validity of the resulting + # context, and the associated errors by calling {LDContext#valid?} and + # {LDContext#error} + class LDContext + # @return [String] Returns the key for this context + attr_reader :key + + # @return [String] Returns the kind for this context + attr_reader :kind + + # @return [String, nil] Returns the error associated with this LDContext if invalid + attr_reader :error + + # + # @private + # @param key [String] + # @param kind [String] + # @param secondary [String, nil] + # @param attributes [Hash, nil] + # @param private_attributes [Array, nil] + # @param error [String, nil] + # @param contexts [Array, nil] + # + def initialize(key, kind, secondary = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) + @key = key + @kind = kind + @secondary = secondary + @attributes = attributes + @private_attributes = private_attributes + @error = error + @contexts = contexts + @is_multi = !contexts.nil? + end + private_class_method :new + + # + # @return [Boolean] Is this LDContext a multi-kind context? + # + def multi_kind? + @is_multi + end + + # + # @return [Boolean] Determine if this LDContext is considered valid + # + def valid? 
+ @error.nil? + end + + # TODO: Update this method to support references. + # + # This method will be changing in subsequent PRs. Eventually it will + # receive a Reference or a string that we will turn into a Reference and + # then we will use that new reference to retrieve the correct value. + # + # However, I want to break this up into multiple PRs. So for now, this is + # doing some very basic lookups so I can verify the little bit of behavior + # I have so far. + # + # Later work will update this code and the tests. + # + # @param attribute [Symbol] + # + def get_value(attribute) + return nil unless valid? + + case attribute + when :key + @key + when :kind + @kind + when :secondary + @secondary + else + @attributes[attribute] + end + end + + # + # Convenience method to create a simple single kind context providing only + # a key and kind type. + # + # @param key [String] + # @param kind [String] + # + def self.with_key(key, kind = "user") + create({key: key, kind: kind}) + end + + # + # Create a single kind context from the provided hash. + # + # The provided hash must match the format as outlined in the + # {https://docs.launchdarkly.com/sdk/features/user-config SDK + # documentation}. + # + # TKTK: Update this link once we know what the new one will be. + # + # @param data [Hash] + # @return [LDContext] + # + def self.create(data) + return create_invalid_context("Cannot create an LDContext. Provided data is not a hash.") unless data.is_a?(Hash) + return create_context_from_legacy_data(data) unless data.has_key?(:kind) + + kind = data[:kind] + unless LaunchDarkly::Impl::Context.validate_kind(kind) + create_invalid_context("The kind (#{kind || 'nil'}) was not valid for the provided context.") + end + + key = data[:key] + unless LaunchDarkly::Impl::Context.validate_key(key) + return create_invalid_context("The key (#{key || 'nil'}) was not valid for the provided context.") + end + + meta = data.fetch(:_meta, {}) + private_attributes = meta[:privateAttributes] + if private_attributes && !private_attributes.is_a?(Array) + return create_invalid_context("The provided private attributes are not an array") + end + + attributes = {} + data.each do |k, v| + # :secondary is not a supported top level key in the new schema. + # However, someone could still include it so we need to ignore it. + attributes[k] = v.clone unless [:key, :kind, :_meta, :secondary].include? k + end + + new(key, kind, meta[:secondary], attributes, private_attributes) + end + + # + # Create a multi-kind context from the array of LDContexts provided. + # + # A multi-kind context is comprised of two or more single kind contexts. + # You cannot include a multi-kind context instead another multi-kind + # context. + # + # Additionally, the kind of each single-kind context must be unique. For + # instance, you cannot create a multi-kind context that includes two user + # kind contexts. + # + # If you attempt to create a multi-kind context from one single-kind + # context, this method will return the single-kind context instead of a new + # multi-kind context wrapping that one single-kind. + # + # @param contexts [Array] + # @return LDContext + # + def self.create_multi(contexts) + return create_invalid_context("Multi-kind context requires an array of LDContexts") unless contexts.is_a?(Array) + return create_invalid_context("Multi-kind context requires at least one context") if contexts.empty? 
+ + kinds = Set.new + contexts.each do |context| + if !context.is_a?(LDContext) + return create_invalid_context("Provided context is not an instance of LDContext") + elsif !context.valid? + return create_invalid_context("Provided context #{context.key} is invalid") + elsif context.multi_kind? + return create_invalid_context("Provided context #{context.key} is a multi-kind context") + elsif kinds.include? context.kind + return create_invalid_context("Kind #{context.kind} cannot occur twice in the same multi-kind context") + end + + kinds.add(context.kind) + end + + return contexts[0] if contexts.length == 1 + + new(nil, "multi", nil, nil, nil, nil, contexts) + end + + # + # @param error [String] + # @return LDContext + # + private_class_method def self.create_invalid_context(error) + return new(nil, nil, nil, nil, nil, "Cannot create an LDContext. Provided data is not a hash.") + end + + # + # @param data [Hash] + # @return LDContext + # + private_class_method def self.create_context_from_legacy_data(data) + key = data[:key] + + # Legacy users are allowed to have "" as a key but they cannot have nil as a key. + return create_invalid_context("The key for the context was not valid") if key.nil? + + attributes = data[:custom].clone || {} + built_in_attributes = [:key, :ip, :email, :name, :avatar, :firstName, :lastName, :country, :anonymous] + built_in_attributes.each do |attr| + attributes[attr] = data[attr].clone if data.has_key? attr + end + + private_attributes = data[:privateAttributeNames] + if private_attributes && !private_attributes.is_a?(Array) + return create_invalid_context("The provided private attributes are not an array") + end + + return new(key, "user", data[:secondary], attributes, private_attributes) + end + end +end diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb new file mode 100644 index 00000000..b02a60d2 --- /dev/null +++ b/lib/ldclient-rb/impl/context.rb @@ -0,0 +1,23 @@ +module LaunchDarkly + module Impl + module Context + # + # @param kind + # @return [Boolean] + # + def self.validate_kind(kind) + return false unless kind.is_a?(String) + kind.match?(/^[\w.-]+$/) && kind != "kind" && kind != "multi" + end + + # + # @param key + # @return [Boolean] + # + def self.validate_key(key) + return false unless key.is_a?(String) + key != "" + end + end + end +end diff --git a/spec/context_spec.rb b/spec/context_spec.rb new file mode 100644 index 00000000..268e0120 --- /dev/null +++ b/spec/context_spec.rb @@ -0,0 +1,166 @@ +require "ldclient-rb/context" + +describe LaunchDarkly::LDContext do + subject { LaunchDarkly::LDContext } + + it "returns nil for any value if invalid" do + result = subject.create({key: "", kind: "user", name: "testing"}) + + expect(result.valid?).to be_falsey + + expect(result.key).to be_nil + expect(result.get_value(:key)).to be_nil + + expect(result.kind).to be_nil + expect(result.get_value(:kind)).to be_nil + + expect(result.get_value(:name)).to be_nil + end + + describe "legacy users contexts" do + it "can be created using the legacy user format" do + context = { + key: "user-key", + custom: { + address: { + street: "123 Main St.", + city: "Every City", + state: "XX", + }, + }, + } + result = subject.create(context) + expect(result).to be_a(LaunchDarkly::LDContext) + expect(result.key).to eq("user-key") + expect(result.kind).to eq("user") + expect(result.valid?).to be_truthy + end + + it "allows an empty string for a key, but it cannot be missing or nil" do + expect(subject.create({key: ""}).valid?).to be_truthy + 
expect(subject.create({key: nil}).valid?).to be_falsey + expect(subject.create({}).valid?).to be_falsey + end + + it "requires privateAttributeNames to be an array" do + context = { + key: "user-key", + privateAttributeNames: "not an array", + } + expect(subject.create(context).valid?).to be_falsey + end + + it "overwrite custom properties with built-ins when collisons occur" do + context = { + key: "user-key", + secondary: "secondary", + avatar: "avatar", + custom: { + secondary: "custom secondary", + avatar: "custom avatar", + }, + } + + result = subject.create(context) + expect(result.get_value(:secondary)).to eq("secondary") + expect(result.get_value(:avatar)).to eq("avatar") + end + end + + describe "single kind contexts" do + it "can be created using the new format" do + context = { + key: "launchdarkly", + kind: "org", + address: { + street: "1999 Harrison St Suite 1100", + city: "Oakland", + state: "CA", + zip: "94612", + }, + } + result = subject.create(context) + expect(result).to be_a(LaunchDarkly::LDContext) + expect(result.key).to eq("launchdarkly") + expect(result.kind).to eq("org") + expect(result.valid?).to be_truthy + end + + it "do not allow empty strings or nil values for keys" do + expect(subject.create({kind: "user", key: ""}).valid?).to be_falsey + expect(subject.create({kind: "user", key: nil}).valid?).to be_falsey + expect(subject.create({kind: "user"}).valid?).to be_falsey + end + + it "require privateAttributes to be an array" do + context = { + key: "user-key", + kind: "user", + _meta: { + privateAttributes: "not an array", + }, + } + expect(subject.create(context).valid?).to be_falsey + end + + it "overwrite secondary property if also specified at top level" do + context = { + key: "user-key", + kind: "user", + secondary: "invalid secondary", + _meta: { + secondary: "real secondary", + }, + } + + result = subject.create(context) + expect(result.get_value(:secondary)).to eq("real secondary") + end + end + + describe "multi-kind contexts" do + it "can be created from single kind contexts" do + user_context = subject.create({key: "user-key"}) + org_context = subject.create({key: "org-key", kind: "org"}) + multi_context = subject.create_multi([user_context, org_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context.key).to be_nil + expect(multi_context.kind).to eq("multi") + expect(multi_context.valid?).to be_truthy + end + + it "will return the single kind context if only one is provided" do + user_context = subject.create({key: "user-key"}) + multi_context = subject.create_multi([user_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context).to eq(user_context) + end + + it "cannot include another multi-kind context" do + user_context = subject.create({key: "user-key"}) + org_context = subject.create({key: "org-key", kind: "org"}) + embedded_multi_context = subject.create_multi([user_context, org_context]) + multi_context = subject.create_multi([embedded_multi_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context.valid?).to be_falsey + end + + it "are invalid if no contexts are provided" do + multi_context = subject.create_multi([]) + expect(multi_context.valid?).to be_falsey + end + + it "are invalid if a single context is invalid" do + valid_context = subject.create({kind: "user", key: "user-key"}) + invalid_context = subject.create({kind: "org"}) + multi_context = subject.create_multi([valid_context, invalid_context]) + + expect(valid_context.valid?).to 
be_truthy + expect(invalid_context.valid?).to be_falsey + expect(multi_context.valid?).to be_falsey + end + end +end diff --git a/spec/impl/context_spec.rb b/spec/impl/context_spec.rb new file mode 100644 index 00000000..3e87f1f6 --- /dev/null +++ b/spec/impl/context_spec.rb @@ -0,0 +1,31 @@ +require "ldclient-rb/impl/context" + +describe LaunchDarkly::Impl::Context do + subject { LaunchDarkly::Impl::Context } + + it "can validate kind correctly" do + test_cases = [ + [:user, false, "Kind is not a string"], + ["kind", false, "Kind cannot be 'kind'"], + ["multi", false, "Kind cannot be 'multi'"], + ["user@type", false, "Kind cannot include invalid characters"], + ["org", true, "Some kinds are valid"], + ] + + test_cases.each do |input, expected, _descr| + expect(subject.validate_kind(input)).to eq(expected) + end + end + + it "can validate a key correctly" do + test_cases = [ + [:key, false, "Key is not a string"], + ["", false, "Key cannot be ''"], + ["key", true, "Some keys are valid"], + ] + + test_cases.each do |input, expected, _descr| + expect(subject.validate_kind(input)).to eq(expected) + end + end +end From 0362d35b4ae565a9cacba259962d8899706c7110 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 20 Oct 2022 17:07:50 -0400 Subject: [PATCH 255/292] Add reference based value retrieval (#207) This commit introduces the References type used for targeting complex attributes in the new LDContexts. References are expected to be created through static factory methods: ```ruby LaunchDarkly::Reference.create("/a/b") LaunchDarkly::Reference.create_literal("/a/b") ``` These references can be used to retrieve values from an existing LDContext ```ruby ref = LaunchDarkly::Reference.create("/a/b") result = context.get_value_for_reference(ref) ``` --- contract-tests/big_segment_store_fixture.rb | 2 +- lib/ldclient-rb.rb | 1 + lib/ldclient-rb/context.rb | 118 +++++-- lib/ldclient-rb/reference.rb | 274 ++++++++++++++++ spec/context_spec.rb | 338 +++++++++++++------- spec/reference_spec.rb | 110 +++++++ 6 files changed, 701 insertions(+), 142 deletions(-) create mode 100644 lib/ldclient-rb/reference.rb create mode 100644 spec/reference_spec.rb diff --git a/contract-tests/big_segment_store_fixture.rb b/contract-tests/big_segment_store_fixture.rb index 8f228e9a..db17be09 100644 --- a/contract-tests/big_segment_store_fixture.rb +++ b/contract-tests/big_segment_store_fixture.rb @@ -19,6 +19,6 @@ def get_membership(user_hash) end def stop - HTTP.delete("#{@uri}") + HTTP.delete(@uri) end end diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 2826b453..6a57c953 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -16,6 +16,7 @@ module LaunchDarkly require "ldclient-rb/in_memory_store" require "ldclient-rb/config" require "ldclient-rb/context" +require "ldclient-rb/reference" require "ldclient-rb/newrelic" require "ldclient-rb/stream" require "ldclient-rb/polling" diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 1b682f76..c50838a3 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -1,5 +1,6 @@ require 'set' require 'ldclient-rb/impl/context' +require 'ldclient-rb/reference' module LaunchDarkly # LDContext is a collection of attributes that can be referenced in flag @@ -62,33 +63,113 @@ def valid? @error.nil? end - # TODO: Update this method to support references. # - # This method will be changing in subsequent PRs. 
Eventually it will - # receive a Reference or a string that we will turn into a Reference and - # then we will use that new reference to retrieve the correct value. + # get_value looks up the value of any attribute of the Context by name. + # This includes only attributes that are addressable in evaluations-- not + # metadata such as private attributes. # - # However, I want to break this up into multiple PRs. So for now, this is - # doing some very basic lookups so I can verify the little bit of behavior - # I have so far. + # For a single-kind context, the attribute name can be any custom attribute. + # It can also be one of the built-in ones like "kind", "key", or "name". # - # Later work will update this code and the tests. + # TODO: Update this paragraph once we implement these methods in ruby # - # @param attribute [Symbol] + # For a multi-kind context, the only supported attribute name is "kind". + # Use individual_context_by_index(), individual_context_by_name(), or + # get_all_individual_contexts() to inspect a Context for a particular kind + # and then get its attributes. + # + # This method does not support complex expressions for getting individual + # values out of JSON objects or arrays, such as "/address/street". Use + # {#get_value_for_reference} for that purpose. + # + # If the value is found, the return value is the attribute value; + # otherwise, it is nil. + # + # @param attribute [String, Symbol] + # @return [any] # def get_value(attribute) + reference = Reference.create_literal(attribute) + get_value_for_reference(reference) + end + + # + # get_value_for_reference looks up the value of any attribute of the + # Context, or a value contained within an attribute, based on a {Reference} + # instance. This includes only attributes that are addressable in + # evaluations-- not metadata such as private attributes. + # + # This implements the same behavior that the SDK uses to resolve attribute + # references during a flag evaluation. In a single-kind context, the + # {Reference} can represent a simple attribute name-- either a built-in one + # like "name" or "key", or a custom attribute -- or, it can be a + # slash-delimited path using a JSON-Pointer-like syntax. See {Reference} + # for more details. + # + # TODO: Update this paragraph once we implement these methods in ruby + # + # For a multi-kind context, the only supported attribute name is "kind". + # Use individual_context_by_index(), individual_context_by_name(), or + # get_all_individual_contexts() to inspect a Context for a particular kind + # and then get its attributes. + # + # If the value is found, the return value is the attribute value; + # otherwise, it is nil. + # + # @param reference [Reference] + # @return [any] + # + def get_value_for_reference(reference) return nil unless valid? + return nil unless reference.is_a?(Reference) + return nil unless reference.error.nil? + + first_component = reference.component(0) + + if multi_kind? + if reference.depth == 1 && first_component == :kind + return kind + end - case attribute - when :key - @key - when :kind - @kind - when :secondary - @secondary - else - @attributes[attribute] + # Multi-kind contexts have no other addressable attributes + return nil end + + value = get_top_level_addressable_attribute_single_kind(first_component) + return nil if value.nil? 
+ + (1...reference.depth).each do |i| + name = reference.component(i) + + return nil unless value.is_a?(Hash) + return nil unless value.has_key?(name) + + value = value[name] + end + + value + end + + # + # Retrieve the value of any top level, addressable attribute. + # + # This method returns an array of two values. The first element is the + # value of the requested attribute or nil if it does not exist. The second + # value will be true if the attribute exists; otherwise, it will be false. + # + # @param name [Symbol] + # @return [Array(any)] + # + private def get_top_level_addressable_attribute_single_kind(name) + if name == :kind + return kind + elsif name == :key + return key + elsif name == :secondary + return @secondary + end + + @attributes[name] end # @@ -205,6 +286,7 @@ def self.create_multi(contexts) return create_invalid_context("The key for the context was not valid") if key.nil? attributes = data[:custom].clone || {} + attributes[:anonymous] = false built_in_attributes = [:key, :ip, :email, :name, :avatar, :firstName, :lastName, :country, :anonymous] built_in_attributes.each do |attr| attributes[attr] = data[attr].clone if data.has_key? attr diff --git a/lib/ldclient-rb/reference.rb b/lib/ldclient-rb/reference.rb new file mode 100644 index 00000000..29a8f227 --- /dev/null +++ b/lib/ldclient-rb/reference.rb @@ -0,0 +1,274 @@ +module LaunchDarkly + # + # Reference is an attribute name or path expression identifying a value + # within a Context. + # + # This type is mainly intended to be used internally by LaunchDarkly SDK and + # service code, where efficiency is a major concern so it's desirable to do + # any parsing or preprocessing just once. Applications are unlikely to need + # to use the Reference type directly. + # + # It can be used to retrieve a value with LDContext.get_value_for_reference() + # or to identify an attribute or nested value that should be considered + # private. + # + # Parsing and validation are done at the time that the Reference is + # constructed. If a Reference instance was created from an invalid string, it + # is considered invalid and its {Reference#error} attribute will return a + # non-nil error. + # + # ## Syntax + # + # The string representation of an attribute reference in LaunchDarkly JSON + # data uses the following syntax: + # + # If the first character is not a slash, the string is interpreted literally + # as an attribute name. An attribute name can contain any characters, but + # must not be empty. + # + # If the first character is a slash, the string is interpreted as a + # slash-delimited path where the first path component is an attribute name, + # and each subsequent path component is the name of a property in a JSON + # object. Any instances of the characters "/" or "~" in a path component are + # escaped as "~1" or "~0" respectively. This syntax deliberately resembles + # JSON Pointer, but no JSON Pointer behaviors other than those mentioned here + # are supported. + # + # ## Examples + # + # Suppose there is a context whose JSON implementation looks like this: + # + # { + # "kind": "user", + # "key": "value1", + # "address": { + # "street": { + # "line1": "value2", + # "line2": "value3" + # }, + # "city": "value4" + # }, + # "good/bad": "value5" + # } + # + # The attribute references "key" and "/key" would both point to "value1". + # + # The attribute reference "/address/street/line1" would point to "value2". + # + # The attribute references "good/bad" and "/good~1bad" would both point to + # "value5". 
+ # + class Reference + ERR_EMPTY = 'empty reference' + private_constant :ERR_EMPTY + + ERR_INVALID_ESCAPE_SEQUENCE = 'invalid escape sequence' + private_constant :ERR_INVALID_ESCAPE_SEQUENCE + + ERR_DOUBLE_TRAILING_SLASH = 'double or trailing slash' + private_constant :ERR_DOUBLE_TRAILING_SLASH + + # + # Returns nil for a valid Reference, or a non-nil error value for an + # invalid Reference. + # + # A Reference is invalid if the input string is empty, or starts with a + # slash but is not a valid slash-delimited path, or starts with a slash and + # contains an invalid escape sequence. + # + # Otherwise, the Reference is valid, but that does not guarantee that such + # an attribute exists in any given Context. For instance, + # Reference.create("name") is a valid Reference, but a specific Context + # might or might not have a name. + # + # See comments on the Reference type for more details of the attribute + # reference syntax. + # + # @return [String, nil] + # + attr_reader :error + + # + # Returns the attribute reference as a string, in the same format provided + # to {#create}. + # + # If the Reference was created with {#create}, this value is identical to + # the original string. If it was created with {#create_literal}, the value + # may be different due to unescaping (for instance, an attribute whose name + # is "/a" would be represented as "~1a"). + # + # @return [String, nil] + # + attr_reader :raw_path + + def initialize(raw_path, components = [], error = nil) + @raw_path = raw_path + # @type [Array] + @components = components + @error = error + end + private_class_method :new + + # + # Creates a Reference from a string. For the supported syntax and examples, + # see comments on the Reference type. + # + # This constructor always returns a Reference that preserves the original + # string, even if validation fails, so that accessing {#raw_path} (or + # serializing the Reference to JSON) will produce the original string. If + # validation fails, {#error} will return a non-nil error and any SDK method + # that takes this Reference as a parameter will consider it invalid. + # + # @param value [String, Symbol] + # @return [Reference] + # + def self.create(value) + unless value.is_a?(String) || value.is_a?(Symbol) + return new(value, [], ERR_EMPTY) + end + + value = value.to_s if value.is_a?(Symbol) + + return new(value, [], ERR_EMPTY) if value.empty? || value == "/" + + unless value.start_with? "/" + return new(value, [value.to_sym]) + end + + if value.end_with? "/" + return new(value, [], ERR_DOUBLE_TRAILING_SLASH) + end + + components = [] + value[1..].split("/").each do |component| + if component.empty? + return new(value, [], ERR_DOUBLE_TRAILING_SLASH) + end + + path, error = unescape_path(component) + + if error + return new(value, [], error) + end + + components << path.to_sym + end + + new(value, components) + end + + # + # create_literal is similar to {#create} except that it always + # interprets the string as a literal attribute name, never as a + # slash-delimited path expression. There is no escaping or unescaping, even + # if the name contains literal '/' or '~' characters. Since an attribute + # name can contain any characters, this method always returns a valid + # Reference unless the name is empty. + # + # For example: Reference.create_literal("name") is exactly equivalent to + # Reference.create("name"). 
Reference.create_literal("a/b") is exactly + # equivalent to Reference.create("a/b") (since the syntax used by {#create} + # treats the whole string as a literal as long as it does not start with a + # slash), or to Reference.create("/a~1b"). + # + # @param value [String, String] + # @return [Reference] + # + def self.create_literal(value) + unless value.is_a?(String) || value.is_a?(Symbol) + return new(value, [], ERR_EMPTY) + end + + value = value.to_s if value.is_a?(Symbol) + + return new(value, [], ERR_EMPTY) if value.empty? + return new(value, [value.to_sym]) if value[0] != '/' + + escaped = "/" + value.gsub('~', '~0').gsub('/', '~1') + new(escaped, [value.to_sym]) + end + + # + # Returns the number of path components in the Reference. + # + # For a simple attribute reference such as "name" with no leading slash, + # this returns 1. + # + # For an attribute reference with a leading slash, it is the number of + # slash-delimited path components after the initial slash. For instance, + # NewRef("/a/b").Depth() returns 2. + # + # @return [Integer] + # + def depth + @components.size + end + + # + # Retrieves a single path component from the attribute reference. + # + # For a simple attribute reference such as "name" with no leading slash, if + # index is zero, {#component} returns the attribute name as a symbol. + # + # For an attribute reference with a leading slash, if index is non-negative + # and less than {#depth}, Component returns the path component as a symbol. + # + # If index is out of range, it returns nil. + # + # Reference.create("a").component(0) # returns "a" + # Reference.create("/a/b").component(1) # returns "b" + # + # @param index [Integer] + # @return [Symbol, nil] + # + def component(index) + return nil if index < 0 || index >= depth + + @components[index] + end + + # + # Performs unescaping of attribute reference path components: + # + # "~1" becomes "/" + # "~0" becomes "~" + # "~" followed by any character other than "0" or "1" is invalid + # + # This method returns an array of two values. The first element of the + # array is the path if unescaping was valid; otherwise, it will be nil. The + # second value is an error string, or nil if the unescaping was successful. + # + # @param path [String] + # @return [Array([String, nil], [String, nil])] Returns a fixed size array. + # + private_class_method def self.unescape_path(path) + # If there are no tildes then there's definitely nothing to do + return path, nil unless path.include? 
'~' + + out = "" + i = 0 + while i < path.size + if path[i] != "~" + out << path[i] + i += 1 + next + end + + return nil, ERR_INVALID_ESCAPE_SEQUENCE if i + 1 == path.size + + case path[i + 1] + when '0' + out << "~" + when '1' + out << '/' + else + return nil, ERR_INVALID_ESCAPE_SEQUENCE + end + + i += 2 + end + + [out, nil] + end + end +end diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 268e0120..cc6cb326 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -4,7 +4,7 @@ subject { LaunchDarkly::LDContext } it "returns nil for any value if invalid" do - result = subject.create({key: "", kind: "user", name: "testing"}) + result = subject.create({ key: "", kind: "user", name: "testing" }) expect(result.valid?).to be_falsey @@ -17,150 +17,242 @@ expect(result.get_value(:name)).to be_nil end - describe "legacy users contexts" do - it "can be created using the legacy user format" do - context = { - key: "user-key", - custom: { - address: { - street: "123 Main St.", - city: "Every City", - state: "XX", + describe "context construction" do + describe "legacy users contexts" do + it "can be created using the legacy user format" do + context = { + key: "user-key", + custom: { + address: { + street: "123 Main St.", + city: "Every City", + state: "XX", + }, }, - }, - } - result = subject.create(context) - expect(result).to be_a(LaunchDarkly::LDContext) - expect(result.key).to eq("user-key") - expect(result.kind).to eq("user") - expect(result.valid?).to be_truthy - end + } + result = subject.create(context) + expect(result).to be_a(LaunchDarkly::LDContext) + expect(result.key).to eq("user-key") + expect(result.kind).to eq("user") + expect(result.valid?).to be_truthy + end - it "allows an empty string for a key, but it cannot be missing or nil" do - expect(subject.create({key: ""}).valid?).to be_truthy - expect(subject.create({key: nil}).valid?).to be_falsey - expect(subject.create({}).valid?).to be_falsey - end + it "allows an empty string for a key, but it cannot be missing or nil" do + expect(subject.create({ key: "" }).valid?).to be_truthy + expect(subject.create({ key: nil }).valid?).to be_falsey + expect(subject.create({}).valid?).to be_falsey + end - it "requires privateAttributeNames to be an array" do - context = { - key: "user-key", - privateAttributeNames: "not an array", - } - expect(subject.create(context).valid?).to be_falsey - end + it "requires privateAttributeNames to be an array" do + context = { + key: "user-key", + privateAttributeNames: "not an array", + } + expect(subject.create(context).valid?).to be_falsey + end - it "overwrite custom properties with built-ins when collisons occur" do - context = { - key: "user-key", - secondary: "secondary", - avatar: "avatar", - custom: { - secondary: "custom secondary", - avatar: "custom avatar", - }, - } - - result = subject.create(context) - expect(result.get_value(:secondary)).to eq("secondary") - expect(result.get_value(:avatar)).to eq("avatar") - end - end + it "overwrite custom properties with built-ins when collisons occur" do + context = { + key: "user-key", + secondary: "secondary", + avatar: "avatar", + custom: { + secondary: "custom secondary", + avatar: "custom avatar", + }, + } - describe "single kind contexts" do - it "can be created using the new format" do - context = { - key: "launchdarkly", - kind: "org", - address: { - street: "1999 Harrison St Suite 1100", - city: "Oakland", - state: "CA", - zip: "94612", - }, - } - result = subject.create(context) - expect(result).to 
be_a(LaunchDarkly::LDContext) - expect(result.key).to eq("launchdarkly") - expect(result.kind).to eq("org") - expect(result.valid?).to be_truthy + result = subject.create(context) + expect(result.get_value(:secondary)).to eq("secondary") + expect(result.get_value(:avatar)).to eq("avatar") + end end - it "do not allow empty strings or nil values for keys" do - expect(subject.create({kind: "user", key: ""}).valid?).to be_falsey - expect(subject.create({kind: "user", key: nil}).valid?).to be_falsey - expect(subject.create({kind: "user"}).valid?).to be_falsey - end + describe "single kind contexts" do + it "can be created using the new format" do + context = { + key: "launchdarkly", + kind: "org", + address: { + street: "1999 Harrison St Suite 1100", + city: "Oakland", + state: "CA", + zip: "94612", + }, + } + result = subject.create(context) + expect(result).to be_a(LaunchDarkly::LDContext) + expect(result.key).to eq("launchdarkly") + expect(result.kind).to eq("org") + expect(result.valid?).to be_truthy + end + + it "do not allow empty strings or nil values for keys" do + expect(subject.create({ kind: "user", key: "" }).valid?).to be_falsey + expect(subject.create({ kind: "user", key: nil }).valid?).to be_falsey + expect(subject.create({ kind: "user" }).valid?).to be_falsey + end + + it "require privateAttributes to be an array" do + context = { + key: "user-key", + kind: "user", + _meta: { + privateAttributes: "not an array", + }, + } + expect(subject.create(context).valid?).to be_falsey + end - it "require privateAttributes to be an array" do - context = { - key: "user-key", - kind: "user", - _meta: { - privateAttributes: "not an array", - }, - } - expect(subject.create(context).valid?).to be_falsey + it "overwrite secondary property if also specified at top level" do + context = { + key: "user-key", + kind: "user", + secondary: "invalid secondary", + _meta: { + secondary: "real secondary", + }, + } + + result = subject.create(context) + expect(result.get_value(:secondary)).to eq("real secondary") + end end - it "overwrite secondary property if also specified at top level" do - context = { - key: "user-key", - kind: "user", - secondary: "invalid secondary", - _meta: { - secondary: "real secondary", - }, - } - - result = subject.create(context) - expect(result.get_value(:secondary)).to eq("real secondary") + describe "multi-kind contexts" do + it "can be created from single kind contexts" do + user_context = subject.create({ key: "user-key" }) + org_context = subject.create({ key: "org-key", kind: "org" }) + multi_context = subject.create_multi([user_context, org_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context.key).to be_nil + expect(multi_context.kind).to eq("multi") + expect(multi_context.valid?).to be_truthy + end + + it "will return the single kind context if only one is provided" do + user_context = subject.create({ key: "user-key" }) + multi_context = subject.create_multi([user_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context).to eq(user_context) + end + + it "cannot include another multi-kind context" do + user_context = subject.create({ key: "user-key" }) + org_context = subject.create({ key: "org-key", kind: "org" }) + embedded_multi_context = subject.create_multi([user_context, org_context]) + multi_context = subject.create_multi([embedded_multi_context]) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context.valid?).to be_falsey + end + + it "are invalid if no contexts 
are provided" do + multi_context = subject.create_multi([]) + expect(multi_context.valid?).to be_falsey + end + + it "are invalid if a single context is invalid" do + valid_context = subject.create({ kind: "user", key: "user-key" }) + invalid_context = subject.create({ kind: "org" }) + multi_context = subject.create_multi([valid_context, invalid_context]) + + expect(valid_context.valid?).to be_truthy + expect(invalid_context.valid?).to be_falsey + expect(multi_context.valid?).to be_falsey + end end end - describe "multi-kind contexts" do - it "can be created from single kind contexts" do - user_context = subject.create({key: "user-key"}) - org_context = subject.create({key: "org-key", kind: "org"}) - multi_context = subject.create_multi([user_context, org_context]) + describe "value retrieval" do + describe "supports simple attribute retrieval" do + it "can retrieve the correct simple attribute value" do + context = subject.create({ key: "my-key", kind: "org", name: "x", :"my-attr" => "y", :"/starts-with-slash" => "z" }) - expect(multi_context).to be_a(LaunchDarkly::LDContext) - expect(multi_context.key).to be_nil - expect(multi_context.kind).to eq("multi") - expect(multi_context.valid?).to be_truthy - end + expect(context.get_value("kind")).to eq("org") + expect(context.get_value("key")).to eq("my-key") + expect(context.get_value("name")).to eq("x") + expect(context.get_value("my-attr")).to eq("y") + expect(context.get_value("/starts-with-slash")).to eq("z") + end + + it "does not allow querying subpath/elements" do + object_value = { a: 1 } + array_value = [1] - it "will return the single kind context if only one is provided" do - user_context = subject.create({key: "user-key"}) - multi_context = subject.create_multi([user_context]) + context = subject.create({ key: "my-key", kind: "org", :"obj-attr" => object_value, :"array-attr" => array_value }) + expect(context.get_value("obj-attr")).to eq(object_value) + expect(context.get_value(:"array-attr")).to eq(array_value) - expect(multi_context).to be_a(LaunchDarkly::LDContext) - expect(multi_context).to eq(user_context) + expect(context.get_value(:"/obj-attr/a")).to be_nil + expect(context.get_value(:"/array-attr/0")).to be_nil + end end - it "cannot include another multi-kind context" do - user_context = subject.create({key: "user-key"}) - org_context = subject.create({key: "org-key", kind: "org"}) - embedded_multi_context = subject.create_multi([user_context, org_context]) - multi_context = subject.create_multi([embedded_multi_context]) + describe "supports retrieval" do + it "with only support kind for multi-kind contexts" do + user_context = subject.create({ key: 'user', name: 'Ruby', anonymous: true }) + org_context = subject.create({ key: 'ld', kind: 'org', name: 'LaunchDarkly', anonymous: false }) - expect(multi_context).to be_a(LaunchDarkly::LDContext) - expect(multi_context.valid?).to be_falsey - end + multi_context = subject.create_multi([user_context, org_context]) - it "are invalid if no contexts are provided" do - multi_context = subject.create_multi([]) - expect(multi_context.valid?).to be_falsey - end + [ + ['kind', eq('multi')], + ['key', be_nil], + ['name', be_nil], + ['anonymous', be_nil], + ].each do |(reference, matcher)| + expect(multi_context.get_value_for_reference(LaunchDarkly::Reference.create(reference))).to matcher + end + end + + it "with basic attributes" do + legacy_user = subject.create({ key: 'user', name: 'Ruby', privateAttributeNames: ['name'] }) + org_context = subject.create({ key: 'ld', kind: 'org', 
name: 'LaunchDarkly', anonymous: true, _meta: { privateAttributes: ['name'] } }) + + [ + # Simple top level attributes are accessible + ['kind', eq('user'), eq('org')], + ['key', eq('user'), eq('ld')], + ['name', eq('Ruby'), eq('LaunchDarkly')], + ['anonymous', eq(false), eq(true)], + + # Cannot access meta data + ['privateAttributeNames', be_nil, be_nil], + ['privateAttributes', be_nil, be_nil], + ].each do |(reference, user_matcher, org_matcher)| + ref = LaunchDarkly::Reference.create(reference) + expect(legacy_user.get_value_for_reference(ref)).to user_matcher + expect(org_context.get_value_for_reference(ref)).to org_matcher + end + end + + it "with complex attributes" do + address = { city: "Oakland", state: "CA", zip: 94612 } + tags = ["LaunchDarkly", "Feature Flags"] + nested = { upper: { middle: { name: "Middle Level", inner: { levels: [0, 1, 2] } }, name: "Upper Level" } } + + legacy_user = subject.create({ key: 'user', name: 'Ruby', custom: { address: address, tags: tags, nested: nested }}) + org_context = subject.create({ key: 'ld', kind: 'org', name: 'LaunchDarkly', anonymous: true, address: address, tags: tags, nested: nested }) + + [ + # Simple top level attributes are accessible + ['/address', eq(address)], + ['/address/city', eq('Oakland')], - it "are invalid if a single context is invalid" do - valid_context = subject.create({kind: "user", key: "user-key"}) - invalid_context = subject.create({kind: "org"}) - multi_context = subject.create_multi([valid_context, invalid_context]) + ['/tags', eq(tags)], - expect(valid_context.valid?).to be_truthy - expect(invalid_context.valid?).to be_falsey - expect(multi_context.valid?).to be_falsey + ['/nested/upper/name', eq('Upper Level')], + ['/nested/upper/middle/name', eq('Middle Level')], + ['/nested/upper/middle/inner/levels', eq([0, 1, 2])], + ].each do |(reference, matcher)| + ref = LaunchDarkly::Reference.create(reference) + expect(legacy_user.get_value_for_reference(ref)).to matcher + expect(org_context.get_value_for_reference(ref)).to matcher + end + end end end end diff --git a/spec/reference_spec.rb b/spec/reference_spec.rb new file mode 100644 index 00000000..38b5e403 --- /dev/null +++ b/spec/reference_spec.rb @@ -0,0 +1,110 @@ +require "ldclient-rb/reference" + +describe LaunchDarkly::Reference do + subject { LaunchDarkly::Reference } + + it "determines invalid formats" do + [ + # Empty reference failures + [nil, 'empty reference'], + ["", 'empty reference'], + ["/", 'empty reference'], + + # Double or trailing slashes + ["//", 'double or trailing slash'], + ["/a//b", 'double or trailing slash'], + ["/a/b/", 'double or trailing slash'], + + # Invalid escape sequence + ["/a~x", 'invalid escape sequence'], + ["/a~", 'invalid escape sequence'], + ["/a/b~x", 'invalid escape sequence'], + ["/a/b~", 'invalid escape sequence'], + + ].each do |(path, msg)| + ref = subject.create(path) + expect(ref.raw_path).to eq(path) + expect(ref.error).to eq(msg) + end + end + + describe "can handle valid formats" do + it "can process references without a leading slash" do + %w[key kind name name/with/slashes name~0~1with-what-looks-like-escape-sequences].each do |path| + ref = subject.create(path) + + expect(ref.raw_path).to eq(path) + expect(ref.error).to be_nil + expect(ref.depth).to eq(1) + end + end + + it "can handle simple references with a leading slash" do + [ + ["/key", :key], + ["/0", :"0"], + ["/name~1with~1slashes~0and~0tildes", :"name/with/slashes~and~tildes"], + ].each do |(path, component)| + ref = subject.create(path) + + 
expect(ref.raw_path).to eq(path) + expect(ref.error).to be_nil + expect(ref.depth).to eq(1) + expect(ref.component(0)).to eq(component) + end + end + + it "can access sub-components of varying depths" do + [ + ["key", 1, 0, :key], + ["/key", 1, 0, :key], + + ["/a/b", 2, 0, :a], + ["/a/b", 2, 1, :b], + + ["/a~1b/c", 2, 0, :"a/b"], + ["/a~0b/c", 2, 0, :"a~b"], + + ["/a/10/20/30x", 4, 1, :"10"], + ["/a/10/20/30x", 4, 2, :"20"], + ["/a/10/20/30x", 4, 3, :"30x"], + + # invalid arguments don't cause an error, they just return nil + ["", 0, 0, nil], + ["", 0, -1, nil], + + ["key", 1, -1, nil], + ["key", 1, 1, nil], + + ["/key", 1, -1, nil], + ["/key", 1, 1, nil], + + ["/a/b", 2, -1, nil], + ["/a/b", 2, 2, nil], + ].each do |(path, depth, index, component)| + ref = subject.create(path) + expect(ref.depth).to eq(depth) + expect(ref.component(index)).to eq(component) + end + end + end + + describe "creating literal references" do + it "can create valid references" do + [ + ["name", "name"], + ["a/b", "a/b"], + ["/a/b~c", "/~1a~1b~0c"], + ["/", "/~1"], + ].each do |(literal, path)| + expect(subject.create_literal(literal).raw_path).to eq(subject.create(path).raw_path) + end + end + + it("can detect invalid references") do + [nil, "", true].each do |value| + expect(subject.create_literal(value).error).to eq('empty reference') + end + end + end +end From c459824df3f2c22bd5fa3b04ad21a44f7bffbccc Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Wed, 26 Oct 2022 08:38:29 -0400 Subject: [PATCH 256/292] Basic changes to use contexts in evaluations instead of users (#208) This commit follows the general approach of the [equivalent PHP SDK PR][pr]. This replaces `LDUser` with `LDContext` in the parameters for evaluations, and makes the minimum necessary adjustments to allow evaluations to keep working as before as long as the context kind is "user". None of the new behavior defined in the U2C spec is implemented yet. Generation of evaluation events is temporarily disabled because the event logic hasn't been updated yet. U2C contract tests for evaluations are partially enabled; a lot of functionality is still missing, but all the tests that only cover previously-existing evaluation behavior are passing. 
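As a rough illustration of the new calling convention (the flag key and
context values here are made up, and `client` stands for an already
constructed LDClient instance), evaluations now accept either a context hash
or an LDContext where they previously took a user hash:

    context = LaunchDarkly::LDContext.create({ key: "org-key", kind: "org" })
    client.variation("my-flag-key", context, false)

    # A plain hash without a kind is still accepted and is treated as a
    # context of kind "user", as before:
    client.variation("my-flag-key", { key: "user-key" }, false)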
[pr]: https://github.com/launchdarkly/php-server-sdk-private/pull/103 --- Makefile | 28 ++- contract-tests/client_entity.rb | 10 +- contract-tests/testharness-suppressions.txt | 4 - lib/ldclient-rb/context.rb | 94 ++++++-- lib/ldclient-rb/impl/context.rb | 36 ++- lib/ldclient-rb/impl/evaluator.rb | 75 +++--- lib/ldclient-rb/impl/evaluator_bucketing.rb | 29 ++- lib/ldclient-rb/impl/evaluator_operators.rb | 29 --- lib/ldclient-rb/ldclient.rb | 84 +++---- spec/context_spec.rb | 26 ++ spec/impl/evaluator_big_segments_spec.rb | 6 +- spec/impl/evaluator_bucketing_spec.rb | 96 ++++---- spec/impl/evaluator_clause_spec.rb | 24 +- spec/impl/evaluator_operators_spec.rb | 35 --- spec/impl/evaluator_rule_spec.rb | 60 ++--- spec/impl/evaluator_segment_spec.rb | 8 +- spec/impl/evaluator_spec.rb | 108 ++++----- spec/impl/evaluator_spec_base.rb | 8 +- spec/ldclient_end_to_end_spec.rb | 163 ++++++------- spec/ldclient_evaluation_spec.rb | 45 +--- spec/ldclient_events_spec.rb | 251 ++++++++++---------- spec/mock_components.rb | 2 +- 22 files changed, 616 insertions(+), 605 deletions(-) delete mode 100644 contract-tests/testharness-suppressions.txt diff --git a/Makefile b/Makefile index 25811a67..708e71cd 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,29 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log +# TEST_HARNESS_PARAMS can be set to add -skip parameters for any contract tests that cannot yet pass +# Explanation of current skips: +# - "evaluation/parameterized/prerequisites": Can't pass yet because prerequisite cycle detection is not implemented. +# - various other "evaluation" subtests: These tests require context kind support. +# - "events": These test suites will be unavailable until more of the U2C implementation is done. +TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ + -skip 'evaluation/bucketing/bucket by non-key attribute' \ + -skip 'evaluation/bucketing/secondary' \ + -skip 'evaluation/bucketing/selection of context' \ + -skip 'evaluation/parameterized/attribute references' \ + -skip 'evaluation/parameterized/bad attribute reference errors' \ + -skip 'evaluation/parameterized/clause kind matching' \ + -skip 'evaluation/parameterized/prerequisites' \ + -skip 'evaluation/parameterized/segment match/included list is specific to user kind' \ + -skip 'evaluation/parameterized/segment match/includedContexts' \ + -skip 'evaluation/parameterized/segment match/excluded list is specific to user kind' \ + -skip 'evaluation/parameterized/segment match/excludedContexts' \ + -skip 'evaluation/parameterized/segment recursion' \ + -skip 'evaluation/parameterized/target match/context targets' \ + -skip 'evaluation/parameterized/target match/multi-kind' \ + -skip 'context type' \ + -skip 'big segments' \ + -skip 'events' + build-contract-tests: @cd contract-tests && bundle _2.2.33_ install @@ -11,8 +35,8 @@ start-contract-test-service-bg: @make start-contract-test-service >$(TEMP_TEST_OUTPUT) 2>&1 & run-contract-tests: - @curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/v1.0.0/downloader/run.sh \ - | VERSION=v1 PARAMS="-url http://localhost:9000 -debug -stop-service-at-end -skip-from ./contract-tests/testharness-suppressions.txt $(TEST_HARNESS_PARAMS)" sh + @curl -s https://raw.githubusercontent.com/launchdarkly/sdk-test-harness/main/downloader/run.sh \ + | VERSION=v2 PARAMS="-url http://localhost:9000 -debug -stop-service-at-end $(TEST_HARNESS_PARAMS)" sh contract-tests: build-contract-tests start-contract-test-service-bg run-contract-tests diff --git a/contract-tests/client_entity.rb 
b/contract-tests/client_entity.rb index 837b3a4a..f1c107ae 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -76,12 +76,12 @@ def evaluate(params) response = {} if params[:detail] - detail = @client.variation_detail(params[:flagKey], params[:user], params[:defaultValue]) + detail = @client.variation_detail(params[:flagKey], params[:context], params[:defaultValue]) response[:value] = detail.value response[:variationIndex] = detail.variation_index response[:reason] = detail.reason else - response[:value] = @client.variation(params[:flagKey], params[:user], params[:defaultValue]) + response[:value] = @client.variation(params[:flagKey], params[:context], params[:defaultValue]) end response @@ -93,15 +93,15 @@ def evaluate_all(params) opts[:with_reasons] = params[:withReasons] || false opts[:details_only_for_tracked_flags] = params[:detailsOnlyForTrackedFlags] || false - @client.all_flags_state(params[:user], opts) + @client.all_flags_state(params[:context], opts) end def track(params) - @client.track(params[:eventKey], params[:user], params[:data], params[:metricValue]) + @client.track(params[:eventKey], params[:context], params[:data], params[:metricValue]) end def identify(params) - @client.identify(params[:user]) + @client.identify(params[:context]) end def flush_events diff --git a/contract-tests/testharness-suppressions.txt b/contract-tests/testharness-suppressions.txt deleted file mode 100644 index 2b6f158b..00000000 --- a/contract-tests/testharness-suppressions.txt +++ /dev/null @@ -1,4 +0,0 @@ -events/alias events/from non-anonymous to non-anonymous -events/alias events/from non-anonymous to anonymous -events/alias events/from anonymous to non-anonymous -events/alias events/from anonymous to anonymous diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index c50838a3..4ca17b91 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -18,6 +18,9 @@ module LaunchDarkly # context, and the associated errors by calling {LDContext#valid?} and # {LDContext#error} class LDContext + KIND_DEFAULT = "user" + private_constant :KIND_DEFAULT + # @return [String] Returns the key for this context attr_reader :key @@ -31,15 +34,19 @@ class LDContext # @private # @param key [String] # @param kind [String] + # @param name [String, nil] + # @param anonymous [Boolean, nil] # @param secondary [String, nil] # @param attributes [Hash, nil] # @param private_attributes [Array, nil] # @param error [String, nil] # @param contexts [Array, nil] # - def initialize(key, kind, secondary = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) + def initialize(key, kind, name = nil, anonymous = nil, secondary = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) @key = key @kind = kind + @name = name + @anonymous = anonymous || false @secondary = secondary @attributes = attributes @private_attributes = private_attributes @@ -158,18 +165,23 @@ def get_value_for_reference(reference) # value will be true if the attribute exists; otherwise, it will be false. 
# # @param name [Symbol] - # @return [Array(any)] + # @return [any] # private def get_top_level_addressable_attribute_single_kind(name) - if name == :kind + case name + when :kind return kind - elsif name == :key + when :key return key - elsif name == :secondary + when :name + return @name + when :anonymous + return @anonymous + when :secondary return @secondary + else + @attributes&.fetch(name, nil) end - - @attributes[name] end # @@ -179,7 +191,7 @@ def get_value_for_reference(reference) # @param key [String] # @param kind [String] # - def self.with_key(key, kind = "user") + def self.with_key(key, kind = KIND_DEFAULT) create({key: key, kind: kind}) end @@ -209,20 +221,39 @@ def self.create(data) return create_invalid_context("The key (#{key || 'nil'}) was not valid for the provided context.") end + name = data[:name] + unless LaunchDarkly::Impl::Context.validate_name(name) + return create_invalid_context("The name value was set to a non-string value.") + end + + anonymous = data[:anonymous] + unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) + return create_invalid_context("The anonymous value was set to a non-boolean value.") + end + meta = data.fetch(:_meta, {}) private_attributes = meta[:privateAttributes] if private_attributes && !private_attributes.is_a?(Array) return create_invalid_context("The provided private attributes are not an array") end - attributes = {} + # We only need to create an attribute hash if there are keys set outside + # of the ones we store in dedicated instance variables. + # + # :secondary is not a supported top level key in the new schema. + # However, someone could still include it so we need to ignore it. + attributes = nil data.each do |k, v| - # :secondary is not a supported top level key in the new schema. - # However, someone could still include it so we need to ignore it. - attributes[k] = v.clone unless [:key, :kind, :_meta, :secondary].include? k + case k + when :kind, :key, :name, :anonymous, :secondary, :_meta + next + else + attributes ||= {} + attributes[k] = v.clone + end end - new(key, kind, meta[:secondary], attributes, private_attributes) + new(key.to_s, kind, name, anonymous, meta[:secondary], attributes, private_attributes) end # @@ -264,7 +295,7 @@ def self.create_multi(contexts) return contexts[0] if contexts.length == 1 - new(nil, "multi", nil, nil, nil, nil, contexts) + new(nil, "multi", nil, false, nil, nil, nil, nil, contexts) end # @@ -272,7 +303,7 @@ def self.create_multi(contexts) # @return LDContext # private_class_method def self.create_invalid_context(error) - return new(nil, nil, nil, nil, nil, "Cannot create an LDContext. Provided data is not a hash.") + return new(nil, nil, nil, false, nil, nil, nil, "Cannot create an LDContext. Provided data is not a hash.") end # @@ -285,11 +316,32 @@ def self.create_multi(contexts) # Legacy users are allowed to have "" as a key but they cannot have nil as a key. return create_invalid_context("The key for the context was not valid") if key.nil? - attributes = data[:custom].clone || {} - attributes[:anonymous] = false - built_in_attributes = [:key, :ip, :email, :name, :avatar, :firstName, :lastName, :country, :anonymous] - built_in_attributes.each do |attr| - attributes[attr] = data[attr].clone if data.has_key? 
attr + name = data[:name] + unless LaunchDarkly::Impl::Context.validate_name(name) + return create_invalid_context("The name value was set to a non-string value.") + end + + anonymous = data[:anonymous] + unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) + return create_invalid_context("The anonymous value was set to a non-boolean value.") + end + + custom = data[:custom] + unless custom.nil? || custom.is_a?(Hash) + return create_invalid_context("The custom value was set to a non-hash value.") + end + + # We only need to create an attribute hash if one of these keys exist. + # Everything else is stored in dedicated instance variables. + attributes = custom.clone + data.each do |k, v| + case k + when :ip, :email, :avatar, :firstName, :lastName, :country + attributes ||= {} + attributes[k] = v.clone + else + next + end end private_attributes = data[:privateAttributeNames] @@ -297,7 +349,7 @@ def self.create_multi(contexts) return create_invalid_context("The provided private attributes are not an array") end - return new(key, "user", data[:secondary], attributes, private_attributes) + return new(key.to_s, KIND_DEFAULT, name, anonymous, data[:secondary], attributes, private_attributes) end end end diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb index b02a60d2..e309ec73 100644 --- a/lib/ldclient-rb/impl/context.rb +++ b/lib/ldclient-rb/impl/context.rb @@ -2,7 +2,22 @@ module LaunchDarkly module Impl module Context # - # @param kind + # We allow consumers of this SDK to provide us with either a Hash or an + # instance of an LDContext. This is convenient for them but not as much + # for us. To make the conversion slightly more convenient for us, we have + # created this method. + # + # @param context [Hash, LDContext] + # @return [LDContext] + # + def self.make_context(context) + return context if context.is_a?(LDContext) + + LDContext.create(context) + end + + # + # @param kind [any] # @return [Boolean] # def self.validate_kind(kind) @@ -11,13 +26,30 @@ def self.validate_kind(kind) end # - # @param key + # @param key [any] # @return [Boolean] # def self.validate_key(key) return false unless key.is_a?(String) key != "" end + + # + # @param name [any] + # @return [Boolean] + # + def self.validate_name(name) + name.nil? || name.is_a?(String) + end + + # + # @param anonymous [any] + # @return [Boolean] + # + def self.validate_anonymous(anonymous) + return true if anonymous.nil? + [true, false].include? anonymous + end end end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index fc1a3c0d..43092a47 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -57,16 +57,11 @@ def self.error_result(errorKind, value = nil) # default value. Error conditions produce a result with a nil value and an error reason, not an exception. # # @param flag [Object] the flag - # @param user [Object] the user properties + # @param context [LDContext] the context properties # @return [EvalResult] the evaluation result - def evaluate(flag, user) + def evaluate(flag, context) result = EvalResult.new - if user.nil? || user[:key].nil? - result.detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED) - return result - end - - detail = eval_internal(flag, user, result) + detail = eval_internal(flag, context, result) if !result.big_segments_status.nil? 
# If big_segments_status is non-nil at the end of the evaluation, it means a query was done at # some point and we will want to include the status in the evaluation reason. @@ -86,18 +81,18 @@ def self.make_big_segment_ref(segment) # method is visible for testing private - def eval_internal(flag, user, state) + def eval_internal(flag, context, state) if !flag[:on] return EvaluatorHelpers.off_result(flag) end - prereq_failure_result = check_prerequisites(flag, user, state) + prereq_failure_result = check_prerequisites(flag, context, state) return prereq_failure_result if !prereq_failure_result.nil? - # Check user target matches + # Check context target matches (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| - if value == user[:key] + if value == context.key return EvaluatorHelpers.target_match_result(target, flag) end end @@ -107,24 +102,24 @@ def eval_internal(flag, user, state) rules = flag[:rules] || [] rules.each_index do |i| rule = rules[i] - if rule_match_user(rule, user, state) + if rule_match_context(rule, context, state) reason = rule[:_reason] # try to use cached reason for this rule reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? - return get_value_for_variation_or_rollout(flag, rule, user, reason, + return get_value_for_variation_or_rollout(flag, rule, context, reason, EvaluatorHelpers.rule_precomputed_results(rule)) end end # Check the fallthrough rule if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, EvaluationReason::fallthrough, + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], context, EvaluationReason::fallthrough, EvaluatorHelpers.fallthrough_precomputed_results(flag)) end return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end - def check_prerequisites(flag, user, state) + def check_prerequisites(flag, context, state) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -135,7 +130,7 @@ def check_prerequisites(flag, user, state) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, state) + prereq_res = eval_internal(prereq_flag, context, state) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] @@ -156,32 +151,32 @@ def check_prerequisites(flag, user, state) nil end - def rule_match_user(rule, user, state) + def rule_match_context(rule, context, state) return false if !rule[:clauses] (rule[:clauses] || []).each do |clause| - return false if !clause_match_user(clause, user, state) + return false if !clause_match_context(clause, context, state) end return true end - def clause_match_user(clause, user, state) - # In the case of a segment match operator, we check if the user is in any of the segments, + def clause_match_context(clause, context, state) + # In the case of a segment match operator, we check if the context is in any of the segments, # and possibly negate if clause[:op].to_sym == :segmentMatch result = (clause[:values] || []).any? { |v| segment = @get_segment.call(v) - !segment.nil? && segment_match_user(segment, user, state) + !segment.nil? && segment_match_context(segment, context, state) } clause[:negate] ? 
!result : result else - clause_match_user_no_segments(clause, user) + clause_match_context_no_segments(clause, context) end end - def clause_match_user_no_segments(clause, user) - user_val = EvaluatorOperators.user_value(user, clause[:attribute]) + def clause_match_context_no_segments(clause, context) + user_val = context.get_value(clause[:attribute]) return false if user_val.nil? op = clause[:op].to_sym @@ -194,12 +189,12 @@ def clause_match_user_no_segments(clause, user) clause[:negate] ? !result : result end - def segment_match_user(segment, user, state) - return false unless user[:key] - segment[:unbounded] ? big_segment_match_user(segment, user, state) : simple_segment_match_user(segment, user, true) + def segment_match_context(segment, context, state) + return false unless context.key + segment[:unbounded] ? big_segment_match_context(segment, context, state) : simple_segment_match_context(segment, context, true) end - def big_segment_match_user(segment, user, state) + def big_segment_match_context(segment, context, state) if !segment[:generation] # Big segment queries can only be done if the generation is known. If it's unset, # that probably means the data store was populated by an older SDK that doesn't know @@ -209,7 +204,7 @@ def big_segment_match_user(segment, user, state) return false end if !state.big_segments_status - result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(user[:key]) + result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(context.key) if result state.big_segments_membership = result.membership state.big_segments_status = result.status @@ -222,40 +217,40 @@ def big_segment_match_user(segment, user, state) membership = state.big_segments_membership included = membership.nil? ? nil : membership[segment_ref] return included if !included.nil? - simple_segment_match_user(segment, user, false) + simple_segment_match_context(segment, context, false) end - def simple_segment_match_user(segment, user, use_includes_and_excludes) + def simple_segment_match_context(segment, context, use_includes_and_excludes) if use_includes_and_excludes - return true if segment[:included].include?(user[:key]) - return false if segment[:excluded].include?(user[:key]) + return true if segment[:included].include?(context.key) + return false if segment[:excluded].include?(context.key) end (segment[:rules] || []).each do |r| - return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) + return true if segment_rule_match_context(r, context, segment[:key], segment[:salt]) end return false end - def segment_rule_match_user(rule, user, segment_key, salt) + def segment_rule_match_context(rule, context, segment_key, salt) (rule[:clauses] || []).each do |c| - return false unless clause_match_user_no_segments(c, user) + return false unless clause_match_context_no_segments(c, context) end # If the weight is absent, this rule matches return true if !rule[:weight] # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) + bucket = EvaluatorBucketing.bucket_context(context, segment_key, rule[:bucketBy].nil? ? 
"key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 return bucket < weight end private - def get_value_for_variation_or_rollout(flag, vr, user, reason, precomputed_results) - index, in_experiment = EvaluatorBucketing.variation_index_for_user(flag, vr, user) + def get_value_for_variation_or_rollout(flag, vr, context, reason, precomputed_results) + index, in_experiment = EvaluatorBucketing.variation_index_for_context(flag, vr, context) if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 11842f74..bd3a238b 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -1,4 +1,3 @@ - module LaunchDarkly module Impl # Encapsulates the logic for percentage rollouts. @@ -7,9 +6,9 @@ module EvaluatorBucketing # # @param flag [Object] the feature flag # @param rule [Object] the rule - # @param user [Object] the user properties + # @param context [LDContext] the context properties # @return [Number] the variation index, or nil if there is an error - def self.variation_index_for_user(flag, rule, user) + def self.variation_index_for_context(flag, rule, context) variation = rule[:variation] return variation, false if !variation.nil? # fixed variation @@ -20,7 +19,7 @@ def self.variation_index_for_user(flag, rule, user) bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] seed = rollout[:seed] - bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt], seed) # may not be present + bucket = bucket_context(context, flag[:key], bucket_by, flag[:salt], seed) # may not be present sum = 0; variations.each do |variate| if rollout[:kind] == "experiment" && !variate[:untracked] @@ -32,11 +31,11 @@ def self.variation_index_for_user(flag, rule, user) return variate[:variation], !!in_experiment end end - # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + # The context's bucket value was greater than or equal to the end of the last bucket. This could happen due # to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag # data could contain buckets that don't actually add up to 100000. Rather than returning an error in - # this case (or changing the scaling, which would potentially change the results for *all* users), we - # will simply put the user in the last bucket. + # this case (or changing the scaling, which would potentially change the results for *all* contexts), we + # will simply put the context in the last bucket. last_variation = variations[-1] in_experiment = rollout[:kind] == "experiment" && !last_variation[:untracked] @@ -46,23 +45,23 @@ def self.variation_index_for_user(flag, rule, user) end end - # Returns a user's bucket value as a floating-point value in `[0, 1)`. + # Returns a context's bucket value as a floating-point value in `[0, 1)`. 
# - # @param user [Object] the user properties + # @param context [LDContext] the context properties # @param key [String] the feature flag key (or segment key, if this is for a segment rule) - # @param bucket_by [String|Symbol] the name of the user attribute to be used for bucketing + # @param bucket_by [String|Symbol] the name of the context attribute to be used for bucketing # @param salt [String] the feature flag's or segment's salt value # @return [Number] the bucket value, from 0 inclusive to 1 exclusive - def self.bucket_user(user, key, bucket_by, salt, seed) - return nil unless user[:key] + def self.bucket_context(context, key, bucket_by, salt, seed) + return nil unless context.key - id_hash = bucketable_string_value(EvaluatorOperators.user_value(user, bucket_by)) + id_hash = bucketable_string_value(context.get_value(bucket_by)) if id_hash.nil? return 0.0 end - if user[:secondary] - id_hash += "." + user[:secondary].to_s + if context.get_value(:secondary) + id_hash += "." + context.get_value(:secondary).to_s end if seed diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index e54368e9..68e33a22 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -61,38 +61,9 @@ def self.apply(op, user_value, clause_value) end end - # Retrieves the value of a user attribute by name. - # - # Built-in attributes correspond to top-level properties in the user object. They are treated as strings and - # non-string values are coerced to strings, except for `anonymous` which is meant to be a boolean if present - # and is not currently coerced. This behavior is consistent with earlier versions of the Ruby SDK, but is not - # guaranteed to be consistent with other SDKs, since the evaluator specification is based on the strongly-typed - # SDKs where it is not possible for an attribute to have the wrong type. - # - # Custom attributes correspond to properties within the `custom` property, if any, and can be of any type. - # - # @param user [Object] the user properties - # @param attribute [String|Symbol] the attribute to get, for instance `:key` or `:name` or `:some_custom_attr` - # @return the attribute value, or nil if the attribute is unknown - def self.user_value(user, attribute) - attribute = attribute.to_sym - if BUILTINS.include? attribute - value = user[attribute] - return nil if value.nil? - (attribute == :anonymous) ? value : value.to_s - elsif !user[:custom].nil? - user[:custom][attribute] - else - nil - end - end - private - BUILTINS = Set[:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") - - private_constant :BUILTINS private_constant :NUMERIC_VERSION_COMPONENTS_REGEX def self.string_op(user_value, clause_value, fn) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f74e4dd6..20558e62 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -119,18 +119,6 @@ def flush @event_processor.flush end - # - # @param key [String] the feature flag key - # @param user [Hash] the user properties - # @param default [Boolean] (false) the value to use if the flag cannot be evaluated - # @return [Boolean] the flag value - # @deprecated Use {#variation} instead. - # - def toggle?(key, user, default = false) - @config.logger.warn { "[LDClient] toggle? is deprecated. 
Use variation instead" } - variation(key, user, default) - end - # # Creates a hash string that can be used by the JavaScript SDK to identify a user. # For more information, see [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#ruby). @@ -164,6 +152,8 @@ def initialized? @config.offline? || @config.use_ldd? || @data_source.initialized? end + # + # TODO: TKTK # # Determines the variation of a feature flag to present to a user. # @@ -191,16 +181,18 @@ def initialized? # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard - # @param user [Hash] a hash containing parameters for the end user requesting the flag + # @param context [Hash, LDContext] a hash or LDContext instance describing the context requesting the flag # @param default the default value of the flag; this is used if there is an error # condition making it impossible to find or evaluate the flag # # @return the variation to show the user, or the default value if there's an an error # - def variation(key, user, default) - evaluate_internal(key, user, default, false).value + def variation(key, context, default) + evaluate_internal(key, context, default, false).value end + # + # TODO: TKTK # # Determines the variation of a feature flag for a user, like {#variation}, but also # provides additional information about how this value was calculated. @@ -218,14 +210,14 @@ def variation(key, user, default) # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard - # @param user [Hash] a hash containing parameters for the end user requesting the flag + # @param context [Hash, LDContext] a hash or object describing the context requesting the flag, # @param default the default value of the flag; this is used if there is an error # condition making it impossible to find or evaluate the flag # # @return [EvaluationDetail] an object describing the result # - def variation_detail(key, user, default) - evaluate_internal(key, user, default, true) + def variation_detail(key, context, default) + evaluate_internal(key, context, default, true) end # @@ -284,25 +276,11 @@ def track(event_name, user, data = nil, metric_value = nil) end # - # Returns all feature flag values for the given user. - # - # @deprecated Please use {#all_flags_state} instead. Current versions of the - # client-side SDK will not generate analytics events correctly if you pass the - # result of `all_flags`. - # - # @param user [Hash] The end user requesting the feature flags - # @return [Hash] a hash of feature flag keys to values - # - def all_flags(user) - all_flags_state(user).values_map - end - - # - # Returns a {FeatureFlagsState} object that encapsulates the state of all feature flags for a given user, + # Returns a {FeatureFlagsState} object that encapsulates the state of all feature flags for a given context, # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # - # @param user [Hash] The end user requesting the feature flags + # @param context [Hash, LDContext] a hash or object describing the context requesting the flags, # @param options [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. 
@@ -314,7 +292,7 @@ def all_flags(user) # of the JSON data if you are passing the flag state to the front end. # @return [FeatureFlagsState] a {FeatureFlagsState} object which can be serialized to JSON # - def all_flags_state(user, options={}) + def all_flags_state(context, options={}) return FeatureFlagsState.new(false) if @config.offline? if !initialized? @@ -326,8 +304,9 @@ def all_flags_state(user, options={}) end end - unless user && !user[:key].nil? - @config.logger.error { "[LDClient] User and user key must be specified in all_flags_state" } + context = Impl::Context::make_context(context) + unless context.valid? + @config.logger.error { "[LDClient] Context was invalid for all_flags_state (#{context.error})" } return FeatureFlagsState.new(false) end @@ -347,7 +326,7 @@ def all_flags_state(user, options={}) next end begin - detail = @evaluator.evaluate(f, user).detail + detail = @evaluator.evaluate(f, context).detail rescue => exn detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) @@ -408,20 +387,22 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) end end + # @param context [Hash, LDContext] # @return [EvaluationDetail] - def evaluate_internal(key, user, default, with_reasons) + def evaluate_internal(key, context, default, with_reasons) if @config.offline? return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end - unless user - @config.logger.error { "[LDClient] Must specify user" } + if context.nil? + @config.logger.error { "[LDClient] Must specify context" } detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) return detail end - if user[:key].nil? - @config.logger.warn { "[LDClient] Variation called with nil user key; returning default value" } + context = Impl::Context::make_context(context) + unless context.valid? + @config.logger.error { "[LDClient] Context was invalid for flag evaluation (#{context.error}); returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) return detail end @@ -432,7 +413,8 @@ def evaluate_internal(key, user, default, with_reasons) else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) - record_unknown_flag_eval(key, user, default, detail.reason, with_reasons) + # TODO: Address when working on u2c events + # record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) return detail end end @@ -442,27 +424,31 @@ def evaluate_internal(key, user, default, with_reasons) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_FLAG_NOT_FOUND, default) - record_unknown_flag_eval(key, user, default, detail.reason, with_reasons) + # TODO: Address when working on u2c events + # record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) return detail end begin - res = @evaluator.evaluate(feature, user) + res = @evaluator.evaluate(feature, context) if !res.prereq_evals.nil? 
res.prereq_evals.each do |prereq_eval| - record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, user, prereq_eval.detail, with_reasons) + # TODO: Address when working on u2c events + # record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, context, prereq_eval.detail, with_reasons) end end detail = res.detail if detail.default_value? detail = EvaluationDetail.new(default, nil, detail.reason) end - record_flag_eval(feature, user, detail, default, with_reasons) + # TODO: Address when working on u2c events + # record_flag_eval(feature, context, detail, default, with_reasons) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) - record_flag_eval_error(feature, user, default, detail.reason, with_reasons) + # TODO: Address when working on u2c events + # record_flag_eval_error(feature, context, default, detail.reason, with_reasons) return detail end end diff --git a/spec/context_spec.rb b/spec/context_spec.rb index cc6cb326..4f748405 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -43,6 +43,19 @@ expect(subject.create({}).valid?).to be_falsey end + it "anonymous is required to be a boolean or nil" do + expect(subject.create({ key: "" }).valid?).to be_truthy + expect(subject.create({ key: "", anonymous: true }).valid?).to be_truthy + expect(subject.create({ key: "", anonymous: false }).valid?).to be_truthy + expect(subject.create({ key: "", anonymous: 0 }).valid?).to be_falsey + end + + it "name is required to be a string or nil" do + expect(subject.create({ key: "" }).valid?).to be_truthy + expect(subject.create({ key: "", name: "My Name" }).valid?).to be_truthy + expect(subject.create({ key: "", name: 0 }).valid?).to be_falsey + end + it "requires privateAttributeNames to be an array" do context = { key: "user-key", @@ -93,6 +106,19 @@ expect(subject.create({ kind: "user" }).valid?).to be_falsey end + it "anonymous is required to be a boolean or nil" do + expect(subject.create({ key: "key", kind: "user" }).valid?).to be_truthy + expect(subject.create({ key: "key", kind: "user", anonymous: true }).valid?).to be_truthy + expect(subject.create({ key: "key", kind: "user", anonymous: false }).valid?).to be_truthy + expect(subject.create({ key: "key", kind: "user", anonymous: 0 }).valid?).to be_falsey + end + + it "name is required to be a string or nil" do + expect(subject.create({ key: "key", kind: "user" }).valid?).to be_truthy + expect(subject.create({ key: "key", kind: "user", name: "My Name" }).valid?).to be_truthy + expect(subject.create({ key: "key", kind: "user", name: 0 }).valid?).to be_falsey + end + it "require privateAttributes to be an array" do context = { key: "user-key", diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 5b3c552b..0f246e4d 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -10,7 +10,7 @@ module Impl it "segment is not matched if there is no way to query it" do segment = factory.segment({ key: 'test', - included: [ user[:key] ], # included should be ignored for a big segment + included: [ user.key ], # included should be ignored for a big segment version: 1, unbounded: true, generation: 1, @@ -27,7 +27,7 @@ module Impl it "segment with no generation is not matched" do segment = factory.segment({ key: 'test', - included: [ user[:key] ], # included should be ignored for a big 
segment + included: [ user.key ], # included should be ignored for a big segment version: 1, unbounded: true, }) @@ -152,7 +152,7 @@ module Impl expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - expect(queries).to eq([ user[:key] ]) + expect(queries).to eq([ user.key ]) end end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index b0a94acb..690034ed 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -7,93 +7,93 @@ describe "seed exists" do let(:seed) { 61 } it "returns the expected bucket values for seed" do - user = { key: "userKeyA" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.09801207); - user = { key: "userKeyB" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.14483777); - user = { key: "userKeyC" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) + bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.9242641); end it "returns the same bucket regardless of hashKey and salt" do - user = { key: "userKeyA" } - bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed) - bucket3 = subject.bucket_user(user, "hashKey2", "key", "saltyC", seed) + user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket1 = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user, "hashKey1", "key", "saltyB", seed) + bucket3 = subject.bucket_context(user, "hashKey2", "key", "saltyC", seed) expect(bucket1).to eq(bucket2) expect(bucket2).to eq(bucket3) end it "returns a different bucket if the seed is not the same" do - user = { key: "userKeyA" } - bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed+1) + user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket1 = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user, "hashKey1", "key", "saltyB", seed+1) expect(bucket1).to_not eq(bucket2) end it "returns a different bucket if the user is not the same" do - user1 = { key: "userKeyA" } - user2 = { key: "userKeyB" } - bucket1 = subject.bucket_user(user1, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_user(user2, "hashKey1", "key", "saltyB", seed) + user1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + user2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket1 = subject.bucket_context(user1, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user2, "hashKey1", "key", "saltyB", seed) expect(bucket1).to_not eq(bucket2) end end it "gets expected bucket values for specific keys" do - user = { key: "userKeyA" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) + user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket = subject.bucket_context(user, 
"hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.42157587); - user = { key: "userKeyB" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) + user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.6708485); - user = { key: "userKeyC" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) + user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) + bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.10343106); end it "can bucket by int value (equivalent to string)" do - user = { + user = LaunchDarkly::LDContext.create({ key: "userkey", custom: { stringAttr: "33333", intAttr: 33333, }, - } - stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA", nil) - intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA", nil) + }) + stringResult = subject.bucket_context(user, "hashKey", "stringAttr", "saltyA", nil) + intResult = subject.bucket_context(user, "hashKey", "intAttr", "saltyA", nil) expect(intResult).to be_within(0.0000001).of(0.54771423) expect(intResult).to eq(stringResult) end it "cannot bucket by float value" do - user = { + user = LaunchDarkly::LDContext.create({ key: "userkey", custom: { floatAttr: 33.5, }, - } - result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA", nil) + }) + result = subject.bucket_context(user, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) end it "cannot bucket by bool value" do - user = { + user = LaunchDarkly::LDContext.create({ key: "userkey", custom: { boolAttr: true, }, - } - result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA", nil) + }) + result = subject.bucket_context(user, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) end end @@ -101,13 +101,13 @@ describe "variation_index_for_user" do context "rollout is not an experiment" do it "matches bucket" do - user = { key: "userkey" } + user = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_context(user, flag_key, "key", salt, nil) * 100000).truncate() expect(bucket_value).to be > 0 expect(bucket_value).to be < 100000 @@ -125,17 +125,17 @@ } flag = { key: flag_key, salt: salt } - result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) expect(result_variation).to be matched_variation expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do - user = { key: "userkey" } + user = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_context(user, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { @@ -147,7 +147,7 @@ } flag = { key: flag_key, salt: salt } - result_variation, inExperiment = 
subject.variation_index_for_user(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) expect(result_variation).to be 0 expect(inExperiment).to be(false) end @@ -156,9 +156,9 @@ context "rollout is an experiment" do it "returns whether user is in the experiment or not" do - user1 = { key: "userKeyA" } - user2 = { key: "userKeyB" } - user3 = { key: "userKeyC" } + user1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + user2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + user3 = LaunchDarkly::LDContext.create({ key: "userKeyC" }) flag_key = "flagkey" salt = "salt" seed = 61 @@ -177,24 +177,24 @@ } flag = { key: flag_key, salt: salt } - result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user1) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user1) expect(result_variation).to be(0) expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user2) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user2) expect(result_variation).to be(2) expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user3) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user3) expect(result_variation).to be(0) expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do - user = { key: "userkey" } + user = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" seed = 61 - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, seed) * 100000).truncate() + bucket_value = (subject.bucket_context(user, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { @@ -208,7 +208,7 @@ } flag = { key: flag_key, salt: salt } - result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) expect(result_variation).to be 0 expect(inExperiment).to be(true) end diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb index facf68de..2207abdc 100644 --- a/spec/impl/evaluator_clause_spec.rb +++ b/spec/impl/evaluator_clause_spec.rb @@ -6,48 +6,48 @@ module Impl evaluator_tests_with_and_without_preprocessing "Evaluator (clauses)" do |desc, factory| describe "#{desc} - evaluate", :evaluator_spec_base => true do it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } + context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'in', values: ['Bob'] } flag = factory.boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + context = LDContext.create({ key: 'x', name: 'Bob', custom: { legs: 4 } }) clause = { attribute: 'legs', op: 'in', values: [4] } flag = factory.boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } + context = LDContext.create({ key: 'x', name: 'Bob' }) 
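A minimal sketch of the migration these clause specs perform, assuming the ldclient-rb gem is loaded; the constructor and accessor calls mirror the surrounding code, while the specific values shown are illustrative rather than taken from any one test:

require "ldclient-rb"  # assumed entry point for the gem

# A hash without a :kind key is treated as a classic user context.
context = LaunchDarkly::LDContext.create({ key: "x", name: "Bob", custom: { legs: 4 } })
context.valid?             # => true
context.kind               # => "user"
context.get_value("name")  # => "Bob" (built-in attribute)
context.get_value("legs")  # => 4 (legacy custom attribute, addressable by name in clauses)
context.get_value("wings") # => nil (missing attribute, so an "in" clause cannot match)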
clause = { attribute: 'legs', op: 'in', values: [4] } flag = factory.boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } + context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'unknown', values: [4] } flag = factory.boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } + context = LDContext.create({ key: 'x', name: 'Bob' }) clause0 = { attribute: 'name', op: 'unknown', values: [4] } rule0 = { clauses: [ clause0 ], variation: 1 } clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } rule1 = { clauses: [ clause1 ], variation: 1 } flag = factory.boolean_flag_with_rules([rule0, rule1]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be true + expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "can be negated" do - user = { key: 'x', name: 'Bob' } + context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } flag = factory.boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user).detail.value).to be false + expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end end end diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index f57f5677..a38cb3f4 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -103,39 +103,4 @@ end end end - - describe "user_value" do - [:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous, :some_custom_attr].each do |attr| - it "returns nil if property #{attr} is not defined" do - expect(subject::user_value({}, attr)).to be nil - end - end - - [:key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name].each do |attr| - it "gets string value of string property #{attr}" do - expect(subject::user_value({ attr => 'x' }, attr)).to eq 'x' - end - - it "coerces non-string value of property #{attr} to string" do - expect(subject::user_value({ attr => 3 }, attr)).to eq '3' - end - end - - it "gets boolean value of property anonymous" do - expect(subject::user_value({ anonymous: true }, :anonymous)).to be true - expect(subject::user_value({ anonymous: false }, :anonymous)).to be false - end - - it "does not coerces non-boolean value of property anonymous" do - expect(subject::user_value({ anonymous: 3 }, :anonymous)).to eq 3 - end - - it "gets string value of custom property" do - expect(subject::user_value({ custom: { some_custom_attr: 'x' } }, :some_custom_attr)).to eq 'x' - end - - it "gets non-string value of custom property" do - expect(subject::user_value({ custom: { some_custom_attr: 3 } }, :some_custom_attr)).to eq 3 - end - end end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 68e724cd..211090f7 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -5,12 +5,12 @@ module LaunchDarkly module Impl evaluator_tests_with_and_without_preprocessing "Evaluator (rules)" do |desc, factory| describe "#{desc} - evaluate", 
:evaluator_spec_base => true do - it "matches user from rules" do + it "matches context from rules" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -19,10 +19,10 @@ module Impl it "reuses rule match result detail instances" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail.reason.rule_id).to eq 'ruleid' expect(result1.detail).to be result2.detail end @@ -31,10 +31,10 @@ module Impl it "returns an error if rule variation is too high" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -42,10 +42,10 @@ module Impl it "returns an error if rule variation is negative" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -53,10 +53,10 @@ module Impl it "returns an error if rule has neither variation nor rollout" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -65,19 +65,19 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) 
expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end - it "coerces user key to a string for evaluation" do + it "coerces context key to a string for evaluation" do clause = { attribute: 'key', op: 'in', values: ['999'] } flag = factory.boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: 999 }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.value).to eq(true) end @@ -87,8 +87,8 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: "userkey", secondary: 999 }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end @@ -97,9 +97,9 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -109,10 +109,10 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail).to eq(detail) expect(result2.detail).to be(result1.detail) end @@ -122,8 +122,8 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: "userkey", secondary: 999 }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -132,8 +132,8 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: "userkey", secondary: 999 }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -142,8 +142,8 @@ module 
Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } flag = factory.boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: "userkey", secondary: 999 }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 60c58ade..3e87a662 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -33,20 +33,20 @@ def test_segment_match(factory, segment) it 'explicitly includes user' do segment = make_segment('segkey') - segment[:included] = [ user[:key] ] + segment[:included] = [ user.key ] expect(test_segment_match(factory, segment)).to be true end it 'explicitly excludes user' do segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] + segment[:excluded] = [ user.key ] expect(test_segment_match(factory, segment)).to be false end it 'both includes and excludes user; include takes priority' do segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] + segment[:included] = [ user.key ] + segment[:excluded] = [ user.key ] expect(test_segment_match(factory, segment)).to be true end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 8dfdde05..c43eb195 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -15,9 +15,9 @@ module Impl fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::off) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -29,9 +29,9 @@ module Impl fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -45,10 +45,10 @@ module Impl fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::off) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail).to eq(detail) expect(result2.detail).to be(result1.detail) end @@ -62,10 +62,10 @@ module Impl fallthrough: { variation: 0 }, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -78,10 +78,10 @@ module Impl fallthrough: { variation: 0 }, variations: ['a', 
'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -95,10 +95,10 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result = e.evaluate(flag, user) + result = e.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -113,11 +113,11 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result1 = e.evaluate(flag, user) + result1 = e.evaluate(flag, context) expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') - result2 = e.evaluate(flag, user) + result2 = e.evaluate(flag, context) expect(result2.detail).to be result1.detail end end @@ -140,13 +140,13 @@ module Impl variations: ['d', 'e'], version: 2, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build - result = e.evaluate(flag, user) + result = e.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(expected_prereqs) end @@ -170,13 +170,13 @@ module Impl variations: ['d', 'e'], version: 2, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user) + result = e.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(expected_prereqs) end @@ -198,13 +198,13 @@ module Impl variations: ['d', 'e'], version: 2, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) expected_prereqs = [ PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user) + result = e.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(expected_prereqs) end @@ -226,13 +226,13 @@ module Impl variations: ['d', 'e'], version: 2, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) expected_prereqs = [ PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::fallthrough)), ] e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, user) + result = e.evaluate(flag, context) 
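# (Descriptive aside, not part of the patch.) The prerequisite specs in this block check
# two independent pieces of the result:
#   * result.detail is the dependent flag's own outcome: its fallthrough variation when
#     the prerequisite is satisfied, or its offVariation with an
#     EvaluationReason::prerequisite_failed reason when it is not;
#   * result.prereq_evals lists the prerequisite flag's evaluation as PrerequisiteEvalRecord
#     instances, which remain available even though the record_prereq_flag_eval event hook
#     is temporarily commented out (u2c TODO) earlier in this patch series.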
expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(expected_prereqs) end @@ -249,9 +249,9 @@ module Impl { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, ], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -269,10 +269,10 @@ module Impl { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, ], }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail).to eq(detail) expect(result2.detail).to be(result1.detail) end @@ -286,9 +286,9 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -301,9 +301,9 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -316,9 +316,9 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -331,14 +331,14 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end - it "matches user from targets" do + it "matches context from targets" do flag = factory.flag({ key: 'feature', on: true, @@ -349,9 +349,9 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -368,10 +368,10 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } + context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new('c', 
2, EvaluationReason::target_match) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail).to eq(detail) expect(result2.detail).to be(result1.detail) end @@ -387,9 +387,9 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) - result = basic_evaluator.evaluate(flag, user) + result = basic_evaluator.evaluate(flag, context) expect(result.detail).to eq(detail) expect(result.prereq_evals).to eq(nil) end @@ -404,10 +404,10 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - user = { key: 'x' } + context = LDContext.create({ key: 'x' }) detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) - result1 = basic_evaluator.evaluate(flag, user) - result2 = basic_evaluator.evaluate(flag, user) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) expect(result1.detail).to eq(detail) expect(result2.detail).to be(result1.detail) end @@ -421,8 +421,8 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: 'userkey' }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -435,8 +435,8 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: 'userkey' }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -449,8 +449,8 @@ module Impl offVariation: 1, variations: ['a', 'b', 'c'], }) - user = { key: 'userkey' } - result = basic_evaluator.evaluate(flag, user) + context = LDContext.create({ key: 'userkey' }) + result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 13a5f4a8..564095bc 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -50,7 +50,7 @@ def with_unknown_segment(key) end def with_big_segment_for_user(user, segment, included) - user_key = user[:key] + user_key = user.key @big_segment_memberships[user_key] = {} if !@big_segment_memberships.has_key?(user_key) @big_segment_memberships[user_key][Evaluator.make_big_segment_ref(segment)] = included self @@ -91,11 +91,11 @@ def build module EvaluatorSpecBase def user - { + LDContext::create({ key: "userkey", email: "test@example.com", name: "Bob", - } + }) end def logger @@ -110,7 +110,7 @@ def make_user_matching_clause(user, attr = :key) { attribute: attr.to_s, op: :in, - values: [ user[attr.to_sym] ], + values: [ user.get_value(attr) ], negate: false, } end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index a76fa866..d17a54bf 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -37,86 +37,89 @@ module LaunchDarkly end end - it "sends event without diagnostics" do - 
with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - - config = test_config( - send_events: true, - events_uri: events_server.base_uri.to_s, - diagnostic_opt_out: true - ) - with_client(config) do |client| - client.identify(basic_user) - client.flush - - req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ sdk_key ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data.length).to eq 1 - expect(data[0]["kind"]).to eq "identify" - end - end - end - - it "sends diagnostic event" do - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - events_server.setup_ok_response("/diagnostic", "") - - config = test_config( - send_events: true, - events_uri: events_server.base_uri.to_s - ) - with_client(config) do |client| - client.identify(basic_user) - client.flush - - req0, body0 = events_server.await_request_with_body - req1, body1 = events_server.await_request_with_body - req = req0.path == "/diagnostic" ? req0 : req1 - body = req0.path == "/diagnostic" ? body0 : body1 - expect(req.header['authorization']).to eq [ sdk_key ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data["kind"]).to eq "diagnostic-init" - end - end - end - - it "can use socket factory" do - with_server do |poll_server| - with_server do |events_server| - events_server.setup_ok_response("/bulk", "") - poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - - config = test_config( - stream: false, - data_source: nil, - send_events: true, - base_uri: "http://fake-polling-server", - events_uri: "http://fake-events-server", - diagnostic_opt_out: true, - socket_factory: SocketFactoryFromHash.new({ - "fake-polling-server" => poll_server.port, - "fake-events-server" => events_server.port, - }) - ) - with_client(config) do |client| - client.identify(basic_user) - client.flush - - req, body = events_server.await_request_with_body - expect(req.header['authorization']).to eq [ sdk_key ] - expect(req.header['connection']).to eq [ "Keep-Alive" ] - data = JSON.parse(body) - expect(data.length).to eq 1 - expect(data[0]["kind"]).to eq "identify" - end - end - end - end + # TODO: Fix for u2c events + # it "sends event without diagnostics" do + # with_server do |events_server| + # events_server.setup_ok_response("/bulk", "") + # + # config = test_config( + # send_events: true, + # events_uri: events_server.base_uri.to_s, + # diagnostic_opt_out: true + # ) + # with_client(config) do |client| + # client.identify(basic_user) + # client.flush + # + # req, body = events_server.await_request_with_body + # expect(req.header['authorization']).to eq [ sdk_key ] + # expect(req.header['connection']).to eq [ "Keep-Alive" ] + # data = JSON.parse(body) + # expect(data.length).to eq 1 + # expect(data[0]["kind"]).to eq "identify" + # end + # end + # end + + # TODO: Fix for u2c events + # it "sends diagnostic event" do + # with_server do |events_server| + # events_server.setup_ok_response("/bulk", "") + # events_server.setup_ok_response("/diagnostic", "") + # + # config = test_config( + # send_events: true, + # events_uri: events_server.base_uri.to_s + # ) + # with_client(config) do |client| + # client.identify(basic_user) + # client.flush + # + # req0, body0 = events_server.await_request_with_body + # req1, body1 = events_server.await_request_with_body + # req = req0.path == "/diagnostic" ? 
req0 : req1 + # body = req0.path == "/diagnostic" ? body0 : body1 + # expect(req.header['authorization']).to eq [ sdk_key ] + # expect(req.header['connection']).to eq [ "Keep-Alive" ] + # data = JSON.parse(body) + # expect(data["kind"]).to eq "diagnostic-init" + # end + # end + # end + + # TODO: Fix for u2c events + # it "can use socket factory" do + # with_server do |poll_server| + # with_server do |events_server| + # events_server.setup_ok_response("/bulk", "") + # poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + # + # config = test_config( + # stream: false, + # data_source: nil, + # send_events: true, + # base_uri: "http://fake-polling-server", + # events_uri: "http://fake-events-server", + # diagnostic_opt_out: true, + # socket_factory: SocketFactoryFromHash.new({ + # "fake-polling-server" => poll_server.port, + # "fake-events-server" => events_server.port, + # }) + # ) + # with_client(config) do |client| + # client.identify(basic_user) + # client.flush + # + # req, body = events_server.await_request_with_body + # expect(req.header['authorization']).to eq [ sdk_key ] + # expect(req.header['connection']).to eq [ "Keep-Alive" ] + # data = JSON.parse(body) + # expect(data.length).to eq 1 + # expect(data[0]["kind"]).to eq "identify" + # end + # end + # end + # end # TODO: TLS tests with self-signed cert end diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index 6af21ba6..cd4e5981 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -44,7 +44,7 @@ module LaunchDarkly it "can evaluate a flag that references a segment" do td = Integrations::TestData.data_source - segment = SegmentBuilder.new("segmentkey").included(basic_user[:key]).build + segment = SegmentBuilder.new("segmentkey").included(basic_user.key).build td.use_preconfigured_segment(segment) td.use_preconfigured_flag( FlagBuilder.new("flagkey").on(true).variations(true, false).rule( @@ -66,7 +66,7 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_user[:key], segment, true) + segstore.setup_segment_for_user(basic_user.key, segment, true) big_seg_config = BigSegmentsConfig.new(store: segstore) with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| @@ -132,7 +132,7 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_user[:key], segment, true) + segstore.setup_segment_for_user(basic_user.key, segment, true) segstore.setup_metadata(Time.now) big_seg_config = BigSegmentsConfig.new(store: segstore) @@ -144,45 +144,6 @@ module LaunchDarkly end end - describe "all_flags" do - let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } - let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } - let(:test_data) { - td = Integrations::TestData.data_source - td.use_preconfigured_flag(flag1) - td.use_preconfigured_flag(flag2) - td - } - - it "returns flag values" do - with_client(test_config(data_source: test_data)) do |client| - result = client.all_flags({ key: 'userkey' }) - expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - end - end - - it "returns empty map for nil user" do - with_client(test_config(data_source: test_data)) do |client| - result = client.all_flags(nil) - expect(result).to eq({}) - end - end - - it "returns empty map for nil user key" do - with_client(test_config(data_source: test_data)) do |client| - result = 
client.all_flags({}) - expect(result).to eq({}) - end - end - - it "returns empty map if offline" do - with_client(test_config(data_source: test_data, offline: true)) do |offline_client| - result = offline_client.all_flags(nil) - expect(result).to eq({}) - end - end - end - context "all_flags_state" do let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 8a4cca59..0a311bf6 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -5,6 +5,7 @@ require "model_builders" require "spec_helper" +# TODO: Fix all these commented out tests when addressing u2c events module LaunchDarkly describe "LDClient events tests" do def event_processor(client) @@ -18,26 +19,26 @@ def event_processor(client) end context "evaluation events - variation" do - it "unknown flag" do - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'badkey', nil, nil, 'default', nil, 'default', false, nil, nil - ) - client.variation("badkey", basic_user, "default") - end - end - - it "known flag" do - td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - - with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil - ) - client.variation("flagkey", basic_user, "default") - end - end + # it "unknown flag" do + # with_client(test_config) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'badkey', nil, nil, 'default', nil, 'default', false, nil, nil + # ) + # client.variation("badkey", basic_user, "default") + # end + # end + + # it "known flag" do + # td = Integrations::TestData.data_source + # td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + # + # with_client(test_config(data_source: td)) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil + # ) + # client.variation("flagkey", basic_user, "default") + # end + # end it "does not send event, and logs error, if user is nil" do td = Integrations::TestData.data_source @@ -52,79 +53,79 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is nil" do - td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - - logger = double().as_null_object - keyless_user = { key: nil } - - with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:record_eval_event) - expect(logger).to receive(:warn) - client.variation("flagkey", keyless_user, "default") - end - end - - it "sets trackEvents and reason if trackEvents is set for matched rule" do - td = Integrations::TestData.data_source - td.use_preconfigured_flag( - FlagBuilder.new("flagkey").version(100).on(true).variations("value") - .rule(RuleBuilder.new.variation(0).id("id").track_events(true) - .clause(Clauses.match_user(basic_user))) - .build - ) - - with_client(test_config(data_source: td)) do |client| - 
expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::rule_match(0, 'id'), - 'default', true, nil, nil - ) - client.variation("flagkey", basic_user, "default") - end - end - - it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do - td = Integrations::TestData.data_source - td.use_preconfigured_flag( - FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0) - .track_events_fallthrough(true).build - ) - - with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::fallthrough, - 'default', true, nil, nil - ) - client.variation("flagkey", basic_user, "default") - end - end + # it "does not send event, and logs warning, if user key is nil" do + # td = Integrations::TestData.data_source + # td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + # + # logger = double().as_null_object + # keyless_user = { key: nil } + # + # with_client(test_config(data_source: td, logger: logger)) do |client| + # expect(event_processor(client)).not_to receive(:record_eval_event) + # expect(logger).to receive(:warn) + # client.variation("flagkey", keyless_user, "default") + # end + # end + + # it "sets trackEvents and reason if trackEvents is set for matched rule" do + # td = Integrations::TestData.data_source + # td.use_preconfigured_flag( + # FlagBuilder.new("flagkey").version(100).on(true).variations("value") + # .rule(RuleBuilder.new.variation(0).id("id").track_events(true) + # .clause(Clauses.match_user(basic_user))) + # .build + # ) + # + # with_client(test_config(data_source: td)) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::rule_match(0, 'id'), + # 'default', true, nil, nil + # ) + # client.variation("flagkey", basic_user, "default") + # end + # end + + # it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + # td = Integrations::TestData.data_source + # td.use_preconfigured_flag( + # FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0) + # .track_events_fallthrough(true).build + # ) + # + # with_client(test_config(data_source: td)) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::fallthrough, + # 'default', true, nil, nil + # ) + # client.variation("flagkey", basic_user, "default") + # end + # end end context "evaluation events - variation_detail" do - it "unknown flag" do - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'badkey', nil, nil, 'default', - LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND), - 'default', false, nil, nil - ) - client.variation_detail("badkey", basic_user, "default") - end - end - - it "known flag" do - td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) - - with_client(test_config(data_source: td)) do |client| - expect(event_processor(client)).to receive(:record_eval_event).with( - basic_user, 'flagkey', 1, 0, 'value', LaunchDarkly::EvaluationReason::off, - 'default', false, nil, 
nil - ) - client.variation_detail("flagkey", basic_user, "default") - end - end + # it "unknown flag" do + # with_client(test_config) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'badkey', nil, nil, 'default', + # LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND), + # 'default', false, nil, nil + # ) + # client.variation_detail("badkey", basic_user, "default") + # end + # end + + # it "known flag" do + # td = Integrations::TestData.data_source + # td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + # + # with_client(test_config(data_source: td)) do |client| + # expect(event_processor(client)).to receive(:record_eval_event).with( + # basic_user, 'flagkey', 1, 0, 'value', LaunchDarkly::EvaluationReason::off, + # 'default', false, nil, nil + # ) + # client.variation_detail("flagkey", basic_user, "default") + # end + # end it "does not send event, and logs error, if user is nil" do td = Integrations::TestData.data_source @@ -139,27 +140,27 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is nil" do - td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) - - logger = double().as_null_object - - with_client(test_config(data_source: td, logger: logger)) do |client| - expect(event_processor(client)).not_to receive(:record_eval_event) - expect(logger).to receive(:warn) - client.variation_detail("flagkey", { key: nil }, "default") - end - end + # it "does not send event, and logs warning, if user key is nil" do + # td = Integrations::TestData.data_source + # td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + # + # logger = double().as_null_object + # + # with_client(test_config(data_source: td, logger: logger)) do |client| + # expect(event_processor(client)).not_to receive(:record_eval_event) + # expect(logger).to receive(:warn) + # client.variation_detail("flagkey", { key: nil }, "default") + # end + # end end context "identify" do - it "queues up an identify event" do - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_identify_event).with(basic_user) - client.identify(basic_user) - end - end + # it "queues up an identify event" do + # with_client(test_config) do |client| + # expect(event_processor(client)).to receive(:record_identify_event).with(basic_user) + # client.identify(basic_user) + # end + # end it "does not send event, and logs warning, if user is nil" do logger = double().as_null_object @@ -183,23 +184,23 @@ def event_processor(client) end context "track" do - it "queues up an custom event" do - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_custom_event).with( - basic_user, 'custom_event_name', 42, nil - ) - client.track("custom_event_name", basic_user, 42) - end - end - - it "can include a metric value" do - with_client(test_config) do |client| - expect(event_processor(client)).to receive(:record_custom_event).with( - basic_user, 'custom_event_name', nil, 1.5 - ) - client.track("custom_event_name", basic_user, nil, 1.5) - end - end + # it "queues up an custom event" do + # with_client(test_config) do |client| + # expect(event_processor(client)).to receive(:record_custom_event).with( + # basic_user, 'custom_event_name', 42, nil + # ) + # client.track("custom_event_name", basic_user, 42) + # end + # end + + # it "can include a metric value" do 
+ # with_client(test_config) do |client| + # expect(event_processor(client)).to receive(:record_custom_event).with( + # basic_user, 'custom_event_name', nil, 1.5 + # ) + # client.track("custom_event_name", basic_user, nil, 1.5) + # end + # end it "sanitizes the user in the event" do numeric_key_user = { key: 33 } diff --git a/spec/mock_components.rb b/spec/mock_components.rb index a4bcc7b0..d859bfac 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -35,7 +35,7 @@ def with_client(config) end def basic_user - { "key": "user-key" } + LaunchDarkly::LDContext::create({ "key": "user-key" }) end module LaunchDarkly From 6cc0f1b088036f1c10bb07310921151df589300f Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 27 Oct 2022 10:44:19 -0400 Subject: [PATCH 257/292] Support ContextKind in Clauses (#209) This commit follows the general approach of the [equivalent PHP SDK PR][pr]. The main features of this commit are: - introduction of `individual_context` and `individual_context_count` methods - context kind matching in clauses [pr]: https://github.com/launchdarkly/php-server-sdk-private/pull/108 --- Makefile | 1 - lib/ldclient-rb/context.rb | 203 ++++++++++++++++++++--------- lib/ldclient-rb/impl/evaluator.rb | 53 +++++++- lib/ldclient-rb/reference.rb | 2 +- spec/context_spec.rb | 78 +++++++++++ spec/impl/evaluator_clause_spec.rb | 33 +++++ 6 files changed, 302 insertions(+), 68 deletions(-) diff --git a/Makefile b/Makefile index 708e71cd..bde2b382 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,6 @@ TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ -skip 'evaluation/bucketing/selection of context' \ -skip 'evaluation/parameterized/attribute references' \ -skip 'evaluation/parameterized/bad attribute reference errors' \ - -skip 'evaluation/parameterized/clause kind matching' \ -skip 'evaluation/parameterized/prerequisites' \ -skip 'evaluation/parameterized/segment match/included list is specific to user kind' \ -skip 'evaluation/parameterized/segment match/includedContexts' \ diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 4ca17b91..2bf84901 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -19,12 +19,12 @@ module LaunchDarkly # {LDContext#error} class LDContext KIND_DEFAULT = "user" - private_constant :KIND_DEFAULT + KIND_MULTI = "multi" - # @return [String] Returns the key for this context + # @return [String, nil] Returns the key for this context attr_reader :key - # @return [String] Returns the kind for this context + # @return [String, nil] Returns the kind for this context attr_reader :kind # @return [String, nil] Returns the error associated with this LDContext if invalid @@ -32,8 +32,8 @@ class LDContext # # @private - # @param key [String] - # @param kind [String] + # @param key [String, nil] + # @param kind [String, nil] # @param name [String, nil] # @param anonymous [Boolean, nil] # @param secondary [String, nil] @@ -78,12 +78,9 @@ def valid? # For a single-kind context, the attribute name can be any custom attribute. # It can also be one of the built-in ones like "kind", "key", or "name". # - # TODO: Update this paragraph once we implement these methods in ruby - # # For a multi-kind context, the only supported attribute name is "kind". - # Use individual_context_by_index(), individual_context_by_name(), or - # get_all_individual_contexts() to inspect a Context for a particular kind - # and then get its attributes. 
+ # Use {#individual_context} to inspect a Context for a particular kind and + # then get its attributes. # # This method does not support complex expressions for getting individual # values out of JSON objects or arrays, such as "/address/street". Use @@ -113,12 +110,9 @@ def get_value(attribute) # slash-delimited path using a JSON-Pointer-like syntax. See {Reference} # for more details. # - # TODO: Update this paragraph once we implement these methods in ruby - # # For a multi-kind context, the only supported attribute name is "kind". - # Use individual_context_by_index(), individual_context_by_name(), or - # get_all_individual_contexts() to inspect a Context for a particular kind - # and then get its attributes. + # Use {#individual_context} to inspect a Context for a particular kind and + # then get its attributes. # # If the value is found, the return value is the attribute value; # otherwise, it is nil. @@ -132,6 +126,7 @@ def get_value_for_reference(reference) return nil unless reference.error.nil? first_component = reference.component(0) + return nil if first_component.nil? if multi_kind? if reference.depth == 1 && first_component == :kind @@ -157,6 +152,69 @@ def get_value_for_reference(reference) value end + # + # Returns the number of context kinds in this context. + # + # For a valid individual context, this returns 1. For a multi-context, it + # returns the number of context kinds. For an invalid context, it returns + # zero. + # + # @return [Integer] the number of context kinds + # + def individual_context_count + return 0 unless valid? + return 1 if @contexts.nil? + @contexts.count + end + + # + # Returns the single-kind LDContext corresponding to one of the kinds in + # this context. + # + # The `kind` parameter can be either a number representing a zero-based + # index, or a string representing a context kind. + # + # If this method is called on a single-kind LDContext, then the only + # allowable value for `kind` is either zero or the same value as {#kind}, + # and the return value on success is the same LDContext. + # + # If the method is called on a multi-context, and `kind` is a number, it + # must be a non-negative index that is less than the number of kinds (that + # is, less than the return value of {#individual_context_count}, and the + # return value on success is one of the individual LDContexts within. Or, + # if `kind` is a string, it must match the context kind of one of the + # individual contexts. + # + # If there is no context corresponding to `kind`, the method returns nil. + # + # @param kind [Integer, String] the index or string value of a context kind + # @return [LDContext, nil] the context corresponding to that index or kind, + # or null if none. + # + def individual_context(kind) + return nil unless valid? + + if kind.is_a?(Integer) + unless multi_kind? + return kind == 0 ? self : nil + end + + return kind >= 0 && kind < @contexts.count ? @contexts[kind] : nil + end + + return nil unless kind.is_a?(String) + + unless multi_kind? + return self.kind == kind ? self : nil + end + + @contexts.each do |context| + return context if context.kind == kind + end + + nil + end + # # Retrieve the value of any top level, addressable attribute. # @@ -209,51 +267,20 @@ def self.with_key(key, kind = KIND_DEFAULT) # def self.create(data) return create_invalid_context("Cannot create an LDContext. 
Provided data is not a hash.") unless data.is_a?(Hash) - return create_context_from_legacy_data(data) unless data.has_key?(:kind) + return create_legacy_context(data) unless data.has_key?(:kind) kind = data[:kind] - unless LaunchDarkly::Impl::Context.validate_kind(kind) - create_invalid_context("The kind (#{kind || 'nil'}) was not valid for the provided context.") - end - - key = data[:key] - unless LaunchDarkly::Impl::Context.validate_key(key) - return create_invalid_context("The key (#{key || 'nil'}) was not valid for the provided context.") - end - - name = data[:name] - unless LaunchDarkly::Impl::Context.validate_name(name) - return create_invalid_context("The name value was set to a non-string value.") - end - - anonymous = data[:anonymous] - unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) - return create_invalid_context("The anonymous value was set to a non-boolean value.") - end - - meta = data.fetch(:_meta, {}) - private_attributes = meta[:privateAttributes] - if private_attributes && !private_attributes.is_a?(Array) - return create_invalid_context("The provided private attributes are not an array") - end - - # We only need to create an attribute hash if there are keys set outside - # of the ones we store in dedicated instance variables. - # - # :secondary is not a supported top level key in the new schema. - # However, someone could still include it so we need to ignore it. - attributes = nil - data.each do |k, v| - case k - when :kind, :key, :name, :anonymous, :secondary, :_meta - next - else - attributes ||= {} - attributes[k] = v.clone + if kind == KIND_MULTI + contexts = [] + data.each do |key, value| + next if key == :kind + contexts << create_single_context(value, key.to_s) end + + return create_multi(contexts) end - new(key.to_s, kind, name, anonymous, meta[:secondary], attributes, private_attributes) + create_single_context(data, kind) end # @@ -271,8 +298,8 @@ def self.create(data) # context, this method will return the single-kind context instead of a new # multi-kind context wrapping that one single-kind. # - # @param contexts [Array] - # @return LDContext + # @param contexts [Array] + # @return [LDContext] # def self.create_multi(contexts) return create_invalid_context("Multi-kind context requires an array of LDContexts") unless contexts.is_a?(Array) @@ -300,17 +327,17 @@ def self.create_multi(contexts) # # @param error [String] - # @return LDContext + # @return [LDContext] # private_class_method def self.create_invalid_context(error) - return new(nil, nil, nil, false, nil, nil, nil, "Cannot create an LDContext. Provided data is not a hash.") + return new(nil, nil, nil, false, nil, nil, nil, error) end # # @param data [Hash] - # @return LDContext + # @return [LDContext] # - private_class_method def self.create_context_from_legacy_data(data) + private_class_method def self.create_legacy_context(data) key = data[:key] # Legacy users are allowed to have "" as a key but they cannot have nil as a key. 
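
For illustration, a minimal sketch (assuming only the `LDContext.create`, `individual_context_count`, and `individual_context` APIs introduced in this patch) of building a multi-kind context from a hash and inspecting its individual kinds:

```ruby
require "ldclient-rb"

# Build a multi-kind context from a hash; kind: "multi" routes through
# create_multi internally, producing one individual context per non-kind key.
# The keys and names used here are hypothetical.
multi = LaunchDarkly::LDContext.create({
  kind: "multi",
  user: { key: "user-key", name: "Lucy" },
  org: { key: "org-key" },
})

multi.valid?                          # => true
multi.individual_context_count        # => 2
multi.individual_context("org").key   # => "org-key"
multi.individual_context("device")    # => nil (no context of that kind)
```
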
@@ -351,5 +378,59 @@ def self.create_multi(contexts) return new(key.to_s, KIND_DEFAULT, name, anonymous, data[:secondary], attributes, private_attributes) end + + # + # @param data [Hash] + # @param kind [String] + # @return [LaunchDarkly::LDContext] + # + private_class_method def self.create_single_context(data, kind) + unless data.is_a?(Hash) + return create_invalid_context("The provided data was not a hash") + end + + unless LaunchDarkly::Impl::Context.validate_kind(kind) + return create_invalid_context("The kind (#{kind || 'nil'}) was not valid for the provided context.") + end + + key = data[:key] + unless LaunchDarkly::Impl::Context.validate_key(key) + return create_invalid_context("The key (#{key || 'nil'}) was not valid for the provided context.") + end + + name = data[:name] + unless LaunchDarkly::Impl::Context.validate_name(name) + return create_invalid_context("The name value was set to a non-string value.") + end + + anonymous = data[:anonymous] + unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) + return create_invalid_context("The anonymous value was set to a non-boolean value.") + end + + meta = data.fetch(:_meta, {}) + private_attributes = meta[:privateAttributes] + if private_attributes && !private_attributes.is_a?(Array) + return create_invalid_context("The provided private attributes are not an array") + end + + # We only need to create an attribute hash if there are keys set outside + # of the ones we store in dedicated instance variables. + # + # :secondary is not a supported top level key in the new schema. + # However, someone could still include it so we need to ignore it. + attributes = nil + data.each do |k, v| + case k + when :kind, :key, :name, :anonymous, :secondary, :_meta + next + else + attributes ||= {} + attributes[k] = v.clone + end + end + + new(key.to_s, kind, name, anonymous, meta[:secondary], attributes, private_attributes) + end end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 43092a47..d7b85467 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -175,16 +175,59 @@ def clause_match_context(clause, context, state) end end + # + # @param clause [Hash] + # @param context_value [any] + # @return [Boolean] + # + private def match_any_clause_value(clause, context_value) + op = clause[:op].to_sym + clause[:values].any? { |cv| EvaluatorOperators.apply(op, context_value, cv) } + end + + # + # @param clause [Hash] + # @param context [LaunchDarkly::LDContext] + # @return [Boolean] + # + private def clause_match_by_kind(clause, context) + # If attribute is "kind", then we treat operator and values as a match + # expression against a list of all individual kinds in the context. + # That is, for a multi-kind context with kinds of "org" and "user", it + # is a match if either of those strings is a match with Operator and + # Values. + + (0...context.individual_context_count).each do |i| + c = context.individual_context(i) + if !c.nil? && match_any_clause_value(clause, c.kind) + return true + end + end + + false + end + + # + # @param clause [Hash] + # @param context [LaunchDarkly::LDContext] + # @return [Boolean] + # def clause_match_context_no_segments(clause, context) - user_val = context.get_value(clause[:attribute]) + if clause[:attribute] == "kind" + result = clause_match_by_kind(clause, context) + return clause[:negate] ? 
!result : result + end + + matched_context = context.individual_context(clause[:contextKind] || LaunchDarkly::LDContext::KIND_DEFAULT) + return false if matched_context.nil? + + user_val = matched_context.get_value(clause[:attribute]) return false if user_val.nil? - op = clause[:op].to_sym - clause_vals = clause[:values] result = if user_val.is_a? Enumerable - user_val.any? { |uv| clause_vals.any? { |cv| EvaluatorOperators.apply(op, uv, cv) } } + user_val.any? { |uv| match_any_clause_value(clause, uv) } else - clause_vals.any? { |cv| EvaluatorOperators.apply(op, user_val, cv) } + match_any_clause_value(clause, user_val) end clause[:negate] ? !result : result end diff --git a/lib/ldclient-rb/reference.rb b/lib/ldclient-rb/reference.rb index 29a8f227..26595c74 100644 --- a/lib/ldclient-rb/reference.rb +++ b/lib/ldclient-rb/reference.rb @@ -171,7 +171,7 @@ def self.create(value) # treats the whole string as a literal as long as it does not start with a # slash), or to Reference.create("/a~1b"). # - # @param value [String, String] + # @param value [String, Symbol] # @return [Reference] # def self.create_literal(value) diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 4f748405..60d1df44 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -106,6 +106,13 @@ expect(subject.create({ kind: "user" }).valid?).to be_falsey end + it "does not allow reserved names or empty values for kind" do + expect(subject.create({ kind: true, key: "key" }).valid?).to be_falsey + expect(subject.create({ kind: "", key: "key" }).valid?).to be_falsey + expect(subject.create({ kind: "kind", key: "key" }).valid?).to be_falsey + expect(subject.create({ kind: "multi", key: "key" }).valid?).to be_falsey + end + it "anonymous is required to be a boolean or nil" do expect(subject.create({ key: "key", kind: "user" }).valid?).to be_truthy expect(subject.create({ key: "key", kind: "user", anonymous: true }).valid?).to be_truthy @@ -157,6 +164,16 @@ expect(multi_context.valid?).to be_truthy end + it "can be created from a hash" do + data = {kind: "multi", user: {key: "user-key"}, org: {key: "org-key"}} + multi_context = subject.create(data) + + expect(multi_context).to be_a(LaunchDarkly::LDContext) + expect(multi_context.key).to be_nil + expect(multi_context.kind).to eq(LaunchDarkly::LDContext::KIND_MULTI) + expect(multi_context.valid?).to be_truthy + end + it "will return the single kind context if only one is provided" do user_context = subject.create({ key: "user-key" }) multi_context = subject.create_multi([user_context]) @@ -192,6 +209,67 @@ end end + describe "context counts" do + it "invalid contexts have a size of 0" do + context = subject.create({}) + + expect(context.valid?).to be_falsey + expect(context.individual_context_count).to eq(0) + end + + it "individual contexts have a size of 1" do + context = subject.create({ kind: "user", key: "user-key" }) + expect(context.individual_context_count).to eq(1) + end + + it "multi-kind contexts have a size equal to the single-kind contexts" do + user_context = subject.create({ key: "user-key", kind: "user" }) + org_context = subject.create({ key: "org-key", kind: "org" }) + multi_context = subject.create_multi([user_context, org_context]) + + expect(multi_context.individual_context_count).to eq(2) + end + end + + describe "retrieving specific contexts" do + it "invalid contexts always return nil" do + context = subject.create({kind: "user"}) + + expect(context.valid?).to be_falsey + expect(context.individual_context(-1)).to be_nil + 
expect(context.individual_context(0)).to be_nil + expect(context.individual_context(1)).to be_nil + + expect(context.individual_context("user")).to be_nil + end + + it "single contexts can retrieve themselves" do + context = subject.create({key: "user-key", kind: "user"}) + + expect(context.valid?).to be_truthy + expect(context.individual_context(-1)).to be_nil + expect(context.individual_context(0)).to eq(context) + expect(context.individual_context(1)).to be_nil + + expect(context.individual_context("user")).to eq(context) + expect(context.individual_context("org")).to be_nil + end + + it "multi-kind contexts can return nested contexts" do + user_context = subject.create({ key: "user-key", kind: "user" }) + org_context = subject.create({ key: "org-key", kind: "org" }) + multi_context = subject.create_multi([user_context, org_context]) + + expect(multi_context.valid?).to be_truthy + expect(multi_context.individual_context(-1)).to be_nil + expect(multi_context.individual_context(0)).to eq(user_context) + expect(multi_context.individual_context(1)).to eq(org_context) + + expect(multi_context.individual_context("user")).to eq(user_context) + expect(multi_context.individual_context("org")).to eq(org_context) + end + end + describe "value retrieval" do describe "supports simple attribute retrieval" do it "can retrieve the correct simple attribute value" do diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb index 2207abdc..75eb277f 100644 --- a/spec/impl/evaluator_clause_spec.rb +++ b/spec/impl/evaluator_clause_spec.rb @@ -49,6 +49,39 @@ module Impl flag = factory.boolean_flag_with_clauses([clause]) expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end + + it "clause match uses context kind" do + clause = { contextKind: 'company', attribute: 'name', op: 'in', values: ['Catco'] } + + context1 = LDContext.create({ key: 'cc', kind: 'company', name: 'Catco'}) + context2 = LDContext.create({ key: 'l', kind: 'user', name: 'Lucy' }) + context3 = LDContext.create_multi([context1, context2]) + + flag = factory.boolean_flag_with_clauses([clause]) + + expect(basic_evaluator.evaluate(flag, context1).detail.value).to be true + expect(basic_evaluator.evaluate(flag, context2).detail.value).to be false + expect(basic_evaluator.evaluate(flag, context3).detail.value).to be true + end + + it "clause match by kind attribute" do + clause = { attribute: 'kind', op: 'startsWith', values: ['a'] } + + context1 = LDContext.create({ key: 'key' }) + context2 = LDContext.create({ key: 'key', kind: 'ab' }) + context3 = LDContext.create_multi( + [ + LDContext.create({ key: 'key', kind: 'cd' }), + LDContext.create({ key: 'key', kind: 'ab' }), + ] + ) + + flag = factory.boolean_flag_with_clauses([clause]) + + expect(basic_evaluator.evaluate(flag, context1).detail.value).to be false + expect(basic_evaluator.evaluate(flag, context2).detail.value).to be true + expect(basic_evaluator.evaluate(flag, context3).detail.value).to be true + end end end end From 5ba3853b072aaf8a6faaa1c39ab5e5e4696495b1 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 27 Oct 2022 16:59:11 -0400 Subject: [PATCH 258/292] Support included / excluded contexts in segments (#210) This commit follows the general approach of the [equivalent PHP SDK PR][pr]. Segments are now able to provide `includedContext` and `excludedContext` properties which can target values within a specific context kind. 
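
As a rough sketch (using the `EvaluatorHelpers.context_key_in_target_list` helper added in this patch, with hypothetical context keys), an `includedContexts` entry only matches when the evaluated context contains an individual context of the named kind whose key appears in `values`:

```ruby
require "ldclient-rb"

org_context  = LaunchDarkly::LDContext.create({ key: "orgkey1", kind: "org" })
user_context = LaunchDarkly::LDContext.create({ key: "orgkey1", kind: "user" })

helpers = LaunchDarkly::Impl::EvaluatorHelpers
helpers.context_key_in_target_list(org_context, "org", ["orgkey1", "orgkey2"])
# => true  -- an "org" context with a listed key is present
helpers.context_key_in_target_list(user_context, "org", ["orgkey1", "orgkey2"])
# => false -- same key, but no individual context of kind "org"
```

The JSON below shows the corresponding shape of the new segment properties:
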
```json { "includedContexts": [ { "contextKind": "org", "values": ["orgkey1", "orgkey2"] } ] } ``` [pr]: https://github.com/launchdarkly/php-server-sdk-private/pull/111 --- Makefile | 4 -- lib/ldclient-rb/impl/evaluator.rb | 34 ++++++++++++-- lib/ldclient-rb/impl/evaluator_helpers.rb | 16 +++++++ spec/impl/evaluator_segment_spec.rb | 54 ++++++++++++++++++----- spec/impl/evaluator_spec_base.rb | 4 +- 5 files changed, 92 insertions(+), 20 deletions(-) diff --git a/Makefile b/Makefile index bde2b382..cf140ea4 100644 --- a/Makefile +++ b/Makefile @@ -12,10 +12,6 @@ TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ -skip 'evaluation/parameterized/attribute references' \ -skip 'evaluation/parameterized/bad attribute reference errors' \ -skip 'evaluation/parameterized/prerequisites' \ - -skip 'evaluation/parameterized/segment match/included list is specific to user kind' \ - -skip 'evaluation/parameterized/segment match/includedContexts' \ - -skip 'evaluation/parameterized/segment match/excluded list is specific to user kind' \ - -skip 'evaluation/parameterized/segment match/excludedContexts' \ -skip 'evaluation/parameterized/segment recursion' \ -skip 'evaluation/parameterized/target match/context targets' \ -skip 'evaluation/parameterized/target match/multi-kind' \ diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index d7b85467..e1b7fb2f 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -233,7 +233,6 @@ def clause_match_context_no_segments(clause, context) end def segment_match_context(segment, context, state) - return false unless context.key segment[:unbounded] ? big_segment_match_context(segment, context, state) : simple_segment_match_context(segment, context, true) end @@ -265,8 +264,37 @@ def big_segment_match_context(segment, context, state) def simple_segment_match_context(segment, context, use_includes_and_excludes) if use_includes_and_excludes - return true if segment[:included].include?(context.key) - return false if segment[:excluded].include?(context.key) + if EvaluatorHelpers.context_key_in_target_list(context, nil, segment[:included]) + return true + end + + # @type [Enumerable] + included_contexts = segment[:includedContexts] + if included_contexts.is_a?(Enumerable) + included_contexts.each do |ctx| + return false unless ctx.is_a? Hash + + if EvaluatorHelpers.context_key_in_target_list(context, ctx[:contextKind], ctx[:values]) + return true + end + end + end + + if EvaluatorHelpers.context_key_in_target_list(context, nil, segment[:excluded]) + return false + end + + # @type [Enumerable] + excluded_contexts = segment[:excludedContexts] + if excluded_contexts.is_a?(Enumerable) + excluded_contexts.each do |ctx| + return false unless ctx.is_a? Hash + + if EvaluatorHelpers.context_key_in_target_list(context, ctx[:contextKind], ctx[:values]) + return false + end + end + end end (segment[:rules] || []).each do |r| diff --git a/lib/ldclient-rb/impl/evaluator_helpers.rb b/lib/ldclient-rb/impl/evaluator_helpers.rb index 9629a6aa..41914afb 100644 --- a/lib/ldclient-rb/impl/evaluator_helpers.rb +++ b/lib/ldclient-rb/impl/evaluator_helpers.rb @@ -48,6 +48,22 @@ def self.evaluation_detail_for_variation(flag, index, reason, logger = nil) EvaluationDetail.new(vars[index], index, reason) end end + + # + # @param context [LaunchDarkly::LDContext] + # @param kind [String, nil] + # @param keys [Array] + # @return [Boolean] + # + def self.context_key_in_target_list(context, kind, keys) + return false unless keys.is_a? 
Enumerable + return false if keys.empty? + + matched_context = context.individual_context(kind || LaunchDarkly::LDContext::KIND_DEFAULT) + return false if matched_context.nil? + + keys.include? matched_context.key + end end end end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 3e87a662..04856522 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -5,11 +5,11 @@ module LaunchDarkly module Impl evaluator_tests_with_and_without_preprocessing "Evaluator (segments)" do |desc, factory| describe "#{desc} - evaluate", :evaluator_spec_base => true do - def test_segment_match(factory, segment) - clause = make_segment_match_clause(segment) + def test_segment_match(factory, segment, context) + clause = make_segment_match_clause(segment, context.individual_context(0).kind) flag = factory.boolean_flag_with_clauses([clause]) e = EvaluatorBuilder.new(logger).with_segment(segment).build - e.evaluate(flag, user).detail.value + e.evaluate(flag, context).detail.value end it "retrieves segment from segment store for segmentMatch operator" do @@ -34,20 +34,50 @@ def test_segment_match(factory, segment) it 'explicitly includes user' do segment = make_segment('segkey') segment[:included] = [ user.key ] - expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true + end + + it 'explicitly includes a specific context kind' do + org_context = LDContext::create({ key: "orgkey", kind: "org" }) + device_context = LDContext::create({ key: "devicekey", kind: "device" }) + multi_context = LDContext::create_multi([org_context, device_context]) + + segment = make_segment('segkey') + segment[:includedContexts] = [{ contextKind: "org", values: ["orgkey"] }] + + expect(test_segment_match(factory, segment, org_context)).to be true + expect(test_segment_match(factory, segment, device_context)).to be false + expect(test_segment_match(factory, segment, multi_context)).to be true end it 'explicitly excludes user' do segment = make_segment('segkey') segment[:excluded] = [ user.key ] - expect(test_segment_match(factory, segment)).to be false + expect(test_segment_match(factory, segment, user)).to be false + end + + it 'explicitly excludes a specific context kind' do + org_context = LDContext::create({ key: "orgkey", kind: "org" }) + device_context = LDContext::create({ key: "devicekey", kind: "device" }) + multi_context = LDContext::create_multi([org_context, device_context]) + + segment = make_segment('segkey') + segment[:excludedContexts] = [{ contextKind: "org", values: ["orgkey"] }] + + org_clause = make_user_matching_clause(org_context, :key) + device_clause = make_user_matching_clause(device_context, :key) + segment[:rules] = [ { clauses: [ org_clause ] }, { clauses: [ device_clause ] } ] + + expect(test_segment_match(factory, segment, org_context)).to be false + expect(test_segment_match(factory, segment, device_context)).to be true + expect(test_segment_match(factory, segment, multi_context)).to be false end it 'both includes and excludes user; include takes priority' do segment = make_segment('segkey') segment[:included] = [ user.key ] segment[:excluded] = [ user.key ] - expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true end it 'matches user by rule when weight is absent' do @@ -57,7 +87,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - 
expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true end it 'matches user by rule when weight is nil' do @@ -68,7 +98,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true end it 'matches user with full rollout' do @@ -79,7 +109,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true end it "doesn't match user with zero rollout" do @@ -90,7 +120,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment)).to be false + expect(test_segment_match(factory, segment, user)).to be false end it "matches user with multiple clauses" do @@ -101,7 +131,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment)).to be true + expect(test_segment_match(factory, segment, user)).to be true end it "doesn't match user with multiple clauses if a clause doesn't match" do @@ -113,7 +143,7 @@ def test_segment_match(factory, segment) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment)).to be false + expect(test_segment_match(factory, segment, user)).to be false end end end diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 564095bc..30c1064e 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -112,6 +112,7 @@ def make_user_matching_clause(user, attr = :key) op: :in, values: [ user.get_value(attr) ], negate: false, + contextKind: user.individual_context(0).kind, } end @@ -125,11 +126,12 @@ def make_segment(key) } end - def make_segment_match_clause(segment) + def make_segment_match_clause(segment, kind = nil) { op: :segmentMatch, values: [ segment[:key] ], negate: false, + contextKind: kind || LaunchDarkly::LDContext::KIND_DEFAULT, } end end From 33a3cb99ff53740307309b48ad6e565ed648cb58 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 28 Oct 2022 10:16:48 -0400 Subject: [PATCH 259/292] Add contextKind support for rollouts & experiements (#211) This commit follows the general approach of the [equivalent PHP SDK PR][pr]. [pr]: https://github.com/launchdarkly/php-server-sdk-private/pull/110 --- Makefile | 3 +- lib/ldclient-rb/impl/evaluator.rb | 4 +- lib/ldclient-rb/impl/evaluator_bucketing.rb | 42 ++++++++++----------- spec/impl/evaluator_bucketing_spec.rb | 40 ++++++++++---------- 4 files changed, 44 insertions(+), 45 deletions(-) diff --git a/Makefile b/Makefile index cf140ea4..868860d2 100644 --- a/Makefile +++ b/Makefile @@ -6,9 +6,8 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # - various other "evaluation" subtests: These tests require context kind support. # - "events": These test suites will be unavailable until more of the U2C implementation is done. 
TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ - -skip 'evaluation/bucketing/bucket by non-key attribute' \ + -skip 'evaluation/bucketing/bucket by non-key attribute/in rollouts/string value/complex attribute reference' \ -skip 'evaluation/bucketing/secondary' \ - -skip 'evaluation/bucketing/selection of context' \ -skip 'evaluation/parameterized/attribute references' \ -skip 'evaluation/parameterized/bad attribute reference errors' \ -skip 'evaluation/parameterized/prerequisites' \ diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index e1b7fb2f..f4f98bca 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -313,9 +313,9 @@ def segment_rule_match_context(rule, context, segment_key, salt) return true if !rule[:weight] # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_context(context, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) + bucket = EvaluatorBucketing.bucket_context(context, rule[:rolloutContextKind], segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 - return bucket < weight + return bucket.nil? || bucket < weight end private diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index bd3a238b..f0b6ce15 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -7,7 +7,7 @@ module EvaluatorBucketing # @param flag [Object] the feature flag # @param rule [Object] the rule # @param context [LDContext] the context properties - # @return [Number] the variation index, or nil if there is an error + # @return [Array<[Number, nil], Boolean>] the variation index, or nil if there is an error def self.variation_index_for_context(flag, rule, context) variation = rule[:variation] @@ -16,19 +16,18 @@ def self.variation_index_for_context(flag, rule, context) return nil, false if rollout.nil? variations = rollout[:variations] if !variations.nil? && variations.length > 0 # percentage rollout - bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] + rollout_is_experiment = rollout[:kind] == "experiment" + bucket_by = rollout_is_experiment ? nil : rollout[:bucketBy] + bucket_by = 'key' if bucket_by.nil? seed = rollout[:seed] - bucket = bucket_context(context, flag[:key], bucket_by, flag[:salt], seed) # may not be present + bucket = bucket_context(context, rollout[:contextKind], flag[:key], bucket_by, flag[:salt], seed) # may not be present + in_experiment = rollout_is_experiment && !bucket.nil? sum = 0; variations.each do |variate| - if rollout[:kind] == "experiment" && !variate[:untracked] - in_experiment = true - end - sum += variate[:weight].to_f / 100000.0 - if bucket < sum - return variate[:variation], !!in_experiment + if bucket.nil? || bucket < sum + return variate[:variation], in_experiment && !variate[:untracked] end end # The context's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -37,9 +36,7 @@ def self.variation_index_for_context(flag, rule, context) # this case (or changing the scaling, which would potentially change the results for *all* contexts), we # will simply put the context in the last bucket. 
last_variation = variations[-1] - in_experiment = rollout[:kind] == "experiment" && !last_variation[:untracked] - - [last_variation[:variation], in_experiment] + [last_variation[:variation], in_experiment && !last_variation[:untracked]] else # the rule isn't well-formed [nil, false] end @@ -48,20 +45,23 @@ def self.variation_index_for_context(flag, rule, context) # Returns a context's bucket value as a floating-point value in `[0, 1)`. # # @param context [LDContext] the context properties + # @param context_kind [String, nil] the context kind to match against # @param key [String] the feature flag key (or segment key, if this is for a segment rule) # @param bucket_by [String|Symbol] the name of the context attribute to be used for bucketing # @param salt [String] the feature flag's or segment's salt value - # @return [Number] the bucket value, from 0 inclusive to 1 exclusive - def self.bucket_context(context, key, bucket_by, salt, seed) - return nil unless context.key + # @return [Float, nil] the bucket value, from 0 inclusive to 1 exclusive + def self.bucket_context(context, context_kind, key, bucket_by, salt, seed) + matched_context = context.individual_context(context_kind || LaunchDarkly::LDContext::KIND_DEFAULT) + return nil if matched_context.nil? - id_hash = bucketable_string_value(context.get_value(bucket_by)) - if id_hash.nil? - return 0.0 - end + context_value = matched_context.get_value(bucket_by) + return 0.0 if context_value.nil? + + id_hash = bucketable_string_value(context_value) + return 0.0 if id_hash.nil? - if context.get_value(:secondary) - id_hash += "." + context.get_value(:secondary).to_s + if matched_context.get_value(:secondary) + id_hash += "." + matched_context.get_value(:secondary).to_s end if seed diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 690034ed..7270f258 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -8,54 +8,54 @@ let(:seed) { 61 } it "returns the expected bucket values for seed" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.09801207); user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.14483777); user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.9242641); end it "returns the same bucket regardless of hashKey and salt" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket1 = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user, "hashKey1", "key", "saltyB", seed) - bucket3 = subject.bucket_context(user, "hashKey2", "key", "saltyC", seed) + bucket1 = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user, user.kind, "hashKey1", "key", "saltyB", seed) + bucket3 = subject.bucket_context(user, user.kind, "hashKey2", "key", "saltyC", seed) expect(bucket1).to eq(bucket2) expect(bucket2).to eq(bucket3) end it 
"returns a different bucket if the seed is not the same" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket1 = subject.bucket_context(user, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user, "hashKey1", "key", "saltyB", seed+1) + bucket1 = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user, user.kind, "hashKey1", "key", "saltyB", seed+1) expect(bucket1).to_not eq(bucket2) end it "returns a different bucket if the user is not the same" do user1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) user2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket1 = subject.bucket_context(user1, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user2, "hashKey1", "key", "saltyB", seed) + bucket1 = subject.bucket_context(user1, user1.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(user2, user2.kind, "hashKey1", "key", "saltyB", seed) expect(bucket1).to_not eq(bucket2) end end it "gets expected bucket values for specific keys" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", nil) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.42157587); user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", nil) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.6708485); user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) - bucket = subject.bucket_context(user, "hashKey", "key", "saltyA", nil) + bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.10343106); end @@ -67,8 +67,8 @@ intAttr: 33333, }, }) - stringResult = subject.bucket_context(user, "hashKey", "stringAttr", "saltyA", nil) - intResult = subject.bucket_context(user, "hashKey", "intAttr", "saltyA", nil) + stringResult = subject.bucket_context(user, user.kind, "hashKey", "stringAttr", "saltyA", nil) + intResult = subject.bucket_context(user, user.kind, "hashKey", "intAttr", "saltyA", nil) expect(intResult).to be_within(0.0000001).of(0.54771423) expect(intResult).to eq(stringResult) @@ -81,7 +81,7 @@ floatAttr: 33.5, }, }) - result = subject.bucket_context(user, "hashKey", "floatAttr", "saltyA", nil) + result = subject.bucket_context(user, user.kind, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) end @@ -93,7 +93,7 @@ boolAttr: true, }, }) - result = subject.bucket_context(user, "hashKey", "boolAttr", "saltyA", nil) + result = subject.bucket_context(user, user.kind, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) end end @@ -107,7 +107,7 @@ # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_context(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, nil) * 100000).truncate() expect(bucket_value).to be > 0 expect(bucket_value).to be < 100000 @@ -135,7 +135,7 @@ flag_key = "flagkey" salt = "salt" - bucket_value = (subject.bucket_context(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = 
(subject.bucket_context(user, user.kind, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { @@ -194,7 +194,7 @@ salt = "salt" seed = 61 - bucket_value = (subject.bucket_context(user, flag_key, "key", salt, seed) * 100000).truncate() + bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { From a500be9e54136584d75f2f08cecf6e6e13adf050 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 28 Oct 2022 10:40:02 -0400 Subject: [PATCH 260/292] Style and test matcher improvements (#212) This commit enables several rubocop rules that were previously disabled. Once enabled, `rubocop -A` was run to automatically apply these fixes. There are a couple of additional changes that were made by hand: - I added the rubocop and rubocop-performance gems as dev packages. This should help address the original installation issue we ran into when I introduced these tools. - By default, new rubocop rules are disabled. This was the default before, but if you don't explicitly set this value, each run generates a ton of warning noise. This quiets that down. - Updates some LDContext tests to be more strict in their expectations of truth. --- .gitignore | 1 + .rubocop.yml | 14 ++-- contract-tests/Gemfile | 2 + contract-tests/big_segment_store_fixture.rb | 2 +- contract-tests/client_entity.rb | 10 +-- launchdarkly-server-sdk.gemspec | 2 + lib/ldclient-rb/context.rb | 14 ++-- lib/ldclient-rb/evaluation_detail.rb | 14 ++-- lib/ldclient-rb/events.rb | 46 +++++------ lib/ldclient-rb/flags_state.rb | 4 +- lib/ldclient-rb/impl/big_segments.rb | 16 ++-- lib/ldclient-rb/impl/diagnostic_events.rb | 2 +- lib/ldclient-rb/impl/evaluator.rb | 36 ++++----- lib/ldclient-rb/impl/evaluator_bucketing.rb | 6 +- lib/ldclient-rb/impl/event_sender.rb | 6 +- lib/ldclient-rb/impl/event_summarizer.rb | 7 +- lib/ldclient-rb/impl/event_types.rb | 4 +- .../impl/integrations/consul_impl.rb | 8 +- .../impl/integrations/dynamodb_impl.rb | 14 ++-- .../impl/integrations/file_data_source.rb | 4 +- .../impl/integrations/redis_impl.rb | 8 +- .../impl/model/preprocessed_data.rb | 2 +- lib/ldclient-rb/impl/repeating_task.rb | 2 +- lib/ldclient-rb/impl/store_data_set_sorter.rb | 4 +- lib/ldclient-rb/impl/unbounded_pool.rb | 2 +- lib/ldclient-rb/integrations/consul.rb | 2 +- lib/ldclient-rb/integrations/file_data.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 4 +- lib/ldclient-rb/integrations/test_data.rb | 2 +- .../integrations/util/store_wrapper.rb | 16 ++-- lib/ldclient-rb/ldclient.rb | 14 ++-- lib/ldclient-rb/memoized_value.rb | 2 +- lib/ldclient-rb/non_blocking_thread_pool.rb | 2 +- lib/ldclient-rb/polling.rb | 4 +- lib/ldclient-rb/requestor.rb | 6 +- lib/ldclient-rb/stream.rb | 2 +- lib/ldclient-rb/user_filter.rb | 4 +- lib/ldclient-rb/util.rb | 6 +- spec/context_spec.rb | 80 +++++++++---------- spec/evaluation_detail_spec.rb | 2 +- spec/events_spec.rb | 7 +- spec/http_util.rb | 6 +- spec/impl/evaluator_bucketing_spec.rb | 12 +-- spec/impl/evaluator_spec_base.rb | 8 +- spec/impl/repeating_task_spec.rb | 2 +- spec/integrations/dynamodb_stores_spec.rb | 2 +- spec/integrations/store_wrapper_spec.rb | 4 +- spec/mock_components.rb | 2 +- spec/requestor_spec.rb | 2 +- 49 files changed, 214 insertions(+), 209 deletions(-) diff --git a/.gitignore b/.gitignore index d7b37d2f..d1ed1a09 100644 --- a/.gitignore +++ 
b/.gitignore @@ -15,3 +15,4 @@ mkmf.log .DS_Store Gemfile.lock .ruby-version +contract-tests/contract-tests.iml diff --git a/.rubocop.yml b/.rubocop.yml index d5a11033..94e5e93d 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -6,6 +6,7 @@ AllCops: - lib/**/*.rb - spec/**/*.rb - contract-tests/**/*.rb + NewCops: disable Naming/AccessorMethodName: Description: Check the naming of accessor methods for get_/set_. @@ -228,12 +229,12 @@ Style/NegatedIf: Favor unless over if for negative conditions (or control flow or). StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#unless-for-negatives' - Enabled: false + Enabled: true Style/NegatedWhile: Description: 'Favor until over while for negative conditions.' StyleGuide: 'https://github.com/bbatsov/ruby-style-guide#until-for-negatives' - Enabled: false + Enabled: true Style/Next: Description: 'Use `next` to skip iteration instead of a condition at the end.' @@ -829,7 +830,7 @@ Style/RedundantBegin: Enabled: false Style/RedundantCondition: - Enabled: false + Enabled: true Style/RedundantException: Enabled: false @@ -838,13 +839,13 @@ Style/RedundantFileExtensionInRequire: Enabled: false Style/RedundantParentheses: - Enabled: false + Enabled: true Style/RedundantRegexpEscape: Enabled: false Style/RedundantReturn: - Enabled: false + Enabled: true Style/RedundantSelf: Enabled: false @@ -856,7 +857,8 @@ Style/SafeNavigation: Enabled: false Style/Semicolon: - Enabled: false + Enabled: true + AllowAsExpressionSeparator: true Style/SlicingWithRange: Enabled: false diff --git a/contract-tests/Gemfile b/contract-tests/Gemfile index 48b8812f..4e343a12 100644 --- a/contract-tests/Gemfile +++ b/contract-tests/Gemfile @@ -8,3 +8,5 @@ gem 'sinatra', '~> 2.1' gem 'glassfish', :platforms => :jruby gem 'thin', :platforms => :ruby gem 'json' +gem 'rubocop', '~> 1.37', group: 'development' +gem 'rubocop-performance', '~> 1.15', group: 'development' diff --git a/contract-tests/big_segment_store_fixture.rb b/contract-tests/big_segment_store_fixture.rb index db17be09..5681afed 100644 --- a/contract-tests/big_segment_store_fixture.rb +++ b/contract-tests/big_segment_store_fixture.rb @@ -15,7 +15,7 @@ def get_membership(user_hash) response = HTTP.post("#{@uri}/getMembership", :json => {:userHash => user_hash}) json = response.parse(:json) - return json['values'] + json['values'] end def stop diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index f1c107ae..062e9933 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -14,13 +14,13 @@ def initialize(log, config) if config[:streaming] streaming = config[:streaming] - opts[:stream_uri] = streaming[:baseUri] if !streaming[:baseUri].nil? - opts[:initial_reconnect_delay] = streaming[:initialRetryDelayMs] / 1_000.0 if !streaming[:initialRetryDelayMs].nil? + opts[:stream_uri] = streaming[:baseUri] unless streaming[:baseUri].nil? + opts[:initial_reconnect_delay] = streaming[:initialRetryDelayMs] / 1_000.0 unless streaming[:initialRetryDelayMs].nil? elsif config[:polling] polling = config[:polling] opts[:stream] = false - opts[:base_uri] = polling[:baseUri] if !polling[:baseUri].nil? - opts[:poll_interval] = polling[:pollIntervalMs] / 1_000.0 if !polling[:pollIntervalMs].nil? + opts[:base_uri] = polling[:baseUri] unless polling[:baseUri].nil? + opts[:poll_interval] = polling[:pollIntervalMs] / 1_000.0 unless polling[:pollIntervalMs].nil? 
end if config[:events] @@ -30,7 +30,7 @@ def initialize(log, config) opts[:diagnostic_opt_out] = !events[:enableDiagnostics] opts[:all_attributes_private] = !!events[:allAttributesPrivate] opts[:private_attribute_names] = events[:globalPrivateAttributes] - opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) if !events[:flushIntervalMs].nil? + opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) unless events[:flushIntervalMs].nil? opts[:inline_users_in_events] = events[:inlineUsers] || false else opts[:send_events] = false diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 56335a4c..95e73677 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -31,6 +31,8 @@ Gem::Specification.new do |spec| spec.add_development_dependency "timecop", "~> 0.9" spec.add_development_dependency "listen", "~> 3.3" # see file_data_source.rb spec.add_development_dependency "webrick", "~> 1.7" + spec.add_development_dependency "rubocop", "~> 1.37" + spec.add_development_dependency "rubocop-performance", "~> 1.15" # required by dynamodb spec.add_development_dependency "oga", "~> 2.2" diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 2bf84901..28ebc6f1 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -228,15 +228,15 @@ def individual_context(kind) private def get_top_level_addressable_attribute_single_kind(name) case name when :kind - return kind + kind when :key - return key + key when :name - return @name + @name when :anonymous - return @anonymous + @anonymous when :secondary - return @secondary + @secondary else @attributes&.fetch(name, nil) end @@ -330,7 +330,7 @@ def self.create_multi(contexts) # @return [LDContext] # private_class_method def self.create_invalid_context(error) - return new(nil, nil, nil, false, nil, nil, nil, error) + new(nil, nil, nil, false, nil, nil, nil, error) end # @@ -376,7 +376,7 @@ def self.create_multi(contexts) return create_invalid_context("The provided private attributes are not an array") end - return new(key.to_s, KIND_DEFAULT, name, anonymous, data[:secondary], attributes, private_attributes) + new(key.to_s, KIND_DEFAULT, name, anonymous, data[:secondary], attributes, private_attributes) end # diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index cafeff73..616faf7b 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -12,7 +12,7 @@ class EvaluationDetail # @raise [ArgumentError] if `variation_index` or `reason` is not of the correct type def initialize(value, variation_index, reason) raise ArgumentError.new("variation_index must be a number") if !variation_index.nil? && !(variation_index.is_a? Numeric) - raise ArgumentError.new("reason must be an EvaluationReason") if !(reason.is_a? EvaluationReason) + raise ArgumentError.new("reason must be an EvaluationReason") unless reason.is_a? EvaluationReason @value = value @variation_index = variation_index @reason = reason @@ -176,7 +176,7 @@ def self.target_match # @return [EvaluationReason] # @raise [ArgumentError] if `rule_index` is not a number or `rule_id` is not a string def self.rule_match(rule_index, rule_id, in_experiment=false) - raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) + raise ArgumentError.new("rule_index must be a number") unless rule_index.is_a? Numeric raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? 
String) # in test data, ID could be nil if in_experiment @@ -193,7 +193,7 @@ def self.rule_match(rule_index, rule_id, in_experiment=false) # @return [EvaluationReason] # @raise [ArgumentError] if `prerequisite_key` is nil or not a string def self.prerequisite_failed(prerequisite_key) - raise ArgumentError.new("prerequisite_key must be a string") if !(prerequisite_key.is_a? String) + raise ArgumentError.new("prerequisite_key must be a string") unless prerequisite_key.is_a? String new(:PREREQUISITE_FAILED, nil, nil, prerequisite_key, nil) end @@ -203,7 +203,7 @@ def self.prerequisite_failed(prerequisite_key) # @return [EvaluationReason] # @raise [ArgumentError] if `error_kind` is not a symbol def self.error(error_kind) - raise ArgumentError.new("error_kind must be a symbol") if !(error_kind.is_a? Symbol) + raise ArgumentError.new("error_kind must be a symbol") unless error_kind.is_a? Symbol e = @@error_instances[error_kind] e.nil? ? make_error(error_kind) : e end @@ -279,7 +279,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json else { kind: @kind } end - if !@big_segments_status.nil? + unless @big_segments_status.nil? ret[:bigSegmentsStatus] = @big_segments_status end ret @@ -327,9 +327,9 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_exper @kind = kind.to_sym @rule_index = rule_index @rule_id = rule_id - @rule_id.freeze if !rule_id.nil? + @rule_id.freeze unless rule_id.nil? @prerequisite_key = prerequisite_key - @prerequisite_key.freeze if !prerequisite_key.nil? + @prerequisite_key.freeze unless prerequisite_key.nil? @error_kind = error_kind @in_experiment = in_experiment @big_segments_status = big_segments_status diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index cd00f294..fd3d1bba 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -139,7 +139,7 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test @inbox_full = Concurrent::AtomicBoolean.new(false) event_sender = (test_properties || {})[:event_sender] || - Impl::EventSender.new(sdk_key, config, client ? client : Util.new_http_client(config.events_uri, config)) + Impl::EventSender.new(sdk_key, config, client || Util.new_http_client(config.events_uri, config)) @timestamp_fn = (test_properties || {})[:timestamp_fn] || proc { Impl::Util.current_time_millis } @@ -180,7 +180,7 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown - @diagnostic_event_task.shutdown if !@diagnostic_event_task.nil? + @diagnostic_event_task.shutdown unless @diagnostic_event_task.nil? # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox # is full; an orderly shutdown can't happen unless these messages are received. @inbox << FlushMessage.new @@ -282,7 +282,7 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) def do_shutdown(flush_workers, diagnostic_event_workers) flush_workers.shutdown flush_workers.wait_for_termination - if !diagnostic_event_workers.nil? + unless diagnostic_event_workers.nil? diagnostic_event_workers.shutdown diagnostic_event_workers.wait_for_termination end @@ -292,7 +292,7 @@ def do_shutdown(flush_workers, diagnostic_event_workers) def synchronize_for_testing(flush_workers, diagnostic_event_workers) # Used only by unit tests. Wait until all active flush workers have finished. flush_workers.wait_all - diagnostic_event_workers.wait_all if !diagnostic_event_workers.nil? 
+ diagnostic_event_workers.wait_all unless diagnostic_event_workers.nil? end def dispatch_event(event, outbox) @@ -316,14 +316,14 @@ def dispatch_event(event, outbox) # For each user we haven't seen before, we add an index event - unless this is already # an identify event for that user. - if !(will_add_full_event && @config.inline_users_in_events) + unless will_add_full_event && @config.inline_users_in_events if !event.user.nil? && !notice_user(event.user) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.user)) end end outbox.add_event(event) if will_add_full_event - outbox.add_event(debug_event) if !debug_event.nil? + outbox.add_event(debug_event) unless debug_event.nil? end # Add to the set of users we've noticed, and return true if the user was already known to us. @@ -362,7 +362,7 @@ def trigger_flush(outbox, flush_workers) events_out = @formatter.make_output_events(payload.events, payload.summary) result = @event_sender.send_event_data(events_out.to_json, "#{events_out.length} events", false) @disabled.value = true if result.must_shutdown - if !result.time_from_server.nil? + unless result.time_from_server.nil? @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i end rescue => e @@ -417,7 +417,7 @@ def add_event(event) @capacity_exceeded = false else @dropped_events += 1 - if !@capacity_exceeded + unless @capacity_exceeded @capacity_exceeded = true @logger.warn { "[LDClient] Exceeded event queue capacity. Increase capacity to avoid dropping events." } end @@ -429,7 +429,7 @@ def add_to_summary(event) end def get_payload - return FlushPayload.new(@events, @summarizer.snapshot) + FlushPayload.new(@events, @summarizer.snapshot) end def get_and_clear_dropped_count @@ -462,7 +462,7 @@ def initialize(config) # Transforms events into the format used for event sending. def make_output_events(events, summary) events_out = events.map { |e| make_output_event(e) } - if !summary.counters.empty? + unless summary.counters.empty? events_out.push(make_summary_event(summary)) end events_out @@ -478,13 +478,13 @@ def make_output_events(events, summary) key: event.key, value: event.value, } - out[:default] = event.default if !event.default.nil? - out[:variation] = event.variation if !event.variation.nil? - out[:version] = event.version if !event.version.nil? - out[:prereqOf] = event.prereq_of if !event.prereq_of.nil? + out[:default] = event.default unless event.default.nil? + out[:variation] = event.variation unless event.variation.nil? + out[:version] = event.version unless event.version.nil? + out[:prereqOf] = event.prereq_of unless event.prereq_of.nil? set_opt_context_kind(out, event.user) set_user_or_user_key(out, event.user) - out[:reason] = event.reason if !event.reason.nil? + out[:reason] = event.reason unless event.reason.nil? out when LaunchDarkly::Impl::IdentifyEvent @@ -501,9 +501,9 @@ def make_output_events(events, summary) creationDate: event.timestamp, key: event.key, } - out[:data] = event.data if !event.data.nil? + out[:data] = event.data unless event.data.nil? set_user_or_user_key(out, event.user) - out[:metricValue] = event.metric_value if !event.metric_value.nil? + out[:metricValue] = event.metric_value unless event.metric_value.nil? set_opt_context_kind(out, event.user) out @@ -523,12 +523,12 @@ def make_output_events(events, summary) user: process_user(original.user), value: original.value, } - out[:default] = original.default if !original.default.nil? 
- out[:variation] = original.variation if !original.variation.nil? - out[:version] = original.version if !original.version.nil? - out[:prereqOf] = original.prereq_of if !original.prereq_of.nil? + out[:default] = original.default unless original.default.nil? + out[:variation] = original.variation unless original.variation.nil? + out[:version] = original.version unless original.version.nil? + out[:prereqOf] = original.prereq_of unless original.prereq_of.nil? set_opt_context_kind(out, original.user) - out[:reason] = original.reason if !original.reason.nil? + out[:reason] = original.reason unless original.reason.nil? out else @@ -547,7 +547,7 @@ def make_output_events(events, summary) value: counter.value, count: counter.count, } - c[:variation] = variation if !variation.nil? + c[:variation] = variation unless variation.nil? if version.nil? c[:unknown] = true else diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 50fcec88..4657cfc6 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -34,11 +34,11 @@ def add_flag(flag_state, with_reasons, details_only_if_tracked) meta[:reason] = reason end - if !omit_details + unless omit_details meta[:version] = flag_state[:version] end - meta[:variation] = flag_state[:variation] if !flag_state[:variation].nil? + meta[:variation] = flag_state[:variation] unless flag_state[:variation].nil? meta[:trackEvents] = true if flag_state[:trackEvents] meta[:trackReason] = true if flag_state[:trackReason] meta[:debugEventsUntilDate] = flag_state[:debugEventsUntilDate] if flag_state[:debugEventsUntilDate] diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index 3ec02671..5f407cf1 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -22,7 +22,7 @@ def initialize(big_segments_config, logger) @logger = logger @last_status = nil - if !@store.nil? + unless @store.nil? @cache = ExpiringCache.new(big_segments_config.user_cache_size, big_segments_config.user_cache_time) @poll_worker = RepeatingTask.new(big_segments_config.status_poll_interval, 0, -> { poll_store_and_update_status }, logger) @poll_worker.start @@ -32,14 +32,14 @@ def initialize(big_segments_config, logger) attr_reader :status_provider def stop - @poll_worker.stop if !@poll_worker.nil? - @store.stop if !@store.nil? + @poll_worker.stop unless @poll_worker.nil? + @store.stop unless @store.nil? end def get_user_membership(user_key) - return nil if !@store + return nil unless @store membership = @cache[user_key] - if !membership + unless membership begin membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key)) membership = EMPTY_MEMBERSHIP if membership.nil? @@ -49,8 +49,8 @@ def get_user_membership(user_key) return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR) end end - poll_store_and_update_status if !@last_status - if !@last_status.available + poll_store_and_update_status unless @last_status + unless @last_status.available return BigSegmentMembershipResult.new(membership, BigSegmentsStatus::STORE_ERROR) end BigSegmentMembershipResult.new(membership, @last_status.stale ? BigSegmentsStatus::STALE : BigSegmentsStatus::HEALTHY) @@ -62,7 +62,7 @@ def get_status def poll_store_and_update_status new_status = Interfaces::BigSegmentStoreStatus.new(false, false) # default to "unavailable" if we don't get a new status below - if !@store.nil? + unless @store.nil? 
begin metadata = @store.get_metadata new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || stale?(metadata.last_up_to_date)) diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index 21123940..da5aa03e 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -25,7 +25,7 @@ def reset(time) end def create_init_event(config) - return { + { kind: 'diagnostic-init', creationDate: Util.current_time_millis, id: @id, diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index f4f98bca..9b0e641f 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -62,14 +62,14 @@ def self.error_result(errorKind, value = nil) def evaluate(flag, context) result = EvalResult.new detail = eval_internal(flag, context, result) - if !result.big_segments_status.nil? + unless result.big_segments_status.nil? # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at # some point and we will want to include the status in the evaluation reason. detail = EvaluationDetail.new(detail.value, detail.variation_index, detail.reason.with_big_segments_status(result.big_segments_status)) end result.detail = detail - return result + result end def self.make_big_segment_ref(segment) # method is visible for testing @@ -82,12 +82,12 @@ def self.make_big_segment_ref(segment) # method is visible for testing private def eval_internal(flag, context, state) - if !flag[:on] + unless flag[:on] return EvaluatorHelpers.off_result(flag) end prereq_failure_result = check_prerequisites(flag, context, state) - return prereq_failure_result if !prereq_failure_result.nil? + return prereq_failure_result unless prereq_failure_result.nil? # Check context target matches (flag[:targets] || []).each do |target| @@ -111,12 +111,12 @@ def eval_internal(flag, context, state) end # Check the fallthrough rule - if !flag[:fallthrough].nil? + unless flag[:fallthrough].nil? return get_value_for_variation_or_rollout(flag, flag[:fallthrough], context, EvaluationReason::fallthrough, EvaluatorHelpers.fallthrough_precomputed_results(flag)) end - return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) + EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end def check_prerequisites(flag, context, state) @@ -144,7 +144,7 @@ def check_prerequisites(flag, context, state) prereq_ok = false end end - if !prereq_ok + unless prereq_ok return EvaluatorHelpers.prerequisite_failed_result(prerequisite, flag) end end @@ -152,13 +152,13 @@ def check_prerequisites(flag, context, state) end def rule_match_context(rule, context, state) - return false if !rule[:clauses] + return false unless rule[:clauses] (rule[:clauses] || []).each do |clause| - return false if !clause_match_context(clause, context, state) + return false unless clause_match_context(clause, context, state) end - return true + true end def clause_match_context(clause, context, state) @@ -237,7 +237,7 @@ def segment_match_context(segment, context, state) end def big_segment_match_context(segment, context, state) - if !segment[:generation] + unless segment[:generation] # Big segment queries can only be done if the generation is known. If it's unset, # that probably means the data store was populated by an older SDK that doesn't know # about the generation property and therefore dropped it from the JSON data. 
We'll treat @@ -245,7 +245,7 @@ def big_segment_match_context(segment, context, state) state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED return false end - if !state.big_segments_status + unless state.big_segments_status result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(context.key) if result state.big_segments_membership = result.membership @@ -258,7 +258,7 @@ def big_segment_match_context(segment, context, state) segment_ref = Evaluator.make_big_segment_ref(segment) membership = state.big_segments_membership included = membership.nil? ? nil : membership[segment_ref] - return included if !included.nil? + return included unless included.nil? simple_segment_match_context(segment, context, false) end @@ -301,7 +301,7 @@ def simple_segment_match_context(segment, context, use_includes_and_excludes) return true if segment_rule_match_context(r, context, segment[:key], segment[:salt]) end - return false + false end def segment_rule_match_context(rule, context, segment_key, salt) @@ -310,12 +310,12 @@ def segment_rule_match_context(rule, context, segment_key, salt) end # If the weight is absent, this rule matches - return true if !rule[:weight] + return true unless rule[:weight] # All of the clauses are met. See if the user buckets in bucket = EvaluatorBucketing.bucket_context(context, rule[:rolloutContextKind], segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 - return bucket.nil? || bucket < weight + bucket.nil? || bucket < weight end private @@ -327,7 +327,7 @@ def get_value_for_variation_or_rollout(flag, vr, context, reason, precomputed_re return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end if precomputed_results - return precomputed_results.for_variation(index, in_experiment) + precomputed_results.for_variation(index, in_experiment) else #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set if in_experiment @@ -337,7 +337,7 @@ def get_value_for_variation_or_rollout(flag, vr, context, reason, precomputed_re reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) end end - return EvaluatorHelpers.evaluation_detail_for_variation(flag, index, reason) + EvaluatorHelpers.evaluation_detail_for_variation(flag, index, reason) end end end diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index f0b6ce15..3b995ed6 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -11,7 +11,7 @@ module EvaluatorBucketing def self.variation_index_for_context(flag, rule, context) variation = rule[:variation] - return variation, false if !variation.nil? # fixed variation + return variation, false unless variation.nil? # fixed variation rollout = rule[:rollout] return nil, false if rollout.nil? variations = rollout[:variations] @@ -23,7 +23,7 @@ def self.variation_index_for_context(flag, rule, context) seed = rollout[:seed] bucket = bucket_context(context, rollout[:contextKind], flag[:key], bucket_by, flag[:salt], seed) # may not be present in_experiment = rollout_is_experiment && !bucket.nil? - sum = 0; + sum = 0 variations.each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket.nil? 
|| bucket < sum @@ -70,7 +70,7 @@ def self.bucket_context(context, context_kind, key, bucket_by, salt, seed) hash_key = "%s.%s.%s" % [key, salt, id_hash] end - hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] + hash_val = Digest::SHA1.hexdigest(hash_key)[0..14] hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index cc5da055..49552f39 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -43,7 +43,7 @@ def send_event_data(event_data, description, is_diagnostic) headers = {} headers["content-type"] = "application/json" Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } - if !is_diagnostic + unless is_diagnostic headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s headers["X-LaunchDarkly-Payload-ID"] = payload_id end @@ -60,7 +60,7 @@ def send_event_data(event_data, description, is_diagnostic) body = response.to_s if status >= 200 && status < 300 res_time = nil - if !response.headers["date"].nil? + unless response.headers["date"].nil? begin res_time = Time.httpdate(response.headers["date"]) rescue ArgumentError @@ -77,7 +77,7 @@ def send_event_data(event_data, description, is_diagnostic) end end # used up our retries - return EventSenderResult.new(false, false, nil) + EventSenderResult.new(false, false, nil) ensure @http_client_pool.release(http_client) end diff --git a/lib/ldclient-rb/impl/event_summarizer.rb b/lib/ldclient-rb/impl/event_summarizer.rb index 5c9dcc1a..8a66a4d9 100644 --- a/lib/ldclient-rb/impl/event_summarizer.rb +++ b/lib/ldclient-rb/impl/event_summarizer.rb @@ -22,7 +22,7 @@ def initialize # Adds this event to our counters, if it is a type of event we need to count. def summarize_event(event) - return if !event.is_a?(LaunchDarkly::Impl::EvalEvent) + return unless event.is_a?(LaunchDarkly::Impl::EvalEvent) counters_for_flag = @counters[event.key] if counters_for_flag.nil? @@ -41,7 +41,7 @@ def summarize_event(event) variation_counter.count = variation_counter.count + 1 end time = event.timestamp - if !time.nil? + unless time.nil? @start_date = time if @start_date == 0 || time < @start_date @end_date = time if time > @end_date end @@ -49,8 +49,7 @@ def summarize_event(event) # Returns a snapshot of the current summarized event data, and resets this state. def snapshot - ret = EventSummary.new(@start_date, @end_date, @counters) - ret + EventSummary.new(@start_date, @end_date, @counters) end def clear diff --git a/lib/ldclient-rb/impl/event_types.rb b/lib/ldclient-rb/impl/event_types.rb index 3a30dbb0..d2152767 100644 --- a/lib/ldclient-rb/impl/event_types.rb +++ b/lib/ldclient-rb/impl/event_types.rb @@ -48,8 +48,8 @@ class CustomEvent < Event def initialize(timestamp, user, key, data = nil, metric_value = nil) super(timestamp, user) @key = key - @data = data if !data.nil? - @metric_value = metric_value if !metric_value.nil? + @data = data unless data.nil? + @metric_value = metric_value unless metric_value.nil? 
end attr_reader :key diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index f381d578..f5043fb9 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -16,14 +16,14 @@ class ConsulFeatureStoreCore end def initialize(opts) - if !CONSUL_ENABLED + unless CONSUL_ENABLED raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") end @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger - Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? - Diplomat.configuration.url = opts[:url] if !opts[:url].nil? + Diplomat.configuration = opts[:consul_config] unless opts[:consul_config].nil? + Diplomat.configuration.url = opts[:url] unless opts[:url].nil? @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end @@ -70,7 +70,7 @@ def get_all_internal(kind) results = Diplomat::Kv.get(kind_key(kind), { recurse: true }, :return) (results == "" ? [] : results).each do |result| value = result[:value] - if !value.nil? + unless value.nil? item = Model.deserialize(kind, value) items_out[item[:key].to_sym] = item end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index bb2fd2df..fc5543c3 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -21,7 +21,7 @@ class DynamoDBStoreImplBase SORT_KEY = "key" def initialize(table_name, opts) - if !AWS_SDK_ENABLED + unless AWS_SDK_ENABLED raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem") end @@ -223,11 +223,11 @@ def unmarshal_item(kind, item) end class DynamoDBBigSegmentStore < DynamoDBStoreImplBase - KEY_METADATA = 'big_segments_metadata'; - KEY_USER_DATA = 'big_segments_user'; - ATTR_SYNC_TIME = 'synchronizedOn'; - ATTR_INCLUDED = 'included'; - ATTR_EXCLUDED = 'excluded'; + KEY_METADATA = 'big_segments_metadata' + KEY_USER_DATA = 'big_segments_user' + ATTR_SYNC_TIME = 'synchronizedOn' + ATTR_INCLUDED = 'included' + ATTR_EXCLUDED = 'excluded' def initialize(table_name, opts) super(table_name, opts) @@ -258,7 +258,7 @@ def get_membership(user_hash) PARTITION_KEY => @prefix + KEY_USER_DATA, SORT_KEY => user_hash, }) - return nil if !data.item + return nil unless data.item excluded_refs = data.item[ATTR_EXCLUDED] || [] included_refs = data.item[ATTR_INCLUDED] || [] if excluded_refs.empty? && included_refs.empty? diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb index a1e4b326..9ef50ef8 100644 --- a/lib/ldclient-rb/impl/integrations/file_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -65,7 +65,7 @@ def start end def stop - @listener.stop if !@listener.nil? + @listener.stop unless @listener.nil? end private @@ -123,7 +123,7 @@ def add_item(all_data, kind, item) items = all_data[kind] raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash key = item[:key].to_sym - if !items[key].nil? + unless items[key].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end items[key] = item diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 9bda5460..c1e5854a 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -15,7 +15,7 @@ class RedisStoreImplBase end def initialize(opts) - if !REDIS_ENABLED + unless REDIS_ENABLED raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool") end @@ -55,11 +55,11 @@ def stop if opts[:redis_url] redis_opts[:url] = opts[:redis_url] end - if !redis_opts.include?(:url) + unless redis_opts.include?(:url) redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url end max_connections = opts[:max_connections] || 16 - return opts[:pool] || ConnectionPool.new(size: max_connections) do + opts[:pool] || ConnectionPool.new(size: max_connections) do ::Redis.new(redis_opts) end end @@ -152,7 +152,7 @@ def initialized_internal? private def before_update_transaction(base_key, key) - @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + @test_hook.before_update_transaction(base_key, key) unless @test_hook.nil? end def items_key(kind) diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb index c70c3ce8..ee00e711 100644 --- a/lib/ldclient-rb/impl/model/preprocessed_data.rb +++ b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -120,7 +120,7 @@ def preprocess_item!(kind, item) end def preprocess_all_items!(kind, items_map) - return items_map if !items_map + return items_map unless items_map items_map.each do |key, item| preprocess_item!(kind, item) end diff --git a/lib/ldclient-rb/impl/repeating_task.rb b/lib/ldclient-rb/impl/repeating_task.rb index bb0255fe..299454cc 100644 --- a/lib/ldclient-rb/impl/repeating_task.rb +++ b/lib/ldclient-rb/impl/repeating_task.rb @@ -19,7 +19,7 @@ def start if @start_delay sleep(@start_delay) end - while !@stopped.value do + until @stopped.value do started_at = Time.now begin @task.call diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4454fe75..9ad15729 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -33,7 +33,7 @@ def self.sort_collection(kind, input) return input if dependency_fn.nil? || input.empty? remaining_items = input.clone items_out = {} - while !remaining_items.empty? + until remaining_items.empty? # pick a random item that hasn't been updated yet key, item = remaining_items.first self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) @@ -46,7 +46,7 @@ def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items remaining_items.delete(item_key) # we won't need to visit this item again dependency_fn.call(item).each do |dep_key| dep_item = remaining_items[dep_key.to_sym] - self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) if !dep_item.nil? + self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) unless dep_item.nil? 
end items_out[item_key] = item end diff --git a/lib/ldclient-rb/impl/unbounded_pool.rb b/lib/ldclient-rb/impl/unbounded_pool.rb index 55bd515f..c8219241 100644 --- a/lib/ldclient-rb/impl/unbounded_pool.rb +++ b/lib/ldclient-rb/impl/unbounded_pool.rb @@ -25,7 +25,7 @@ def release(instance) def dispose_all @lock.synchronize { - @pool.map { |instance| @instance_destructor.call(instance) } if !@instance_destructor.nil? + @pool.map { |instance| @instance_destructor.call(instance) } unless @instance_destructor.nil? @pool.clear() } end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index b3947047..1365baf9 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -38,7 +38,7 @@ def self.default_prefix # def self.new_feature_store(opts = {}) core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) - return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end end diff --git a/lib/ldclient-rb/integrations/file_data.rb b/lib/ldclient-rb/integrations/file_data.rb index b13128a2..31aa235b 100644 --- a/lib/ldclient-rb/integrations/file_data.rb +++ b/lib/ldclient-rb/integrations/file_data.rb @@ -100,7 +100,7 @@ module FileData # @return an object that can be stored in {Config#data_source} # def self.data_source(options={}) - return lambda { |sdk_key, config| + lambda { |sdk_key, config| Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 95147286..1d2e579b 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -59,7 +59,7 @@ def self.default_prefix # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts = {}) - return RedisFeatureStore.new(opts) + RedisFeatureStore.new(opts) end # @@ -91,7 +91,7 @@ def self.new_feature_store(opts = {}) # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object # def self.new_big_segment_store(opts) - return LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts) + LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts) end end end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index f04d75d4..83a343b6 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -172,7 +172,7 @@ def use_preconfigured_segment(segment) key = item[:key].to_sym @lock.with_write_lock do old_item = current[key] - if !old_item.nil? then + unless old_item.nil? then item = item.clone item[:version] = old_item[:version] + 1 end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 4bb22b0e..bb129c9c 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -49,7 +49,7 @@ def init(all_data) @core.init_internal(all_data) @inited.make_true - if !@cache.nil? + unless @cache.nil? @cache.clear all_data.each do |kind, items| @cache[kind] = items_if_not_deleted(items) @@ -61,15 +61,15 @@ def init(all_data) end def get(kind, key) - if !@cache.nil? + unless @cache.nil? 
cache_key = item_cache_key(kind, key) cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values - return item_if_not_deleted(cached[0]) if !cached.nil? + return item_if_not_deleted(cached[0]) unless cached.nil? end item = @core.get_internal(kind, key) - if !@cache.nil? + unless @cache.nil? @cache[cache_key] = [item] end @@ -77,20 +77,20 @@ def get(kind, key) end def all(kind) - if !@cache.nil? + unless @cache.nil? items = @cache[all_cache_key(kind)] - return items if !items.nil? + return items unless items.nil? end items = items_if_not_deleted(@core.get_all_internal(kind)) - @cache[all_cache_key(kind)] = items if !@cache.nil? + @cache[all_cache_key(kind)] = items unless @cache.nil? items end def upsert(kind, item) new_state = @core.upsert_internal(kind, item) - if !@cache.nil? + unless @cache.nil? @cache[item_cache_key(kind, item[:key])] = [new_state] @cache.delete(all_cache_key(kind)) end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 20558e62..688f6136 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -295,7 +295,7 @@ def track(event_name, user, data = nil, metric_value = nil) def all_flags_state(context, options={}) return FeatureFlagsState.new(false) if @config.offline? - if !initialized? + unless initialized? if @store.initialized? @config.logger.warn { "Called all_flags_state before client initialization; using last known values from data store" } else @@ -407,7 +407,7 @@ def evaluate_internal(key, context, default, with_reasons) return detail end - if !initialized? + unless initialized? if @store.initialized? @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @@ -431,7 +431,7 @@ def evaluate_internal(key, context, default, with_reasons) begin res = @evaluator.evaluate(feature, context) - if !res.prereq_evals.nil? + unless res.prereq_evals.nil? res.prereq_evals.each do |prereq_eval| # TODO: Address when working on u2c events # record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, context, prereq_eval.detail, with_reasons) @@ -443,13 +443,13 @@ def evaluate_internal(key, context, default, with_reasons) end # TODO: Address when working on u2c events # record_flag_eval(feature, context, detail, default, with_reasons) - return detail + detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) # TODO: Address when working on u2c events # record_flag_eval_error(feature, context, default, detail.reason, with_reasons) - return detail + detail end end @@ -496,7 +496,7 @@ def evaluate_internal(key, context, default, with_reasons) end private def experiment?(flag, reason) - return false if !reason + return false unless reason if reason.in_experiment return true @@ -505,7 +505,7 @@ def evaluate_internal(key, context, default, with_reasons) case reason[:kind] when 'RULE_MATCH' index = reason[:ruleIndex] - if !index.nil? + unless index.nil? 
rules = flag[:rules] || [] return index >= 0 && index < rules.length && rules[index][:trackEvents] end diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/memoized_value.rb index ddddb7e0..7a829f29 100644 --- a/lib/ldclient-rb/memoized_value.rb +++ b/lib/ldclient-rb/memoized_value.rb @@ -14,7 +14,7 @@ def initialize(&generator) def get @mutex.synchronize do - if !@inited + unless @inited @value = @generator.call @inited = true end diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/non_blocking_thread_pool.rb index 28ec42a9..06d644ec 100644 --- a/lib/ldclient-rb/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/non_blocking_thread_pool.rb @@ -17,7 +17,7 @@ def initialize(capacity) # Attempts to submit a job, but only if a worker is available. Unlike the regular post method, # this returns a value: true if the job was submitted, false if all workers are busy. def post - if !@semaphore.try_acquire(1) + unless @semaphore.try_acquire(1) return end @pool.post do diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index d571f837..89d9f6c9 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -43,8 +43,8 @@ def poll end rescue UnexpectedResponseError => e message = Util.http_error_message(e.status, "polling request", "will retry") - @config.logger.error { "[LDClient] #{message}" }; - if !Util.http_error_recoverable?(e.status) + @config.logger.error { "[LDClient] #{message}" } + unless Util.http_error_recoverable?(e.status) @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set stop end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 69fde38f..58db38ab 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -53,7 +53,7 @@ def make_request(path) Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } headers["Connection"] = "keep-alive" cached = @cache.read(uri) - if !cached.nil? + unless cached.nil? headers["If-None-Match"] = cached.etag end response = @http_client.request("GET", uri, { @@ -72,7 +72,7 @@ def make_request(path) end body = fix_encoding(body, response.headers["content-type"]) etag = response.headers["etag"] - @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? + @cache.write(uri, CacheEntry.new(etag, body)) unless etag.nil? end body end @@ -96,7 +96,7 @@ def parse_content_type(value) break end end - return [parts[0], charset] + [parts[0], charset] end end end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 1fb0284b..e3824538 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -60,7 +60,7 @@ def start status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) + unless Util.http_error_recoverable?(status) @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set stop end diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index c5fc45e7..638bb4d0 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -16,7 +16,7 @@ def transform_user_props(user_props) filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS) custom = user_props[:custom] - if !custom.nil? + unless custom.nil? 
filtered_user_props[:custom], removed_custom = filter_values(custom, user_private_attrs) removed.merge(removed_custom) end @@ -25,7 +25,7 @@ def transform_user_props(user_props) # note, :privateAttributeNames is what the developer sets; :privateAttrs is what we send to the server filtered_user_props[:privateAttrs] = removed.to_a.sort.map { |s| s.to_s } end - return filtered_user_props + filtered_user_props end private diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index ede16bd3..343bbc98 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -11,7 +11,7 @@ def self.stringify_attrs(hash, attrs) attrs.each do |attr| value = hash[attr] if !value.nil? && !value.is_a?(String) - ret = hash.clone if !changed + ret = hash.clone unless changed ret[attr] = value.to_s changed = true end @@ -25,7 +25,7 @@ def self.new_http_client(uri_s, config) http_client_options["socket_class"] = config.socket_factory end proxy = URI.parse(uri_s).find_proxy - if !proxy.nil? + unless proxy.nil? http_client_options["proxy"] = { proxy_address: proxy.host, proxy_port: proxy.port, @@ -33,7 +33,7 @@ def self.new_http_client(uri_s, config) proxy_password: proxy.password, } end - return HTTP::Client.new(http_client_options) + HTTP::Client.new(http_client_options) .timeout({ read: config.read_timeout, connect: config.connect_timeout, diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 60d1df44..253d3afe 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -6,7 +6,7 @@ it "returns nil for any value if invalid" do result = subject.create({ key: "", kind: "user", name: "testing" }) - expect(result.valid?).to be_falsey + expect(result.valid?).to be false expect(result.key).to be_nil expect(result.get_value(:key)).to be_nil @@ -34,26 +34,26 @@ expect(result).to be_a(LaunchDarkly::LDContext) expect(result.key).to eq("user-key") expect(result.kind).to eq("user") - expect(result.valid?).to be_truthy + expect(result.valid?).to be true end it "allows an empty string for a key, but it cannot be missing or nil" do - expect(subject.create({ key: "" }).valid?).to be_truthy - expect(subject.create({ key: nil }).valid?).to be_falsey - expect(subject.create({}).valid?).to be_falsey + expect(subject.create({ key: "" }).valid?).to be true + expect(subject.create({ key: nil }).valid?).to be false + expect(subject.create({}).valid?).to be false end it "anonymous is required to be a boolean or nil" do - expect(subject.create({ key: "" }).valid?).to be_truthy - expect(subject.create({ key: "", anonymous: true }).valid?).to be_truthy - expect(subject.create({ key: "", anonymous: false }).valid?).to be_truthy - expect(subject.create({ key: "", anonymous: 0 }).valid?).to be_falsey + expect(subject.create({ key: "" }).valid?).to be true + expect(subject.create({ key: "", anonymous: true }).valid?).to be true + expect(subject.create({ key: "", anonymous: false }).valid?).to be true + expect(subject.create({ key: "", anonymous: 0 }).valid?).to be false end it "name is required to be a string or nil" do - expect(subject.create({ key: "" }).valid?).to be_truthy - expect(subject.create({ key: "", name: "My Name" }).valid?).to be_truthy - expect(subject.create({ key: "", name: 0 }).valid?).to be_falsey + expect(subject.create({ key: "" }).valid?).to be true + expect(subject.create({ key: "", name: "My Name" }).valid?).to be true + expect(subject.create({ key: "", name: 0 }).valid?).to be false end it "requires privateAttributeNames to be an array" do @@ -61,7 +61,7 @@ key: "user-key", 
privateAttributeNames: "not an array", } - expect(subject.create(context).valid?).to be_falsey + expect(subject.create(context).valid?).to be false end it "overwrite custom properties with built-ins when collisons occur" do @@ -97,33 +97,33 @@ expect(result).to be_a(LaunchDarkly::LDContext) expect(result.key).to eq("launchdarkly") expect(result.kind).to eq("org") - expect(result.valid?).to be_truthy + expect(result.valid?).to be true end it "do not allow empty strings or nil values for keys" do - expect(subject.create({ kind: "user", key: "" }).valid?).to be_falsey - expect(subject.create({ kind: "user", key: nil }).valid?).to be_falsey - expect(subject.create({ kind: "user" }).valid?).to be_falsey + expect(subject.create({ kind: "user", key: "" }).valid?).to be false + expect(subject.create({ kind: "user", key: nil }).valid?).to be false + expect(subject.create({ kind: "user" }).valid?).to be false end it "does not allow reserved names or empty values for kind" do - expect(subject.create({ kind: true, key: "key" }).valid?).to be_falsey - expect(subject.create({ kind: "", key: "key" }).valid?).to be_falsey - expect(subject.create({ kind: "kind", key: "key" }).valid?).to be_falsey - expect(subject.create({ kind: "multi", key: "key" }).valid?).to be_falsey + expect(subject.create({ kind: true, key: "key" }).valid?).to be false + expect(subject.create({ kind: "", key: "key" }).valid?).to be false + expect(subject.create({ kind: "kind", key: "key" }).valid?).to be false + expect(subject.create({ kind: "multi", key: "key" }).valid?).to be false end it "anonymous is required to be a boolean or nil" do - expect(subject.create({ key: "key", kind: "user" }).valid?).to be_truthy - expect(subject.create({ key: "key", kind: "user", anonymous: true }).valid?).to be_truthy - expect(subject.create({ key: "key", kind: "user", anonymous: false }).valid?).to be_truthy - expect(subject.create({ key: "key", kind: "user", anonymous: 0 }).valid?).to be_falsey + expect(subject.create({ key: "key", kind: "user" }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", anonymous: true }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", anonymous: false }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", anonymous: 0 }).valid?).to be false end it "name is required to be a string or nil" do - expect(subject.create({ key: "key", kind: "user" }).valid?).to be_truthy - expect(subject.create({ key: "key", kind: "user", name: "My Name" }).valid?).to be_truthy - expect(subject.create({ key: "key", kind: "user", name: 0 }).valid?).to be_falsey + expect(subject.create({ key: "key", kind: "user" }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", name: "My Name" }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", name: 0 }).valid?).to be false end it "require privateAttributes to be an array" do @@ -134,7 +134,7 @@ privateAttributes: "not an array", }, } - expect(subject.create(context).valid?).to be_falsey + expect(subject.create(context).valid?).to be false end it "overwrite secondary property if also specified at top level" do @@ -161,7 +161,7 @@ expect(multi_context).to be_a(LaunchDarkly::LDContext) expect(multi_context.key).to be_nil expect(multi_context.kind).to eq("multi") - expect(multi_context.valid?).to be_truthy + expect(multi_context.valid?).to be true end it "can be created from a hash" do @@ -171,7 +171,7 @@ expect(multi_context).to be_a(LaunchDarkly::LDContext) expect(multi_context.key).to be_nil 
expect(multi_context.kind).to eq(LaunchDarkly::LDContext::KIND_MULTI) - expect(multi_context.valid?).to be_truthy + expect(multi_context.valid?).to be true end it "will return the single kind context if only one is provided" do @@ -189,12 +189,12 @@ multi_context = subject.create_multi([embedded_multi_context]) expect(multi_context).to be_a(LaunchDarkly::LDContext) - expect(multi_context.valid?).to be_falsey + expect(multi_context.valid?).to be false end it "are invalid if no contexts are provided" do multi_context = subject.create_multi([]) - expect(multi_context.valid?).to be_falsey + expect(multi_context.valid?).to be false end it "are invalid if a single context is invalid" do @@ -202,9 +202,9 @@ invalid_context = subject.create({ kind: "org" }) multi_context = subject.create_multi([valid_context, invalid_context]) - expect(valid_context.valid?).to be_truthy - expect(invalid_context.valid?).to be_falsey - expect(multi_context.valid?).to be_falsey + expect(valid_context.valid?).to be true + expect(invalid_context.valid?).to be false + expect(multi_context.valid?).to be false end end end @@ -213,7 +213,7 @@ it "invalid contexts have a size of 0" do context = subject.create({}) - expect(context.valid?).to be_falsey + expect(context.valid?).to be false expect(context.individual_context_count).to eq(0) end @@ -235,7 +235,7 @@ it "invalid contexts always return nil" do context = subject.create({kind: "user"}) - expect(context.valid?).to be_falsey + expect(context.valid?).to be false expect(context.individual_context(-1)).to be_nil expect(context.individual_context(0)).to be_nil expect(context.individual_context(1)).to be_nil @@ -246,7 +246,7 @@ it "single contexts can retrieve themselves" do context = subject.create({key: "user-key", kind: "user"}) - expect(context.valid?).to be_truthy + expect(context.valid?).to be true expect(context.individual_context(-1)).to be_nil expect(context.individual_context(0)).to eq(context) expect(context.individual_context(1)).to be_nil @@ -260,7 +260,7 @@ org_context = subject.create({ key: "org-key", kind: "org" }) multi_context = subject.create_multi([user_context, org_context]) - expect(multi_context.valid?).to be_truthy + expect(multi_context.valid?).to be true expect(multi_context.individual_context(-1)).to be_nil expect(multi_context.individual_context(0)).to eq(user_context) expect(multi_context.individual_context(1)).to eq(org_context) diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb index 7b1b6856..df880447 100644 --- a/spec/evaluation_detail_spec.rb +++ b/spec/evaluation_detail_spec.rb @@ -69,7 +69,7 @@ module LaunchDarkly expect(reason).not_to eq values[j][0] end end - if !unequal_values.nil? + unless unequal_values.nil? 
unequal_values.each do |v| expect(reason).not_to eq v end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index df84cd1e..183803fd 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -497,7 +497,7 @@ def feature_event(flag, user, variation, value, inline_user = false, timestamp = end def debug_event(flag, user, variation, value, timestamp = starting_timestamp) - out = { + { kind: 'debug', creationDate: timestamp, key: flag[:key], @@ -506,7 +506,6 @@ def debug_event(flag, user, variation, value, timestamp = starting_timestamp) value: value, user: user, } - out end def custom_event(user, key, data, metric_value, inline_user = false, timestamp = starting_timestamp) @@ -515,13 +514,13 @@ def custom_event(user, key, data, metric_value, inline_user = false, timestamp = creationDate: timestamp, key: key, } - out[:data] = data if !data.nil? + out[:data] = data unless data.nil? if inline_user out[:user] = user else out[:userKey] = user[:key] end - out[:metricValue] = metric_value if !metric_value.nil? + out[:metricValue] = metric_value unless metric_value.nil? out end diff --git a/spec/http_util.rb b/spec/http_util.rb index a2aad72b..99b1710f 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -62,7 +62,7 @@ def setup_status_response(uri_path, status, headers={}) def setup_ok_response(uri_path, body, content_type=nil, headers={}) setup_response(uri_path) do |req, res| res.status = 200 - res.content_type = content_type if !content_type.nil? + res.content_type = content_type unless content_type.nil? res.body = body headers.each { |n, v| res[n] = v } end @@ -80,7 +80,7 @@ def await_request def await_request_with_body r = @requests_queue.pop - return r[0], r[1] + [r[0], r[1]] end end @@ -96,7 +96,7 @@ def initialize def create_server(port, base_opts) WEBrick::HTTPProxyServer.new(base_opts.merge({ ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? + unless @connect_status.nil? 
res.status = @connect_status end @request_count += 1 diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 7270f258..e30479ba 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -9,15 +9,15 @@ it "returns the expected bucket values for seed" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) - expect(bucket).to be_within(0.0000001).of(0.09801207); + expect(bucket).to be_within(0.0000001).of(0.09801207) user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) - expect(bucket).to be_within(0.0000001).of(0.14483777); + expect(bucket).to be_within(0.0000001).of(0.14483777) user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) - expect(bucket).to be_within(0.0000001).of(0.9242641); + expect(bucket).to be_within(0.0000001).of(0.9242641) end it "returns the same bucket regardless of hashKey and salt" do @@ -48,15 +48,15 @@ it "gets expected bucket values for specific keys" do user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) - expect(bucket).to be_within(0.0000001).of(0.42157587); + expect(bucket).to be_within(0.0000001).of(0.42157587) user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) - expect(bucket).to be_within(0.0000001).of(0.6708485); + expect(bucket).to be_within(0.0000001).of(0.6708485) user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) - expect(bucket).to be_within(0.0000001).of(0.10343106); + expect(bucket).to be_within(0.0000001).of(0.10343106) end it "can bucket by int value (equivalent to string)" do diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 30c1064e..6f0f6ecb 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -51,7 +51,7 @@ def with_unknown_segment(key) def with_big_segment_for_user(user, segment, included) user_key = user.key - @big_segment_memberships[user_key] = {} if !@big_segment_memberships.has_key?(user_key) + @big_segment_memberships[user_key] = {} unless @big_segment_memberships.has_key?(user_key) @big_segment_memberships[user_key][Evaluator.make_big_segment_ref(segment)] = included self end @@ -73,17 +73,17 @@ def build end private def get_flag(key) - raise "should not have requested flag #{key}" if !@flags.has_key?(key) + raise "should not have requested flag #{key}" unless @flags.has_key?(key) @flags[key] end private def get_segment(key) - raise "should not have requested segment #{key}" if !@segments.has_key?(key) + raise "should not have requested segment #{key}" unless @segments.has_key?(key) @segments[key] end private def get_big_segments(user_key) - raise "should not have requested big segments for #{user_key}" if !@big_segment_memberships.has_key?(user_key) + raise "should not have requested big segments for #{user_key}" unless @big_segment_memberships.has_key?(user_key) @big_segments_queries << user_key BigSegmentMembershipResult.new(@big_segment_memberships[user_key], @big_segments_status) end diff --git a/spec/impl/repeating_task_spec.rb 
b/spec/impl/repeating_task_spec.rb index 268d7320..89f4a408 100644 --- a/spec/impl/repeating_task_spec.rb +++ b/spec/impl/repeating_task_spec.rb @@ -29,7 +29,7 @@ def null_logger task.start 3.times do time = queue.pop - if !last.nil? + unless last.nil? expect(time.to_f - last.to_f).to be >=(0.05) end last = time diff --git a/spec/integrations/dynamodb_stores_spec.rb b/spec/integrations/dynamodb_stores_spec.rb index bf178dd0..a4429497 100644 --- a/spec/integrations/dynamodb_stores_spec.rb +++ b/spec/integrations/dynamodb_stores_spec.rb @@ -115,7 +115,7 @@ def set_big_segments(user_hash, includes, excludes) $DynamoDBBigSegmentStore::ATTR_EXCLUDED => Set.new(excludes), } sets.each do |attr_name, values| - if !values.empty? + unless values.empty? client.update_item( table_name: TABLE_NAME, key: { diff --git a/spec/integrations/store_wrapper_spec.rb b/spec/integrations/store_wrapper_spec.rb index e7890802..58def5a8 100644 --- a/spec/integrations/store_wrapper_spec.rb +++ b/spec/integrations/store_wrapper_spec.rb @@ -238,7 +238,7 @@ def initialize attr_accessor :inited def force_set(kind, item) - @data[kind] = {} if !@data.has_key?(kind) + @data[kind] = {} unless @data.has_key?(kind) @data[kind][item[:key]] = item end @@ -261,7 +261,7 @@ def get_all_internal(kind) end def upsert_internal(kind, item) - @data[kind] = {} if !@data.has_key?(kind) + @data[kind] = {} unless @data.has_key?(kind) old_item = @data[kind][item[:key]] return old_item if !old_item.nil? && old_item[:version] >= item[:version] @data[kind][item[:key]] = item diff --git a/spec/mock_components.rb b/spec/mock_components.rb index d859bfac..6bed7621 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -58,7 +58,7 @@ def initialize end def get_metadata - raise @metadata_error if !@metadata_error.nil? + raise @metadata_error unless @metadata_error.nil? @metadata end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index c6032bd0..3582d3db 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -52,7 +52,7 @@ def with_requestor(base_uri, opts = {}) server.setup_ok_response("/", { flags: { x: { key: "y" } } }.to_json) expect do requestor.request_all_data() - end.to output(/\[LDClient\] Got response from uri\:/).to_stdout_from_any_process + end.to output(/\[LDClient\] Got response from uri:/).to_stdout_from_any_process end end end From 540ee91fe4897d62d1ea663b02d1e3eaa1c2ad3b Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 28 Oct 2022 14:37:45 -0400 Subject: [PATCH 261/292] Remove support for secondary attribute (#213) As decided in the [spec], we are removing the special behavior of the secondary attribute. Going forward, secondary will be treated like any other attribute, and will no longer be included when determining the bucket for a context. 
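As a rough illustration (not part of this diff; the flag key, salt, and context
values below are invented), two contexts that differ only in a `secondary` value
now hash to the same bucket, because bucketing considers only the bucket-by
attribute (the key, by default):

    require "ldclient-rb"

    ctx_plain     = LaunchDarkly::LDContext.create({ kind: "user", key: "user-key" })
    ctx_secondary = LaunchDarkly::LDContext.create({ kind: "user", key: "user-key", secondary: "abc" })

    # bucket_context(context, context_kind, flag_key, bucket_by, salt, seed)
    bucketer = LaunchDarkly::Impl::EvaluatorBucketing
    a = bucketer.bucket_context(ctx_plain, "user", "flag-key", "key", "salty", nil)
    b = bucketer.bucket_context(ctx_secondary, "user", "flag-key", "key", "salty", nil)

    # With this change, a == b: `secondary` is just another custom attribute and is
    # ignored by bucketing. Previously it was appended to the hash input, so these
    # two contexts could land in different buckets (and therefore variations).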
[spec]: https://launchdarkly.atlassian.net/wiki/spaces/ENG/pages/2165212563/Consistent+and+Transparent+Rollout+Behavior+Unifying+Percent+Rollout+and+Traffic+Allocation --- Makefile | 1 - lib/ldclient-rb/context.rb | 19 ++++++------------- lib/ldclient-rb/events.rb | 2 +- lib/ldclient-rb/impl/evaluator_bucketing.rb | 4 ---- spec/context_spec.rb | 20 +++----------------- spec/events_spec.rb | 4 ++-- spec/impl/evaluator_rule_spec.rb | 17 +++-------------- 7 files changed, 15 insertions(+), 52 deletions(-) diff --git a/Makefile b/Makefile index 868860d2..a48d64b2 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,6 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # - "events": These test suites will be unavailable until more of the U2C implementation is done. TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ -skip 'evaluation/bucketing/bucket by non-key attribute/in rollouts/string value/complex attribute reference' \ - -skip 'evaluation/bucketing/secondary' \ -skip 'evaluation/parameterized/attribute references' \ -skip 'evaluation/parameterized/bad attribute reference errors' \ -skip 'evaluation/parameterized/prerequisites' \ diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 28ebc6f1..91740335 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -36,18 +36,16 @@ class LDContext # @param kind [String, nil] # @param name [String, nil] # @param anonymous [Boolean, nil] - # @param secondary [String, nil] # @param attributes [Hash, nil] # @param private_attributes [Array, nil] # @param error [String, nil] # @param contexts [Array, nil] # - def initialize(key, kind, name = nil, anonymous = nil, secondary = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) + def initialize(key, kind, name = nil, anonymous = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) @key = key @kind = kind @name = name @anonymous = anonymous || false - @secondary = secondary @attributes = attributes @private_attributes = private_attributes @error = error @@ -235,8 +233,6 @@ def individual_context(kind) @name when :anonymous @anonymous - when :secondary - @secondary else @attributes&.fetch(name, nil) end @@ -322,7 +318,7 @@ def self.create_multi(contexts) return contexts[0] if contexts.length == 1 - new(nil, "multi", nil, false, nil, nil, nil, nil, contexts) + new(nil, "multi", nil, false, nil, nil, nil, contexts) end # @@ -330,7 +326,7 @@ def self.create_multi(contexts) # @return [LDContext] # private_class_method def self.create_invalid_context(error) - new(nil, nil, nil, false, nil, nil, nil, error) + new(nil, nil, nil, false, nil, nil, error) end # @@ -376,7 +372,7 @@ def self.create_multi(contexts) return create_invalid_context("The provided private attributes are not an array") end - new(key.to_s, KIND_DEFAULT, name, anonymous, data[:secondary], attributes, private_attributes) + new(key.to_s, KIND_DEFAULT, name, anonymous, attributes, private_attributes) end # @@ -416,13 +412,10 @@ def self.create_multi(contexts) # We only need to create an attribute hash if there are keys set outside # of the ones we store in dedicated instance variables. - # - # :secondary is not a supported top level key in the new schema. - # However, someone could still include it so we need to ignore it. 
attributes = nil data.each do |k, v| case k - when :kind, :key, :name, :anonymous, :secondary, :_meta + when :kind, :key, :name, :anonymous, :_meta next else attributes ||= {} @@ -430,7 +423,7 @@ def self.create_multi(contexts) end end - new(key.to_s, kind, name, anonymous, meta[:secondary], attributes, private_attributes) + new(key.to_s, kind, name, anonymous, attributes, private_attributes) end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index fd3d1bba..1ad70e95 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -62,7 +62,7 @@ def stop end MAX_FLUSH_WORKERS = 5 - USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, + USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :ip, :country, :email, :firstName, :lastName, :avatar, :name ] private_constant :MAX_FLUSH_WORKERS diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 3b995ed6..37b061f0 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -60,10 +60,6 @@ def self.bucket_context(context, context_kind, key, bucket_by, salt, seed) id_hash = bucketable_string_value(context_value) return 0.0 if id_hash.nil? - if matched_context.get_value(:secondary) - id_hash += "." + matched_context.get_value(:secondary).to_s - end - if seed hash_key = "%d.%s" % [seed, id_hash] else diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 253d3afe..10251b40 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -67,16 +67,16 @@ it "overwrite custom properties with built-ins when collisons occur" do context = { key: "user-key", - secondary: "secondary", + ip: "192.168.1.1", avatar: "avatar", custom: { - secondary: "custom secondary", + ip: "127.0.0.1", avatar: "custom avatar", }, } result = subject.create(context) - expect(result.get_value(:secondary)).to eq("secondary") + expect(result.get_value(:ip)).to eq("192.168.1.1") expect(result.get_value(:avatar)).to eq("avatar") end end @@ -136,20 +136,6 @@ } expect(subject.create(context).valid?).to be false end - - it "overwrite secondary property if also specified at top level" do - context = { - key: "user-key", - kind: "user", - secondary: "invalid secondary", - _meta: { - secondary: "real secondary", - }, - } - - result = subject.create(context) - expect(result.get_value(:secondary)).to eq("real secondary") - end end describe "multi-kind contexts" do diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 183803fd..1e875074 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -13,9 +13,9 @@ let(:default_config) { LaunchDarkly::Config.new(default_config_opts) } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } - let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, + let(:numeric_user) { { key: 1, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, avatar: 8, name: 9, anonymous: false, custom: { age: 99 } } } - let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', + let(:stringified_numeric_user) { { key: '1', ip: '3', country: '4', email: '5', firstName: '6', lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } def with_processor_and_sender(config) diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 211090f7..6e06a465 100644 --- 
a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -81,17 +81,6 @@ module Impl expect(result.detail.value).to eq(true) end - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: "userkey", secondary: 999 }) - result = basic_evaluator.evaluate(flag, context) - expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) - end - describe "rule experiment/rollout behavior" do it "evaluates rollout for rule" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], @@ -122,7 +111,7 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: "userkey", secondary: 999 }) + context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) @@ -132,7 +121,7 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: "userkey", secondary: 999 }) + context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) @@ -142,7 +131,7 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: "userkey", secondary: 999 }) + context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) From 1bc91866ceda6998934db96fcca93f60934ce698 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Wed, 2 Nov 2022 09:54:53 -0400 Subject: [PATCH 262/292] Remove deprecated APIs (#214) Since the users to context change requires a version break, this is the perfect time to remove previously deprecated bits of functionality. 
This includes: - Removing `update_processor*` config entries - `FileDataSource` entry point - `RedisFeatureStore` entry point - `Redis::sadd?` warning in unit tests --- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/config.rb | 12 +-- lib/ldclient-rb/file_data_source.rb | 23 ----- .../impl/integrations/redis_impl.rb | 81 +++++++++++++++++ lib/ldclient-rb/integrations/redis.rb | 4 +- lib/ldclient-rb/redis_store.rb | 88 ------------------- spec/integrations/redis_stores_spec.rb | 4 +- 7 files changed, 86 insertions(+), 127 deletions(-) delete mode 100644 lib/ldclient-rb/file_data_source.rb delete mode 100644 lib/ldclient-rb/redis_store.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 6a57c953..8ca766e5 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -25,5 +25,4 @@ module LaunchDarkly require "ldclient-rb/non_blocking_thread_pool" require "ldclient-rb/events" require "ldclient-rb/requestor" -require "ldclient-rb/file_data_source" require "ldclient-rb/integrations" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 66a22d39..f3e483aa 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -36,8 +36,6 @@ class Config # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. - # @option opts [Object] :update_processor Obsolete synonym for `data_source`. - # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. # @option opts [String] :wrapper_name See {#wrapper_name}. @@ -68,9 +66,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false - @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] - @update_processor = opts[:update_processor] - @update_processor_factory = opts[:update_processor_factory] + @data_source = opts[:data_source] @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval @@ -304,12 +300,6 @@ def offline? # attr_reader :application - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor - - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor_factory - # # Set to true to opt out of sending diagnostics data. # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb deleted file mode 100644 index 30440353..00000000 --- a/lib/ldclient-rb/file_data_source.rb +++ /dev/null @@ -1,23 +0,0 @@ -require "ldclient-rb/integrations/file_data" - -module LaunchDarkly - # - # Deprecated entry point for the file data source feature. - # - # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. - # - # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData}. 
- # - class FileDataSource - # - # Deprecated entry point for the file data source feature. - # - # The new preferred usage is {LaunchDarkly::Integrations::FileData#data_source}. - # - # @deprecated This is replaced by {LaunchDarkly::Integrations::FileData#data_source}. - # - def self.factory(options={}) - LaunchDarkly::Integrations::FileData.data_source(options) - end - end -end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index c1e5854a..783af2a2 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -5,6 +5,87 @@ module LaunchDarkly module Impl module Integrations module Redis + # + # An implementation of the LaunchDarkly client's feature store that uses a Redis + # instance. This object holds feature flags and related data received from the + # streaming API. Feature data can also be further cached in memory to reduce overhead + # of calls to Redis. + # + # To use this class, you must first have the `redis` and `connection-pool` gems + # installed. Then, create an instance and store it in the `feature_store` property + # of your client configuration. + # + class RedisFeatureStore + include LaunchDarkly::Interfaces::FeatureStore + + # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating + # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # away from exposing these concrete classes and use factory methods instead. + + # + # Constructor for a RedisFeatureStore instance. + # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url) + # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally + # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. + # + def initialize(opts = {}) + core = RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + + # + # Default value for the `redis_url` constructor parameter; points to an instance of Redis + # running at `localhost` with its default port. + # + def self.default_redis_url + LaunchDarkly::Integrations::Redis::default_redis_url + end + + # + # Default value for the `prefix` constructor parameter. + # + def self.default_prefix + LaunchDarkly::Integrations::Redis::default_prefix + end + + def get(kind, key) + @wrapper.get(kind, key) + end + + def all(kind) + @wrapper.all(kind) + end + + def delete(kind, key, version) + @wrapper.delete(kind, key, version) + end + + def init(all_data) + @wrapper.init(all_data) + end + + def upsert(kind, item) + @wrapper.upsert(kind, item) + end + + def initialized? 
+ @wrapper.initialized? + end + + def stop + @wrapper.stop + end + end + class RedisStoreImplBase begin require "redis" diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 1d2e579b..10f977a0 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/redis_store" # eventually we will just refer to impl/integrations/redis_impl directly +require "ldclient-rb/impl/integrations/redis_impl" module LaunchDarkly module Integrations @@ -59,7 +59,7 @@ def self.default_prefix # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts = {}) - RedisFeatureStore.new(opts) + LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStore.new(opts) end # diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb deleted file mode 100644 index b94e61f2..00000000 --- a/lib/ldclient-rb/redis_store.rb +++ /dev/null @@ -1,88 +0,0 @@ -require "ldclient-rb/interfaces" -require "ldclient-rb/impl/integrations/redis_impl" - -module LaunchDarkly - # - # An implementation of the LaunchDarkly client's feature store that uses a Redis - # instance. This object holds feature flags and related data received from the - # streaming API. Feature data can also be further cached in memory to reduce overhead - # of calls to Redis. - # - # To use this class, you must first have the `redis` and `connection-pool` gems - # installed. Then, create an instance and store it in the `feature_store` property - # of your client configuration. - # - # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific - # implementation class may be changed or removed in the future. - # - class RedisFeatureStore - include LaunchDarkly::Interfaces::FeatureStore - - # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating - # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical - # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate - # away from exposing these concrete classes and use factory methods instead. - - # - # Constructor for a RedisFeatureStore instance. - # - # @param opts [Hash] the configuration options - # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url) - # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly - # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching - # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally - # @option opts [Object] :pool custom connection pool, if desired - # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. 
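Because `RedisFeatureStore` is no longer exposed at the top level, the factory method is the supported way to get a Redis-backed store. A minimal sketch, assuming a local Redis instance and arbitrary cache settings; the option names come from the constructor documentation above, and the `redis` and `connection-pool` gems must be installed.

require "ldclient-rb"

store = LaunchDarkly::Integrations::Redis.new_feature_store(
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly",
  expiration: 30   # seconds of local caching; 0 disables the in-memory cache
)

# The store is then attached to the client through the feature_store config property.
config = LaunchDarkly::Config.new(feature_store: store)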
- # - def initialize(opts = {}) - core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) - end - - # - # Default value for the `redis_url` constructor parameter; points to an instance of Redis - # running at `localhost` with its default port. - # - def self.default_redis_url - LaunchDarkly::Integrations::Redis::default_redis_url - end - - # - # Default value for the `prefix` constructor parameter. - # - def self.default_prefix - LaunchDarkly::Integrations::Redis::default_prefix - end - - def get(kind, key) - @wrapper.get(kind, key) - end - - def all(kind) - @wrapper.all(kind) - end - - def delete(kind, key, version) - @wrapper.delete(kind, key, version) - end - - def init(all_data) - @wrapper.init(all_data) - end - - def upsert(kind, item) - @wrapper.upsert(kind, item) - end - - def initialized? - @wrapper.initialized? - end - - def stop - @wrapper.stop - end - end -end diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb index ea01d7db..f52b3643 100644 --- a/spec/integrations/redis_stores_spec.rb +++ b/spec/integrations/redis_stores_spec.rb @@ -49,10 +49,10 @@ def set_big_segments_metadata(metadata) def set_big_segments(user_hash, includes, excludes) with_redis_test_client do |client| includes.each do |ref| - client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_INCLUDE + user_hash, ref) + client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_USER_INCLUDE + user_hash, ref) end excludes.each do |ref| - client.sadd(@actual_prefix + $RedisBigSegmentStore::KEY_USER_EXCLUDE + user_hash, ref) + client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_USER_EXCLUDE + user_hash, ref) end end end From 7f35335547ba92c0bbb3f9dd591a83871e16970d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 18:36:22 -0700 Subject: [PATCH 263/292] store data model with classes that aren't Hash --- lib/ldclient-rb/impl/evaluator.rb | 170 +++++++------- lib/ldclient-rb/impl/evaluator_helpers.rb | 43 +--- .../impl/integrations/file_data_source.rb | 4 +- lib/ldclient-rb/impl/model/clause.rb | 38 +++ lib/ldclient-rb/impl/model/feature_flag.rb | 164 +++++++++++++ .../impl/model/preprocessed_data.rb | 133 +---------- lib/ldclient-rb/impl/model/segment.rb | 119 ++++++++++ lib/ldclient-rb/impl/model/serialization.rb | 35 ++- lib/ldclient-rb/integrations/test_data.rb | 12 +- lib/ldclient-rb/stream.rb | 5 +- spec/impl/evaluator_big_segments_spec.rb | 42 ++-- spec/impl/evaluator_clause_spec.rb | 20 +- spec/impl/evaluator_rule_spec.rb | 66 +++--- spec/impl/evaluator_segment_spec.rb | 109 ++++----- spec/impl/evaluator_spec.rb | 218 +++++++++--------- spec/impl/evaluator_spec_base.rb | 48 +--- spec/impl/model/preprocessed_data_spec.rb | 11 +- spec/impl/model/serialization_spec.rb | 30 +-- spec/integrations/test_data_spec.rb | 18 +- spec/model_builders.rb | 145 ++++++++---- spec/requestor_spec.rb | 40 ++-- spec/stream_spec.rb | 10 +- 22 files changed, 826 insertions(+), 654 deletions(-) create mode 100644 lib/ldclient-rb/impl/model/clause.rb create mode 100644 lib/ldclient-rb/impl/model/feature_flag.rb create mode 100644 lib/ldclient-rb/impl/model/segment.rb diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 9b0e641f..c592a2ba 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -2,6 +2,8 @@ require "ldclient-rb/impl/evaluator_bucketing" require "ldclient-rb/impl/evaluator_helpers" require 
"ldclient-rb/impl/evaluator_operators" +require "ldclient-rb/impl/model/feature_flag" +require "ldclient-rb/impl/model/segment" module LaunchDarkly module Impl @@ -56,8 +58,8 @@ def self.error_result(errorKind, value = nil) # any events that were generated for prerequisite flags; its `value` will be `nil` if the flag returns the # default value. Error conditions produce a result with a nil value and an error reason, not an exception. # - # @param flag [Object] the flag - # @param context [LDContext] the context properties + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag + # @param context [LaunchDarkly::LDContext] the evaluation context # @return [EvalResult] the evaluation result def evaluate(flag, context) result = EvalResult.new @@ -72,124 +74,125 @@ def evaluate(flag, context) result end + # @param segment [LaunchDarkly::Impl::Model::Segment] def self.make_big_segment_ref(segment) # method is visible for testing # The format of Big Segment references is independent of what store implementation is being # used; the store implementation receives only this string and does not know the details of # the data model. The Relay Proxy will use the same format when writing to the store. - "#{segment[:key]}.g#{segment[:generation]}" + "#{segment.key}.g#{segment.generation}" end private + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag + # @param context [LaunchDarkly::LDContext] the evaluation context + # @param state [EvalResult] def eval_internal(flag, context, state) - unless flag[:on] - return EvaluatorHelpers.off_result(flag) + unless flag.on + return flag.off_result end prereq_failure_result = check_prerequisites(flag, context, state) return prereq_failure_result unless prereq_failure_result.nil? # Check context target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| + flag.targets.each do |target| + target.values.each do |value| if value == context.key - return EvaluatorHelpers.target_match_result(target, flag) + return target.match_result end end end # Check custom rules - rules = flag[:rules] || [] - rules.each_index do |i| - rule = rules[i] + flag.rules.each do |rule| if rule_match_context(rule, context, state) - reason = rule[:_reason] # try to use cached reason for this rule - reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? - return get_value_for_variation_or_rollout(flag, rule, context, reason, - EvaluatorHelpers.rule_precomputed_results(rule)) + return get_value_for_variation_or_rollout(flag, rule, context, rule.match_results) end end # Check the fallthrough rule - unless flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], context, EvaluationReason::fallthrough, - EvaluatorHelpers.fallthrough_precomputed_results(flag)) + unless flag.fallthrough.nil? + return get_value_for_variation_or_rollout(flag, flag.fallthrough, context, flag.fallthrough_results) end EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag + # @param context [LaunchDarkly::LDContext] the evaluation context + # @param state [EvalResult] def check_prerequisites(flag, context, state) - (flag[:prerequisites] || []).each do |prerequisite| + flag.prerequisites.each do |prerequisite| prereq_ok = true - prereq_key = prerequisite[:key] + prereq_key = prerequisite.key prereq_flag = @get_flag.call(prereq_key) if prereq_flag.nil? 
- @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag.key}\"" } prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, context, state) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. - if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + if !prereq_flag.on || prereq_res.variation_index != prerequisite.variation prereq_ok = false end prereq_eval = PrerequisiteEvalRecord.new(prereq_flag, flag, prereq_res) state.prereq_evals = [] if state.prereq_evals.nil? state.prereq_evals.push(prereq_eval) rescue => exn - Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) + Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag.key}\"", exn) prereq_ok = false end end unless prereq_ok - return EvaluatorHelpers.prerequisite_failed_result(prerequisite, flag) + return prerequisite.failure_result end end nil end + # @param rule [LaunchDarkly::Impl::Model::FlagRule] + # @param context [LaunchDarkly::LDContext] + # @param state [EvalResult] def rule_match_context(rule, context, state) - return false unless rule[:clauses] - - (rule[:clauses] || []).each do |clause| + rule.clauses.each do |clause| return false unless clause_match_context(clause, context, state) end true end + # @param clause [LaunchDarkly::Impl::Model::Clause] + # @param context [LaunchDarkly::LDContext] + # @param state [EvalResult] def clause_match_context(clause, context, state) # In the case of a segment match operator, we check if the context is in any of the segments, # and possibly negate - if clause[:op].to_sym == :segmentMatch - result = (clause[:values] || []).any? { |v| + if clause.op == :segmentMatch + result = clause.values.any? { |v| segment = @get_segment.call(v) !segment.nil? && segment_match_context(segment, context, state) } - clause[:negate] ? !result : result + clause.negate ? !result : result else clause_match_context_no_segments(clause, context) end end - # - # @param clause [Hash] + # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context_value [any] # @return [Boolean] - # private def match_any_clause_value(clause, context_value) - op = clause[:op].to_sym - clause[:values].any? { |cv| EvaluatorOperators.apply(op, context_value, cv) } + op = clause.op + clause.values.any? { |cv| EvaluatorOperators.apply(op, context_value, cv) } end - # - # @param clause [Hash] + # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context [LaunchDarkly::LDContext] # @return [Boolean] - # private def clause_match_by_kind(clause, context) # If attribute is "kind", then we treat operator and values as a match # expression against a list of all individual kinds in the context. @@ -207,21 +210,19 @@ def clause_match_context(clause, context, state) false end - # - # @param clause [Hash] + # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context [LaunchDarkly::LDContext] # @return [Boolean] - # def clause_match_context_no_segments(clause, context) - if clause[:attribute] == "kind" + if clause.attribute == "kind" result = clause_match_by_kind(clause, context) - return clause[:negate] ? !result : result + return clause.negate ? 
!result : result end - matched_context = context.individual_context(clause[:contextKind] || LaunchDarkly::LDContext::KIND_DEFAULT) + matched_context = context.individual_context(clause.context_kind || LaunchDarkly::LDContext::KIND_DEFAULT) return false if matched_context.nil? - user_val = matched_context.get_value(clause[:attribute]) + user_val = matched_context.get_value(clause.attribute) return false if user_val.nil? result = if user_val.is_a? Enumerable @@ -229,15 +230,21 @@ def clause_match_context_no_segments(clause, context) else match_any_clause_value(clause, user_val) end - clause[:negate] ? !result : result + clause.negate ? !result : result end + # @param segment [LaunchDarkly::Impl::Model::Segment] + # @param context [LaunchDarkly::LDContext] + # @return [Boolean] def segment_match_context(segment, context, state) - segment[:unbounded] ? big_segment_match_context(segment, context, state) : simple_segment_match_context(segment, context, true) + segment.unbounded ? big_segment_match_context(segment, context, state) : simple_segment_match_context(segment, context, true) end + # @param segment [LaunchDarkly::Impl::Model::Segment] + # @param context [LaunchDarkly::LDContext] + # @return [Boolean] def big_segment_match_context(segment, context, state) - unless segment[:generation] + unless segment.generation # Big segment queries can only be done if the generation is known. If it's unset, # that probably means the data store was populated by an older SDK that doesn't know # about the generation property and therefore dropped it from the JSON data. We'll treat @@ -262,83 +269,68 @@ def big_segment_match_context(segment, context, state) simple_segment_match_context(segment, context, false) end + # @param segment [LaunchDarkly::Impl::Model::Segment] + # @param context [LaunchDarkly::LDContext] + # @param use_includes_and_excludes [Boolean] + # @return [Boolean] def simple_segment_match_context(segment, context, use_includes_and_excludes) if use_includes_and_excludes - if EvaluatorHelpers.context_key_in_target_list(context, nil, segment[:included]) + if EvaluatorHelpers.context_key_in_target_list(context, nil, segment.included) return true end - # @type [Enumerable] - included_contexts = segment[:includedContexts] - if included_contexts.is_a?(Enumerable) - included_contexts.each do |ctx| - return false unless ctx.is_a? Hash - - if EvaluatorHelpers.context_key_in_target_list(context, ctx[:contextKind], ctx[:values]) - return true - end + segment.included_contexts.each do |target| + if EvaluatorHelpers.context_key_in_target_list(context, target.context_kind, target.values) + return true end end - if EvaluatorHelpers.context_key_in_target_list(context, nil, segment[:excluded]) + if EvaluatorHelpers.context_key_in_target_list(context, nil, segment.excluded) return false end - # @type [Enumerable] - excluded_contexts = segment[:excludedContexts] - if excluded_contexts.is_a?(Enumerable) - excluded_contexts.each do |ctx| - return false unless ctx.is_a? 
Hash - - if EvaluatorHelpers.context_key_in_target_list(context, ctx[:contextKind], ctx[:values]) - return false - end + segment.excluded_contexts.each do |target| + if EvaluatorHelpers.context_key_in_target_list(context, target.context_kind, target.values) + return false end end end - (segment[:rules] || []).each do |r| - return true if segment_rule_match_context(r, context, segment[:key], segment[:salt]) + segment.rules.each do |r| + return true if segment_rule_match_context(r, context, segment.key, segment.salt) end false end + # @param rule [LaunchDarkly::Impl::Model::SegmentRule] + # @param context [LaunchDarkly::LDContext] + # @param segment_key [String] + # @param salt [String] + # @return [Boolean] def segment_rule_match_context(rule, context, segment_key, salt) - (rule[:clauses] || []).each do |c| + rule.clauses.each do |c| return false unless clause_match_context_no_segments(c, context) end # If the weight is absent, this rule matches - return true unless rule[:weight] + return true unless rule.weight # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_context(context, rule[:rolloutContextKind], segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt, nil) - weight = rule[:weight].to_f / 100000.0 + bucket = EvaluatorBucketing.bucket_context(context, rule.rollout_context_kind, segment_key, rule.bucket_by || "key", salt, nil) + weight = rule.weight.to_f / 100000.0 bucket.nil? || bucket < weight end private - def get_value_for_variation_or_rollout(flag, vr, context, reason, precomputed_results) + def get_value_for_variation_or_rollout(flag, vr, context, precomputed_results) index, in_experiment = EvaluatorBucketing.variation_index_for_context(flag, vr, context) if index.nil? - @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag.key}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end - if precomputed_results - precomputed_results.for_variation(index, in_experiment) - else - #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set - if in_experiment - if reason.kind == :FALLTHROUGH - reason = EvaluationReason::fallthrough(in_experiment) - elsif reason.kind == :RULE_MATCH - reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) - end - end - EvaluatorHelpers.evaluation_detail_for_variation(flag, index, reason) - end + precomputed_results.for_variation(index, in_experiment) end end end diff --git a/lib/ldclient-rb/impl/evaluator_helpers.rb b/lib/ldclient-rb/impl/evaluator_helpers.rb index 41914afb..60ef2240 100644 --- a/lib/ldclient-rb/impl/evaluator_helpers.rb +++ b/lib/ldclient-rb/impl/evaluator_helpers.rb @@ -6,43 +6,24 @@ module LaunchDarkly module Impl module EvaluatorHelpers - def self.off_result(flag, logger = nil) - pre = flag[:_preprocessed] - pre ? pre.off_result : evaluation_detail_for_off_variation(flag, EvaluationReason::off, logger) - end - - def self.target_match_result(target, flag, logger = nil) - pre = target[:_preprocessed] - pre ? pre.match_result : evaluation_detail_for_variation( - flag, target[:variation], EvaluationReason::target_match, logger) - end - - def self.prerequisite_failed_result(prereq, flag, logger = nil) - pre = prereq[:_preprocessed] - pre ? 
pre.failed_result : evaluation_detail_for_off_variation( - flag, EvaluationReason::prerequisite_failed(prereq[:key]), logger - ) - end - - def self.fallthrough_precomputed_results(flag) - pre = flag[:_preprocessed] - pre ? pre.fallthrough_factory : nil - end - - def self.rule_precomputed_results(rule) - pre = rule[:_preprocessed] - pre ? pre.all_match_results : nil - end - + # + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] + # @param reason [LaunchDarkly::EvaluationReason] + # def self.evaluation_detail_for_off_variation(flag, reason, logger = nil) - index = flag[:offVariation] + index = flag.off_variation index.nil? ? EvaluationDetail.new(nil, nil, reason) : evaluation_detail_for_variation(flag, index, reason, logger) end + # + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] + # @param index [Integer] + # @param reason [LaunchDarkly::EvaluationReason] + # def self.evaluation_detail_for_variation(flag, index, reason, logger = nil) - vars = flag[:variations] || [] + vars = flag.variations if index < 0 || index >= vars.length - logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") unless logger.nil? + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag.key}\": invalid variation index") unless logger.nil? EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) else EvaluationDetail.new(vars[index], index, reason) diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb index 9ef50ef8..d8f22745 100644 --- a/lib/ldclient-rb/impl/integrations/file_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -121,12 +121,12 @@ def symbolize_all_keys(value) def add_item(all_data, kind, item) items = all_data[kind] - raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash + raise ArgumentError, "Received unknown item kind #{kind[:namespace]} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash key = item[:key].to_sym unless items[key].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[key] = item + items[key] = Model.deserialize(kind, item) end def make_flag_with_value(key, value) diff --git a/lib/ldclient-rb/impl/model/clause.rb b/lib/ldclient-rb/impl/model/clause.rb new file mode 100644 index 00000000..2e30a004 --- /dev/null +++ b/lib/ldclient-rb/impl/model/clause.rb @@ -0,0 +1,38 @@ + +module LaunchDarkly + module Impl + module Model + class Clause + def initialize(data) + @data = data + @context_kind = data[:contextKind] + @attribute = data[:attribute] + @op = data[:op].to_sym + @values = data[:values] || [] + @negate = !!data[:negate] + end + + # @return [Hash] + attr_reader :data + # @return [String|nil] + attr_reader :context_kind + # @return [String] + attr_reader :attribute + # @return [Symbol] + attr_reader :op + # @return [Array] + attr_reader :values + # @return [Boolean] + attr_reader :negate + + def [](key) + @data[key] + end + + def as_json + @data + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb new file mode 100644 index 00000000..2a094e51 --- /dev/null +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -0,0 +1,164 @@ +require "ldclient-rb/impl/evaluator_helpers" +require "ldclient-rb/impl/model/clause" + +module LaunchDarkly + module Impl + module Model + class FeatureFlag + # @param data [Hash] + # @param logger [Logger|nil] + def initialize(data, logger = nil) + raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash) + @data = data + @key = data[:key] + @version = data[:version] + @deleted = !!data[:deleted] + return if @deleted + @variations = data[:variations] || [] + @on = !!data[:on] + @fallthrough = data[:fallthrough] + @off_variation = data[:offVariation] + @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger) + @fallthrough_results = Preprocessor.precompute_multi_variation_results(self, + EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) + @prerequisites = (data[:prerequisites] || []).map do |prereq_data| + Prerequisite.new(prereq_data, self, logger) + end + @targets = (data[:targets] || []).map do |target_data| + Target.new(target_data, self, logger) + end + @rules = (data[:rules] || []).map.with_index do |rule_data, index| + FlagRule.new(rule_data, index, self) + end + end + + # @return [Hash] + attr_reader :data + # @return [String] + attr_reader :key + # @return [Integer] + attr_reader :version + # @return [Boolean] + attr_reader :deleted + # @return [Array] + attr_reader :variations + # @return [Boolean] + attr_reader :on + # @return [Integer|nil] + attr_reader :off_variation + # @return [Hash] + attr_reader :fallthrough + # @return [LaunchDarkly::EvaluationDetail] + attr_reader :off_result + # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations] + attr_reader :fallthrough_results + # @return [Array] + attr_reader :prerequisites + # @return [Array] + attr_reader :targets + # @return [Array] + attr_reader :rules + + # This method allows us to read properties of the object as if it's just a hash; we can remove it if we + # migrate entirely to using attributes of the class + def [](key) + @data[key] + end + + def ==(other) + other.is_a?(FeatureFlag) && other.data == self.data + end + + def as_json(*) # parameter is unused, but may be passed if we're using the json gem + @data + end + + # Same as as_json, but converts the JSON structure into a 
string. + def to_json(*a) + as_json.to_json(a) + end + end + + class Prerequisite + def initialize(data, flag, logger) + @data = data + @key = data[:key] + @variation = data[:variation] + @failure_result = EvaluatorHelpers.evaluation_detail_for_off_variation(flag, + EvaluationReason::prerequisite_failed(@key), logger) + end + + # @return [Hash] + attr_reader :data + # @return [String] + attr_reader :key + # @return [Integer] + attr_reader :variation + # @return [LaunchDarkly::EvaluationDetail] + attr_reader :failure_result + + def as_json + @data + end + end + + class Target + def initialize(data, flag, logger) + @data = data + @values = data[:values] || [] + @match_result = EvaluatorHelpers.evaluation_detail_for_variation(flag, + data[:variation], EvaluationReason::target_match, logger) + end + + # @return [Hash] + attr_reader :data + # @return [Array] + attr_reader :values + # @return [LaunchDarkly::EvaluationDetail] + attr_reader :match_result + + # This method allows us to read properties of the object as if it's just a hash; we can remove it if we + # migrate entirely to using attributes of the class + def [](key) + @data[key] + end + + def as_json + @data + end + end + + class FlagRule + def initialize(data, rule_index, flag) + @data = data + @clauses = (data[:clauses] || []).map do |clause_data| + Clause.new(clause_data) + end + rule_id = data[:id] + match_reason = EvaluationReason::rule_match(rule_index, rule_id) + match_reason_in_experiment = EvaluationReason::rule_match(rule_index, rule_id, true) + @match_results = Preprocessor.precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment) + end + + # @return [Hash] + attr_reader :data + # @return [Array] + attr_reader :clauses + # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations] + attr_reader :match_results + + # This method allows us to read properties of the object as if it's just a hash; we can remove it if we + # migrate entirely to using attributes of the class + def [](key) + @data[key] + end + + def as_json + @data + end + end + + # Clause is defined in its own file because clauses are used by both flags and segments + end + end +end diff --git a/lib/ldclient-rb/impl/model/preprocessed_data.rb b/lib/ldclient-rb/impl/model/preprocessed_data.rb index ee00e711..088add02 100644 --- a/lib/ldclient-rb/impl/model/preprocessed_data.rb +++ b/lib/ldclient-rb/impl/model/preprocessed_data.rb @@ -2,7 +2,7 @@ module LaunchDarkly module Impl - module DataModelPreprocessing + module Model # # Container for a precomputed result that includes a specific variation index and value, an # evaluation reason, and optionally an alternate evaluation reason that corresponds to the @@ -18,7 +18,7 @@ def initialize(value, variation_index, regular_reason, in_experiment_reason = ni # @param in_experiment [Boolean] indicates whether we want the result to include # "inExperiment: true" in the reason or not - # @return [EvaluationDetail] + # @return [LaunchDarkly::EvaluationDetail] def get_result(in_experiment = false) in_experiment ? 
@in_experiment_result : @regular_result end @@ -35,6 +35,7 @@ def initialize(variation_factories) # @param index [Integer] the variation index # @param in_experiment [Boolean] indicates whether we want the result to include # "inExperiment: true" in the reason or not + # @return [LaunchDarkly::EvaluationDetail] def for_variation(index, in_experiment) if index < 0 || index >= @factories.length EvaluationDetail.new(nil, nil, EvaluationReason.error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -44,130 +45,12 @@ def for_variation(index, in_experiment) end end - # Base class for all of the preprocessed data classes we embed in our data model. Using this class - # ensures that none of its properties will be included in JSON representations. It also overrides - # == to say that it is always equal with another instance of the same class; equality tests on - # this class are only ever done in test code, and we want the contents of these classes to be - # ignored in test code unless we are looking at specific attributes. - class PreprocessedDataBase - def as_json(*) - nil - end - - def to_json(*a) - "null" - end - - def ==(other) - other.class == self.class - end - end - - class FlagPreprocessed < PreprocessedDataBase - def initialize(off_result, fallthrough_factory) - super() - @off_result = off_result - @fallthrough_factory = fallthrough_factory - end - - # @return [EvalResultsForSingleVariation] - attr_reader :off_result - # @return [EvalResultFactoryMultiVariations] - attr_reader :fallthrough_factory - end - - class PrerequisitePreprocessed < PreprocessedDataBase - def initialize(failed_result) - super() - @failed_result = failed_result - end - - # @return [EvalResultsForSingleVariation] - attr_reader :failed_result - end - - class TargetPreprocessed < PreprocessedDataBase - def initialize(match_result) - super() - @match_result = match_result - end - - # @return [EvalResultsForSingleVariation] - attr_reader :match_result - end - - class FlagRulePreprocessed < PreprocessedDataBase - def initialize(all_match_results) - super() - @all_match_results = all_match_results - end - - # @return [EvalResultsForSingleVariation] - attr_reader :all_match_results - end - class Preprocessor - def initialize(logger = nil) - @logger = logger - end - - def preprocess_item!(kind, item) - if kind.eql? FEATURES - preprocess_flag!(item) - elsif kind.eql? 
SEGMENTS - preprocess_segment!(item) - end - end - - def preprocess_all_items!(kind, items_map) - return items_map unless items_map - items_map.each do |key, item| - preprocess_item!(kind, item) - end - end - - def preprocess_flag!(flag) - flag[:_preprocessed] = FlagPreprocessed.new( - EvaluatorHelpers.off_result(flag), - precompute_multi_variation_results(flag, EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) - ) - (flag[:prerequisites] || []).each do |prereq| - preprocess_prerequisite!(prereq, flag) - end - (flag[:targets] || []).each do |target| - preprocess_target!(target, flag) - end - rules = flag[:rules] - (rules || []).each_index do |index| - preprocess_flag_rule!(rules[index], index, flag) - end - end - - def preprocess_segment!(segment) - # nothing to do for segments currently - end - - private def preprocess_prerequisite!(prereq, flag) - prereq[:_preprocessed] = PrerequisitePreprocessed.new( - EvaluatorHelpers.prerequisite_failed_result(prereq, flag, @logger) - ) - end - - private def preprocess_target!(target, flag) - target[:_preprocessed] = TargetPreprocessed.new( - EvaluatorHelpers.target_match_result(target, flag, @logger) - ) - end - - private def preprocess_flag_rule!(rule, index, flag) - match_reason = EvaluationReason::rule_match(index, rule[:id]) - match_reason_in_experiment = EvaluationReason::rule_match(index, rule[:id], true) - rule[:_preprocessed] = FlagRulePreprocessed.new( - precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment) - ) - end - - private def precompute_multi_variation_results(flag, regular_reason, in_experiment_reason) + # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] + # @param regular_reason [LaunchDarkly::EvaluationReason] + # @param in_experiment_reason [LaunchDarkly::EvaluationReason] + # @return [EvalResultFactoryMultiVariations] + def self.precompute_multi_variation_results(flag, regular_reason, in_experiment_reason) factories = [] vars = flag[:variations] || [] vars.each_index do |index| diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb new file mode 100644 index 00000000..36d873fc --- /dev/null +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -0,0 +1,119 @@ +require "ldclient-rb/impl/model/clause" +require "ldclient-rb/impl/model/preprocessed_data" + +module LaunchDarkly + module Impl + module Model + class Segment + # @param data [Hash] + # @param logger [Logger|nil] + def initialize(data, logger = nil) + raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash) + @data = data + @key = data[:key] + @version = data[:version] + @deleted = !!data[:deleted] + return if @deleted + @included = data[:included] || [] + @excluded = data[:excluded] || [] + @included_contexts = (data[:includedContexts] || []).map do |target_data| + SegmentTarget.new(target_data) + end + @excluded_contexts = (data[:excludedContexts] || []).map do |target_data| + SegmentTarget.new(target_data) + end + @rules = (data[:rules] || []).map do |rule_data| + SegmentRule.new(rule_data) + end + @unbounded = !!data[:unbounded] + @generation = data[:generation] + @salt = data[:salt] + end + + # @return [Hash] + attr_reader :data + # @return [String] + attr_reader :key + # @return [Integer] + attr_reader :version + # @return [Boolean] + attr_reader :deleted + # @return [Array] + attr_reader :included + # @return [Array] + attr_reader :excluded + # @return [Array] + attr_reader :included_contexts + # @return [Array] + attr_reader :excluded_contexts + # @return 
[Array] + attr_reader :rules + # @return [Boolean] + attr_reader :unbounded + # @return [Integer|nil] + attr_reader :generation + # @return [String] + attr_reader :salt + + # This method allows us to read properties of the object as if it's just a hash; we can remove it if we + # migrate entirely to using attributes of the class + def [](key) + @data[key] + end + + def ==(other) + other.is_a?(Segment) && other.data == self.data + end + + def as_json(*) # parameter is unused, but may be passed if we're using the json gem + @data + end + + # Same as as_json, but converts the JSON structure into a string. + def to_json(*a) + as_json.to_json(a) + end + end + + class SegmentTarget + def initialize(data) + @data = data + @context_kind = data[:contextKind] + @values = data[:values] || [] + end + + # @return [Hash] + attr_reader :data + # @return [String] + attr_reader :context_kind + # @return [Array] + attr_reader :values + end + + class SegmentRule + def initialize(data) + @data = data + @clauses = (data[:clauses] || []).map do |clause_data| + Clause.new(clause_data) + end + @weight = data[:weight] + @bucket_by = data[:bucketBy] + @rollout_context_kind = data[:rolloutContextKind] + end + + # @return [Hash] + attr_reader :data + # @return [Array] + attr_reader :clauses + # @return [Integer|nil] + attr_reader :weight + # @return [String|nil] + attr_reader :bucket_by + # @return [String|nil] + attr_reader :rollout_context_kind + end + + # Clause is defined in its own file because clauses are used by both flags and segments + end + end +end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 1d306f46..9f3e4c28 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -1,15 +1,30 @@ +require "ldclient-rb/impl/model/feature_flag" require "ldclient-rb/impl/model/preprocessed_data" +require "ldclient-rb/impl/model/segment" module LaunchDarkly module Impl module Model # Abstraction of deserializing a feature flag or segment that was read from a data store or # received from LaunchDarkly. - def self.deserialize(kind, json, logger = nil) - return nil if json.nil? - item = JSON.parse(json, symbolize_names: true) - DataModelPreprocessing::Preprocessor.new(logger).preprocess_item!(kind, item) - item + # + # @param kind [Hash] normally either FEATURES or SEGMENTS + # @param input [object] a JSON string or a parsed hash (or a data model object, in which case + # we'll just return the original object) + # @param logger [Logger|nil] logs warnings if there are any data validation problems + # @return [Object] the flag or segment (or, for an unknown data kind, the data as a hash) + def self.deserialize(kind, input, logger = nil) + return nil if input.nil? + return input if !input.is_a?(String) && !input.is_a?(Hash) + data = input.is_a?(Hash) ? input : JSON.parse(input, symbolize_names: true) + case kind + when FEATURES + FeatureFlag.new(data, logger) + when SEGMENTS + Segment.new(data, logger) + else + data + end end # Abstraction of serializing a feature flag or segment that will be written to a data store. @@ -20,12 +35,10 @@ def self.serialize(kind, item) # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. 
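To make the shape of the new model layer concrete, here is a small usage sketch of `Model.deserialize` as defined above; the flag JSON is a made-up minimal example, and the kind constant is the SDK's existing `LaunchDarkly::FEATURES`.

require "ldclient-rb"

json = '{"key":"my-flag","version":1,"on":false,"offVariation":1,"variations":[true,false]}'
flag = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, json)

flag.class          # => LaunchDarkly::Impl::Model::FeatureFlag
flag.key            # => "my-flag"
flag.off_variation  # => 1
flag[:on]           # => false (hash-style access still works during the migration)
flag.as_json        # => the original parsed hash, which is what gets serialized back out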
def self.make_all_store_data(received_data, logger = nil) - preprocessor = DataModelPreprocessing::Preprocessor.new(logger) - flags = received_data[:flags] - preprocessor.preprocess_all_items!(FEATURES, flags) - segments = received_data[:segments] - preprocessor.preprocess_all_items!(SEGMENTS, segments) - { FEATURES => flags, SEGMENTS => segments } + return { + FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) }, + SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) } + } end end end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index 83a343b6..f8f0ced4 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -1,4 +1,6 @@ require 'ldclient-rb/impl/integrations/test_data/test_data_source' +require 'ldclient-rb/impl/model/feature_flag' +require 'ldclient-rb/impl/model/segment' require 'ldclient-rb/integrations/test_data/flag_builder' require 'concurrent/atomics' @@ -119,7 +121,7 @@ def update(flag_builder) if @current_flags[flag_key] then version = @current_flags[flag_key][:version] end - new_flag = flag_builder.build(version+1) + new_flag = Impl::Model::FeatureFlag.new(flag_builder.build(version+1)) @current_flags[flag_key] = new_flag end update_item(FEATURES, new_flag) @@ -169,12 +171,14 @@ def use_preconfigured_segment(segment) end private def use_preconfigured_item(kind, item, current) - key = item[:key].to_sym + item = Impl::Model.deserialize(kind, item) + key = item.key.to_sym @lock.with_write_lock do old_item = current[key] unless old_item.nil? then - item = item.clone - item[:version] = old_item[:version] + 1 + data = item.as_json + data[:version] = old_item.version + 1 + item = Impl::Model.deserialize(kind, data) end current[key] = item end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e3824538..d6ab5086 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -96,9 +96,8 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - data = data[:data] - Impl::DataModelPreprocessing::Preprocessor.new(@config.logger).preprocess_item!(kind, data) - @feature_store.upsert(kind, data) + item = Impl::Model.deserialize(kind, data[:data], @config.logger) + @feature_store.upsert(kind, item) break end end diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 0f246e4d..4fca96ca 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -5,10 +5,10 @@ module LaunchDarkly module Impl - evaluator_tests_with_and_without_preprocessing "Evaluator (big segments)" do |desc, factory| - describe "#{desc} - evaluate", :evaluator_spec_base => true do + describe "Evaluator (big segments)" do + describe "evaluate", :evaluator_spec_base => true do it "segment is not matched if there is no way to query it" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', included: [ user.key ], # included should be ignored for a big segment version: 1, @@ -18,14 +18,14 @@ module Impl e = EvaluatorBuilder.new(logger) .with_segment(segment) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to 
be(BigSegmentsStatus::NOT_CONFIGURED) end it "segment with no generation is not matched" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', included: [ user.key ], # included should be ignored for a big segment version: 1, @@ -34,14 +34,14 @@ module Impl e = EvaluatorBuilder.new(logger) .with_segment(segment) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) end it "matched with include" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', version: 1, unbounded: true, @@ -51,54 +51,54 @@ module Impl .with_segment(segment) .with_big_segment_for_user(user, segment, true) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end it "matched with rule" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', version: 1, unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] }, + { clauses: [ Clauses.match_user(user) ] }, ], }) e = EvaluatorBuilder.new(logger) .with_segment(segment) .with_big_segment_for_user(user, segment, nil) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end it "unmatched by exclude regardless of rule" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', version: 1, unbounded: true, generation: 2, rules: [ - { clauses: [ make_user_matching_clause(user) ] }, + { clauses: [ Clauses.match_user(user) ] }, ], }) e = EvaluatorBuilder.new(logger) .with_segment(segment) .with_big_segment_for_user(user, segment, false) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end it "status is returned from provider" do - segment = factory.segment({ + segment = Segments.from_hash({ key: 'test', version: 1, unbounded: true, @@ -109,33 +109,33 @@ module Impl .with_big_segment_for_user(user, segment, true) .with_big_segments_status(BigSegmentsStatus::STALE) .build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) result = e.evaluate(flag, user) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) end it "queries state only once per user even if flag references multiple segments" do - segment1 = factory.segment({ + segment1 = Segments.from_hash({ key: 'segmentkey1', version: 1, unbounded: true, generation: 2, }) - segment2 = factory.segment({ + segment2 = Segments.from_hash({ key: 'segmentkey2', version: 1, unbounded: true, generation: 3, }) - 
flag = factory.flag({ + flag = Flags.from_hash({ key: 'key', on: true, fallthrough: { variation: 0 }, variations: [ false, true ], rules: [ - { variation: 1, clauses: [ make_segment_match_clause(segment1) ]}, - { variation: 1, clauses: [ make_segment_match_clause(segment2) ]}, + { variation: 1, clauses: [ Clauses.match_segment(segment1) ]}, + { variation: 1, clauses: [ Clauses.match_segment(segment2) ]}, ], }) diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb index 75eb277f..e6245c7e 100644 --- a/spec/impl/evaluator_clause_spec.rb +++ b/spec/impl/evaluator_clause_spec.rb @@ -3,33 +3,33 @@ module LaunchDarkly module Impl - evaluator_tests_with_and_without_preprocessing "Evaluator (clauses)" do |desc, factory| - describe "#{desc} - evaluate", :evaluator_spec_base => true do + describe "Evaluator (clauses)" do + describe "evaluate", :evaluator_spec_base => true do it "can match built-in attribute" do context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "can match custom attribute" do context = LDContext.create({ key: 'x', name: 'Bob', custom: { legs: 4 } }) clause = { attribute: 'legs', op: 'in', values: [4] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "returns false for missing attribute" do context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'legs', op: 'in', values: [4] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end it "returns false for unknown operator" do context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'unknown', values: [4] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end @@ -39,14 +39,14 @@ module Impl rule0 = { clauses: [ clause0 ], variation: 1 } clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } rule1 = { clauses: [ clause1 ], variation: 1 } - flag = factory.boolean_flag_with_rules([rule0, rule1]) + flag = Flags.boolean_flag_with_rules(rule0, rule1) expect(basic_evaluator.evaluate(flag, context).detail.value).to be true end it "can be negated" do context = LDContext.create({ key: 'x', name: 'Bob' }) clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context).detail.value).to be false end @@ -57,7 +57,7 @@ module Impl context2 = LDContext.create({ key: 'l', kind: 'user', name: 'Lucy' }) context3 = LDContext.create_multi([context1, context2]) - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context1).detail.value).to be true expect(basic_evaluator.evaluate(flag, context2).detail.value).to be false @@ -76,7 +76,7 @@ module Impl ] ) - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(basic_evaluator.evaluate(flag, context1).detail.value).to be 
false expect(basic_evaluator.evaluate(flag, context2).detail.value).to be true diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 6e06a465..11c07ecb 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -3,11 +3,11 @@ module LaunchDarkly module Impl - evaluator_tests_with_and_without_preprocessing "Evaluator (rules)" do |desc, factory| - describe "#{desc} - evaluate", :evaluator_spec_base => true do + describe "Evaluator (rules)" do + describe "evaluate", :evaluator_spec_base => true do it "matches context from rules" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) result = basic_evaluator.evaluate(flag, context) @@ -15,22 +15,20 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses rule match result detail instances" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: 'userkey' }) - detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail.reason.rule_id).to eq 'ruleid' - expect(result1.detail).to be result2.detail - end + it "reuses rule match result detail instances" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = Flags.boolean_flag_with_rules(rule) + context = LDContext.create({ key: 'userkey' }) + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail.reason.rule_id).to eq 'ruleid' + expect(result1.detail).to be result2.detail end it "returns an error if rule variation is too high" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -41,7 +39,7 @@ module Impl it "returns an error if rule variation is negative" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -52,7 +50,7 @@ module Impl it "returns an error if rule has neither variation nor rollout" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -64,7 +62,7 @@ module Impl it "returns an error if rule has a rollout with no variations" do rule = { id: 
'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [] } } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) @@ -75,7 +73,7 @@ module Impl it "coerces context key to a string for evaluation" do clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) context = LDContext.create({ key: 999 }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.value).to eq(true) @@ -85,7 +83,7 @@ module Impl it "evaluates rollout for rule" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: 'userkey' }) detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) result = basic_evaluator.evaluate(flag, context) @@ -93,24 +91,22 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses rule rollout result detail instance" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = factory.boolean_flag_with_rules([rule]) - context = LDContext.create({ key: 'userkey' }) - detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail).to eq(detail) - expect(result2.detail).to be(result1.detail) - end + it "reuses rule rollout result detail instance" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = Flags.boolean_flag_with_rules(rule) + context = LDContext.create({ key: 'userkey' }) + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) end it "sets the in_experiment value if rollout kind is experiment " do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to include('"inExperiment":true') @@ -120,7 +116,7 @@ module Impl it "does not set the in_experiment value if rollout kind is not experiment " do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) 
expect(result.detail.reason.to_json).to_not include('"inExperiment":true') @@ -130,7 +126,7 @@ module Impl it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } - flag = factory.boolean_flag_with_rules([rule]) + flag = Flags.boolean_flag_with_rules(rule) context = LDContext.create({ key: "userkey" }) result = basic_evaluator.evaluate(flag, context) expect(result.detail.reason.to_json).to_not include('"inExperiment":true') diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 04856522..a5104e7d 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -1,13 +1,14 @@ +require "model_builders" require "spec_helper" require "impl/evaluator_spec_base" module LaunchDarkly module Impl - evaluator_tests_with_and_without_preprocessing "Evaluator (segments)" do |desc, factory| - describe "#{desc} - evaluate", :evaluator_spec_base => true do - def test_segment_match(factory, segment, context) - clause = make_segment_match_clause(segment, context.individual_context(0).kind) - flag = factory.boolean_flag_with_clauses([clause]) + describe "Evaluator (segments)" do + describe "evaluate", :evaluator_spec_base => true do + def test_segment_match(segment, context) + clause = Clauses.match_segment(segment) + flag = Flags.boolean_flag_with_clauses(clause) e = EvaluatorBuilder.new(logger).with_segment(segment).build e.evaluate(flag, context).detail.value end @@ -20,21 +21,20 @@ def test_segment_match(factory, segment, context) deleted: false, } e = EvaluatorBuilder.new(logger).with_segment(segment).build - flag = factory.boolean_flag_with_clauses([make_segment_match_clause(segment)]) + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) expect(e.evaluate(flag, user).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = factory.boolean_flag_with_clauses([clause]) + flag = Flags.boolean_flag_with_clauses(clause) expect(e.evaluate(flag, user).detail.value).to be false end it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user.key ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').included(user.key).build + expect(test_segment_match(segment, user)).to be true end it 'explicitly includes a specific context kind' do @@ -42,18 +42,18 @@ def test_segment_match(factory, segment, context) device_context = LDContext::create({ key: "devicekey", kind: "device" }) multi_context = LDContext::create_multi([org_context, device_context]) - segment = make_segment('segkey') - segment[:includedContexts] = [{ contextKind: "org", values: ["orgkey"] }] + segment = SegmentBuilder.new('segkey') + .included_contexts("org", "orgkey") + .build - expect(test_segment_match(factory, segment, org_context)).to be true - expect(test_segment_match(factory, segment, device_context)).to be false - expect(test_segment_match(factory, segment, multi_context)).to be true + expect(test_segment_match(segment, org_context)).to be true + expect(test_segment_match(segment, device_context)).to be false + expect(test_segment_match(segment, 
multi_context)).to be true end it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user.key ] - expect(test_segment_match(factory, segment, user)).to be false + segment = SegmentBuilder.new('segkey').excluded(user.key).build + expect(test_segment_match(segment, user)).to be false end it 'explicitly excludes a specific context kind' do @@ -61,89 +61,82 @@ def test_segment_match(factory, segment, context) device_context = LDContext::create({ key: "devicekey", kind: "device" }) multi_context = LDContext::create_multi([org_context, device_context]) - segment = make_segment('segkey') - segment[:excludedContexts] = [{ contextKind: "org", values: ["orgkey"] }] - - org_clause = make_user_matching_clause(org_context, :key) - device_clause = make_user_matching_clause(device_context, :key) - segment[:rules] = [ { clauses: [ org_clause ] }, { clauses: [ device_clause ] } ] - - expect(test_segment_match(factory, segment, org_context)).to be false - expect(test_segment_match(factory, segment, device_context)).to be true - expect(test_segment_match(factory, segment, multi_context)).to be false + org_clause = Clauses.match_user(org_context, :key) + device_clause = Clauses.match_user(device_context, :key) + segment = SegmentBuilder.new('segkey') + .excluded_contexts("org", "orgkey") + .rule({ clauses: [ org_clause ]}) + .rule({ clauses: [ device_clause ]}) + .build + + expect(test_segment_match(segment, org_context)).to be false + expect(test_segment_match(segment, device_context)).to be true + expect(test_segment_match(segment, multi_context)).to be false end it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user.key ] - segment[:excluded] = [ user.key ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').included(user.key).excluded(user.key).build + expect(test_segment_match(segment, user)).to be true end it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) + segClause = Clauses.match_user(user, :email) segRule = { clauses: [ segClause ], } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be true end it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) + segClause = Clauses.match_user(user, :email) segRule = { clauses: [ segClause ], weight: nil, } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be true end it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) + segClause = Clauses.match_user(user, :email) segRule = { clauses: [ segClause ], weight: 100000, } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be true end it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) + segClause = Clauses.match_user(user, :email) segRule = { clauses: [ segClause ], weight: 0, } - segment = 
make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be false + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be false end it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) + segClause1 = Clauses.match_user(user, :email) + segClause2 = Clauses.match_user(user, :name) segRule = { clauses: [ segClause1, segClause2 ], } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be true + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be true end it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) + segClause1 = Clauses.match_user(user, :email) + segClause2 = Clauses.match_user(user, :name) segClause2[:values] = [ 'wrong' ] segRule = { clauses: [ segClause1, segClause2 ], } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(factory, segment, user)).to be false + segment = SegmentBuilder.new('segkey').rule(segRule).build + expect(test_segment_match(segment, user)).to be false end end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index c43eb195..57786026 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -5,10 +5,10 @@ module LaunchDarkly module Impl - evaluator_tests_with_and_without_preprocessing "Evaluator (general)" do |desc, factory| - describe "#{desc} - evaluate", :evaluator_spec_base => true do + describe "Evaluator (general)" do + describe "evaluate", :evaluator_spec_base => true do it "returns off variation if flag is off" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: false, offVariation: 1, @@ -23,7 +23,7 @@ module Impl end it "returns nil if flag is off and off variation is unspecified" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: false, fallthrough: { variation: 0 }, @@ -36,26 +36,24 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses off result detail instance" do - flag = factory.flag({ - key: 'feature', - on: false, - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'], - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, EvaluationReason::off) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail).to eq(detail) - expect(result2.detail).to be(result1.detail) - end + it "reuses off result detail instance" do + flag = Flags.from_hash({ + key: 'feature', + on: false, + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'], + }) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::off) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) end it "returns an error if off variation is too high" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: false, offVariation: 999, @@ -71,7 +69,7 @@ module Impl end it "returns an error if off variation is negative" 
do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: false, offVariation: -1, @@ -87,7 +85,7 @@ module Impl end it "returns off variation if prerequisite is not found" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, prerequisites: [{key: 'badfeature', variation: 1}], @@ -103,27 +101,25 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses prerequisite-failed result detail instances" do - flag = factory.flag({ - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - }) - context = LDContext.create({ key: 'x' }) - e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result1 = e.evaluate(flag, context) - expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') - result2 = e.evaluate(flag, context) - expect(result2.detail).to be result1.detail - end + it "reuses prerequisite-failed result detail instances" do + flag = Flags.from_hash({ + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + }) + context = LDContext.create({ key: 'x' }) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build + result1 = e.evaluate(flag, context) + expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') + result2 = e.evaluate(flag, context) + expect(result2.detail).to be result1.detail end it "returns off variation and event if prerequisite of a prerequisite is not found" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -132,7 +128,7 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - flag1 = factory.flag({ + flag1 = Flags.from_hash({ key: 'feature1', on: true, prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist @@ -152,7 +148,7 @@ module Impl end it "returns off variation and event if prerequisite is off" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -161,7 +157,7 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - flag1 = factory.flag({ + flag1 = Flags.from_hash({ key: 'feature1', on: false, # note that even though it returns the desired variation, it is still off and therefore not a match @@ -182,7 +178,7 @@ module Impl end it "returns off variation and event if prerequisite is not met" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -191,7 +187,7 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - flag1 = factory.flag({ + flag1 = Flags.from_hash({ key: 'feature1', on: true, fallthrough: { variation: 0 }, @@ -210,7 +206,7 @@ module Impl end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, prerequisites: [{key: 'feature1', variation: 1}], @@ -219,7 +215,7 @@ module Impl variations: ['a', 'b', 'c'], version: 1, }) - flag1 = factory.flag({ + flag1 = Flags.from_hash({ key: 'feature1', on: true, fallthrough: { variation: 1 }, @@ -238,7 +234,7 @@ module Impl end it "returns fallthrough variation if flag is on and no rules match" do - flag = factory.flag({ + flag = 
Flags.from_hash({ key: 'feature0', on: true, fallthrough: { variation: 0 }, @@ -256,30 +252,28 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses fallthrough variation result detail instance" do - flag = factory.flag({ - key: 'feature0', - on: true, - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - rules: [ - { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, - ], - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail).to eq(detail) - expect(result2.detail).to be(result1.detail) - end + it "reuses fallthrough variation result detail instance" do + flag = Flags.from_hash({ + key: 'feature0', + on: true, + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1, + rules: [ + { variation: 2, clauses: [ { attribute: "key", op: "in", values: ["zzz"] } ] }, + ], + }) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) end it "returns an error if fallthrough variation is too high" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { variation: 999 }, @@ -294,7 +288,7 @@ module Impl end it "returns an error if fallthrough variation is negative" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { variation: -1 }, @@ -309,7 +303,7 @@ module Impl end it "returns an error if fallthrough has no variation or rollout" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { }, @@ -324,7 +318,7 @@ module Impl end it "returns an error if fallthrough has a rollout with no variations" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { rollout: { variations: [] } }, @@ -339,7 +333,7 @@ module Impl end it "matches context from targets" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, targets: [ @@ -356,30 +350,28 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses target-match result detail instances" do - flag = factory.flag({ - key: 'feature', - on: true, - targets: [ - { values: [ 'whoever', 'userkey' ], variation: 2 }, - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - }) - context = LDContext.create({ key: 'userkey' }) - detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail).to eq(detail) - expect(result2.detail).to be(result1.detail) - end + it "reuses target-match result detail instances" do + flag = Flags.from_hash({ + key: 'feature', + on: true, + targets: [ + { values: [ 'whoever', 'userkey' ], variation: 2 }, + ], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + }) + context = LDContext.create({ key: 'userkey' }) + detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) + result1 = basic_evaluator.evaluate(flag, context) + 
result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) end describe "fallthrough experiment/rollout behavior" do it "evaluates rollout for fallthrough" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature0', on: true, fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, @@ -394,27 +386,25 @@ module Impl expect(result.prereq_evals).to eq(nil) end - if factory.with_preprocessing - it "reuses fallthrough rollout result detail instance" do - flag = factory.flag({ - key: 'feature0', - on: true, - fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) - result1 = basic_evaluator.evaluate(flag, context) - result2 = basic_evaluator.evaluate(flag, context) - expect(result1.detail).to eq(detail) - expect(result2.detail).to be(result1.detail) - end + it "reuses fallthrough rollout result detail instance" do + flag = Flags.from_hash({ + key: 'feature0', + on: true, + fallthrough: { rollout: { variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1, + }) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::fallthrough) + result1 = basic_evaluator.evaluate(flag, context) + result2 = basic_evaluator.evaluate(flag, context) + expect(result1.detail).to eq(detail) + expect(result2.detail).to be(result1.detail) end it "sets the in_experiment value if rollout kind is experiment and untracked false" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, @@ -428,7 +418,7 @@ module Impl end it "does not set the in_experiment value if rollout kind is not experiment" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, @@ -442,7 +432,7 @@ module Impl end it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do - flag = factory.flag({ + flag = Flags.from_hash({ key: 'feature', on: true, fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb index 6f0f6ecb..808da1f1 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -1,22 +1,9 @@ require "ldclient-rb/impl/big_segments" +require "ldclient-rb/impl/model/serialization" require "model_builders" require "spec_helper" -def evaluator_tests_with_and_without_preprocessing(desc_base) - # In the evaluator tests, we are really testing two sets of evaluation logic: one where preprocessed - # results are not available, and one where they are. In normal usage, flags always get preprocessed and - # we expect evaluations to almost always be able to reuse a preprocessed result-- but we still want to - # verify that the evaluator works even if preprocessing hasn't happened, since a flag is just a Hash and - # so we can't do any type-level enforcement to constrain its state. 
The DataItemFactory abstraction - # controls whether flags/segments created in these tests do or do not have preprocessing applied. - [true, false].each do |with_preprocessing| - pre_desc = with_preprocessing ? "with preprocessing" : "without preprocessing" - desc = "#{desc_base} - #{pre_desc}" - yield desc, DataItemFactory.new(with_preprocessing) - end -end - module LaunchDarkly module Impl class EvaluatorBuilder @@ -30,7 +17,7 @@ def initialize(logger) end def with_flag(flag) - @flags[flag[:key]] = flag + @flags[flag[:key]] = Model.deserialize(FEATURES, flag) self end @@ -40,7 +27,7 @@ def with_unknown_flag(key) end def with_segment(segment) - @segments[segment[:key]] = segment + @segments[segment[:key]] = Model.deserialize(SEGMENTS, segment) self end @@ -105,35 +92,6 @@ def logger def basic_evaluator EvaluatorBuilder.new(logger).build end - - def make_user_matching_clause(user, attr = :key) - { - attribute: attr.to_s, - op: :in, - values: [ user.get_value(attr) ], - negate: false, - contextKind: user.individual_context(0).kind, - } - end - - def make_segment(key) - { - key: key, - included: [], - excluded: [], - salt: 'abcdef', - version: 1, - } - end - - def make_segment_match_clause(segment, kind = nil) - { - op: :segmentMatch, - values: [ segment[:key] ], - negate: false, - contextKind: kind || LaunchDarkly::LDContext::KIND_DEFAULT, - } - end end RSpec.configure { |c| c.include EvaluatorSpecBase, :evaluator_spec_base => true } diff --git a/spec/impl/model/preprocessed_data_spec.rb b/spec/impl/model/preprocessed_data_spec.rb index 7b2c9fa7..753590d5 100644 --- a/spec/impl/model/preprocessed_data_spec.rb +++ b/spec/impl/model/preprocessed_data_spec.rb @@ -1,11 +1,7 @@ +require "ldclient-rb/impl/model/feature_flag" require "model_builders" require "spec_helper" -def strip_preprocessed_nulls(json) - # currently we can't avoid emitting these null properties - we just don't want to see anything other than null there - json.gsub('"_preprocessed":null,', '').gsub(',"_preprocessed":null', '') -end - module LaunchDarkly module Impl module DataModelPreprocessing @@ -33,10 +29,9 @@ module DataModelPreprocessing }, ], } - flag = clone_json_object(original_flag) - Preprocessor.new().preprocess_flag!(flag) + flag = Model::FeatureFlag.new(original_flag) json = Model.serialize(FEATURES, flag) - parsed = JSON.parse(strip_preprocessed_nulls(json), symbolize_names: true) + parsed = JSON.parse(json, symbolize_names: true) expect(parsed).to eq(original_flag) end end diff --git a/spec/impl/model/serialization_spec.rb b/spec/impl/model/serialization_spec.rb index 0d6fa4de..f2d364eb 100644 --- a/spec/impl/model/serialization_spec.rb +++ b/spec/impl/model/serialization_spec.rb @@ -5,40 +5,30 @@ module LaunchDarkly module Impl module Model describe "model serialization" do - factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic - it "serializes flag" do - flag = { key: "flagkey", version: 1 } + flag = FlagBuilder.new("flagkey").version(1).build json = Model.serialize(FEATURES, flag) - expect(JSON.parse(json, symbolize_names: true)).to eq flag + expect(JSON.parse(json, symbolize_names: true)).to eq flag.data end it "serializes segment" do - segment = { key: "segkey", version: 1 } + segment = SegmentBuilder.new("segkey").version(1).build json = Model.serialize(SEGMENTS, segment) - expect(JSON.parse(json, symbolize_names: true)).to eq segment - end - - it "serializes arbitrary data kind" do - thing = { key: "thingkey", name: "me" } - json = Model.serialize({ name: "things" }, thing) - 
expect(JSON.parse(json, symbolize_names: true)).to eq thing + expect(JSON.parse(json, symbolize_names: true)).to eq segment.data end it "deserializes flag with no rules or prerequisites" do flag_in = { key: "flagkey", version: 1 } - flag_preprocessed = factory.flag(flag_in) - json = Model.serialize(FEATURES, flag_preprocessed) - flag_out = Model.deserialize(FEATURES, json) - expect(flag_out).to eq flag_preprocessed + json = flag_in.to_json + flag_out = Model.deserialize(FEATURES, json, nil) + expect(flag_out.data).to eq flag_in end it "deserializes segment" do segment_in = { key: "segkey", version: 1 } - segment_preprocessed = factory.segment(segment_in) - json = Model.serialize(SEGMENTS, segment_preprocessed) - segment_out = Model.deserialize(SEGMENTS, json) - expect(segment_out).to eq factory.segment(segment_preprocessed) + json = segment_in.to_json + segment_out = Model.deserialize(SEGMENTS, json, nil) + expect(segment_out.data).to eq segment_in end end end diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb index f72f33b9..d8888a1e 100644 --- a/spec/integrations/test_data_spec.rb +++ b/spec/integrations/test_data_spec.rb @@ -16,7 +16,7 @@ module Integrations td.update(td.flag('flag')) config = Config.new(send_events: false, data_source: td) client = LDClient.new('sdkKey', config) - expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, @@ -35,7 +35,7 @@ module Integrations config2 = Config.new(send_events: false, data_source: td) client2 = LDClient.new('sdkKey', config2) - expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, @@ -43,7 +43,7 @@ module Integrations on: true, version: 1, }) - expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ + expect(config2.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 0 }, @@ -54,7 +54,7 @@ module Integrations td.update(td.flag('flag').variation_for_all_users(false)) - expect(config.feature_store.get(FEATURES, 'flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 1 }, @@ -62,7 +62,7 @@ module Integrations on: true, version: 2, }) - expect(config2.feature_store.get(FEATURES, 'flag')).to eql({ + expect(config2.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', variations: [true, false], fallthrough: { variation: 1 }, @@ -83,22 +83,22 @@ module Integrations config = Config.new(send_events: false, data_source: td) client = LDClient.new('sdkKey', config) - expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'my-flag').data).to eql({ key: 'my-flag', version: 1000, on: true }) - expect(config.feature_store.get(SEGMENTS, 'my-segment')).to eql({ + expect(config.feature_store.get(SEGMENTS, 'my-segment').data).to eql({ key: 'my-segment', version: 2000 }) td.use_preconfigured_flag({ key: 'my-flag', on: false }) - expect(config.feature_store.get(FEATURES, 'my-flag')).to eql({ + expect(config.feature_store.get(FEATURES, 'my-flag').data).to eql({ key: 'my-flag', version: 1001, on: false }) td.use_preconfigured_segment({ key: 'my-segment', included: [ 'x' ] }) - expect(config.feature_store.get(SEGMENTS, 
'my-segment')).to eql({ + expect(config.feature_store.get(SEGMENTS, 'my-segment').data).to eql({ key: 'my-segment', version: 2001, included: [ 'x' ] }) diff --git a/spec/model_builders.rb b/spec/model_builders.rb index 19e38c0a..72a69c27 100644 --- a/spec/model_builders.rb +++ b/spec/model_builders.rb @@ -1,43 +1,45 @@ -require "ldclient-rb/impl/model/preprocessed_data" +require "ldclient-rb/impl/model/feature_flag" +require "ldclient-rb/impl/model/segment" require "json" -def clone_json_object(o) - JSON.parse(o.to_json, symbolize_names: true) -end - -class DataItemFactory - def initialize(with_preprocessing) - @with_preprocessing = with_preprocessing - end - - def flag(flag_data) - @with_preprocessing ? preprocessed_flag(flag_data) : flag_data +class Flags + def self.from_hash(data) + LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, data) end - def segment(segment_data) - @with_preprocessing ? preprocessed_segment(segment_data) : segment_data + def self.boolean_flag_with_rules(*rules) + builder = FlagBuilder.new("feature").on(true).variations(false, true).fallthrough_variation(0) + rules.each { |r| builder.rule(r) } + builder.build end - def boolean_flag_with_rules(rules) - flag({ key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] }) + def self.boolean_flag_with_clauses(*clauses) + self.boolean_flag_with_rules({ id: 'ruleid', clauses: clauses, variation: 1 }) end +end - def boolean_flag_with_clauses(clauses) - flag(boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }])) +class Segments + def self.from_hash(data) + LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::SEGMENTS, data) end +end - attr_reader :with_preprocessing - - private def preprocessed_flag(o) - ret = clone_json_object(o) - LaunchDarkly::Impl::DataModelPreprocessing::Preprocessor.new().preprocess_flag!(ret) - ret +class Clauses + def self.match_segment(segment) + { + "attribute": "", + "op": "segmentMatch", + "values": [ segment.is_a?(String) ? segment : segment[:key] ], + } end - private def preprocessed_segment(o) - ret = clone_json_object(o) - LaunchDarkly::Impl::DataModelPreprocessing::Preprocessor.new().preprocess_segment!(ret) - ret + def self.match_user(user, attr = :key) + { + "attribute": attr.to_s, + "op": "in", + "values": [ user.get_value(attr) ], + "contextKind": user.individual_context(0).kind, + } end end @@ -52,7 +54,7 @@ def initialize(key) end def build - DataItemFactory.new(true).flag(@flag) + Flags.from_hash(@flag) end def version(value) @@ -71,7 +73,7 @@ def on(value) end def rule(r) - @flag[:rules].append(r.build) + @flag[:rules].append(r.is_a?(RuleBuilder) ? 
r.build : r) self end @@ -142,18 +144,43 @@ def track_events(value) end end +class SegmentRuleBuilder + def initialize() + @rule = { + clauses: [], + } + end + + def build + @rule.clone + end + + def clause(c) + @rule[:clauses].append(c) + self + end +end + class SegmentBuilder def initialize(key) @segment = { key: key, version: 1, - included: [], - excluded: [], + included: [], + excluded: [], + includedContexts: [], + excludedContexts: [], + rules: [], } end def build - DataItemFactory.new(true).segment(@segment) + Segments.from_hash(@segment) + end + + def version(value) + @segment[:version] = value + self end def included(*keys) @@ -161,11 +188,26 @@ def included(*keys) self end + def included_contexts(kind, *keys) + @segment[:includedContexts].append({ contextKind: kind, values: keys }) + self + end + + def excluded_contexts(kind, *keys) + @segment[:excludedContexts].append({ contextKind: kind, values: keys }) + self + end + def excluded(*keys) @segment[:excluded] = keys self end + def rule(r) + @segment[:rules].append(r.is_a?(SegmentRuleBuilder) ? r.build : r) + self + end + def unbounded(value) @segment[:unbounded] = value self @@ -177,20 +219,39 @@ def generation(value) end end -class Clauses - def self.match_segment(segment) +class DataSetBuilder + def initialize + @flags = {} + @segments = {} + end + + def flag(data) + f = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, data) + @flags[f.key.to_sym] = f + self + end + + def segment(data) + s = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::SEGMENTS, data) + @segments[s.key.to_sym] = s + self + end + + def to_store_data { - "attribute": "", - "op": "segmentMatch", - "values": [ segment.is_a?(Hash) ? segment[:key] : segment ], + LaunchDarkly::FEATURES => @flags, + LaunchDarkly::SEGMENTS => @segments } end - def self.match_user(user) + def to_hash { - "attribute": "key", - "op": "in", - "values": [ user[:key] ], + flags: @flags, + segments: @segments } end + + def to_json + to_hash.to_json + end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3582d3db..7fea7733 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -5,8 +5,6 @@ $sdk_key = "secret" describe LaunchDarkly::Requestor do - factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic - def with_requestor(base_uri, opts = {}) r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri, application: {id: "id", version: "version"} }.merge(opts))) begin @@ -34,12 +32,12 @@ def with_requestor(base_uri, opts = {}) end it "parses response" do - expected_data = { flags: { x: factory.flag({ key: "x" }) } } + expected_data = DataSetBuilder.new.flag(FlagBuilder.new("x").build) with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_ok_response("/", expected_data.to_json) data = requestor.request_all_data() - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) + expect(data).to eq expected_data.to_store_data end end end @@ -49,7 +47,7 @@ def with_requestor(base_uri, opts = {}) logger.level = ::Logger::DEBUG with_server do |server| with_requestor(server.base_uri.to_s, { logger: logger }) do |requestor| - server.setup_ok_response("/", { flags: { x: { key: "y" } } }.to_json) + server.setup_ok_response("/", FlagBuilder.new("x").build.to_json) expect do requestor.request_all_data() end.to output(/\[LDClient\] Got response from uri:/).to_stdout_from_any_process @@ -91,7 +89,7 @@ def with_requestor(base_uri, opts = {}) it "can 
reuse cached data" do etag = "xyz" - expected_data = { flags: { x: factory.flag({ key: "x" }) } } + expected_data = DataSetBuilder.new.flag(FlagBuilder.new("x").build) with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_response("/") do |req, res| @@ -108,7 +106,7 @@ def with_requestor(base_uri, opts = {}) data = requestor.request_all_data() expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) + expect(data).to eq expected_data.to_store_data end end end @@ -116,8 +114,8 @@ def with_requestor(base_uri, opts = {}) it "replaces cached data with new data" do etag1 = "abc" etag2 = "xyz" - expected_data1 = { flags: { x: factory.flag({ key: "x" }) } } - expected_data2 = { flags: { y: factory.flag({ key: "y" }) } } + expected_data1 = DataSetBuilder.new.flag(FlagBuilder.new("x").build) + expected_data2 = DataSetBuilder.new.flag(FlagBuilder.new("y").build) with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| server.setup_response("/") do |req, res| @@ -126,14 +124,14 @@ def with_requestor(base_uri, opts = {}) res["ETag"] = etag1 end data = requestor.request_all_data() - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) + expect(data).to eq expected_data1.to_store_data expect(server.requests.count).to eq 1 server.setup_response("/") do |req, res| res.status = 304 end data = requestor.request_all_data() - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) + expect(data).to eq expected_data1.to_store_data expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) @@ -143,7 +141,7 @@ def with_requestor(base_uri, opts = {}) res["ETag"] = etag2 end data = requestor.request_all_data() - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) + expect(data).to eq expected_data2.to_store_data expect(server.requests.count).to eq 3 expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) @@ -151,7 +149,7 @@ def with_requestor(base_uri, opts = {}) res.status = 304 end data = requestor.request_all_data() - expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) + expect(data).to eq expected_data2.to_store_data expect(server.requests.count).to eq 4 expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) end @@ -159,24 +157,24 @@ def with_requestor(base_uri, opts = {}) end it "uses UTF-8 encoding by default" do - content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + expected_data = DataSetBuilder.new.flag(FlagBuilder.new("flagkey").variations("blue", "grėeń").build) with_server do |server| - server.setup_ok_response("/sdk/latest-all", content, "application/json") + server.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) + expect(data).to eq expected_data.to_store_data end end end it "detects other encodings from Content-Type" do - content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["proszę", "dziękuję"]}}}' + expected_data = DataSetBuilder.new.flag(FlagBuilder.new("flagkey").variations("proszę", "dziękuję").build) with_server do 
|server| - server.setup_ok_response("/sdk/latest-all", content.encode(Encoding::ISO_8859_2), + server.setup_ok_response("/sdk/latest-all", expected_data.to_json.encode(Encoding::ISO_8859_2), "text/plain; charset=ISO-8859-2") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) + expect(data).to eq expected_data.to_store_data end end end @@ -200,14 +198,14 @@ def with_requestor(base_uri, opts = {}) # use a real proxy that really forwards requests to another test server, because # that test server would be at localhost, and proxy environment variables are # ignored if the target is localhost. - expected_data = { flags: { flagkey: factory.flag({ key: "flagkey" }) } } + expected_data = DataSetBuilder.new.flag(FlagBuilder.new("x").build) with_server do |proxy| proxy.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) begin ENV["http_proxy"] = proxy.base_uri.to_s with_requestor(fake_target_uri) do |requestor| data = requestor.request_all_data - expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(expected_data)) + expect(data).to eq expected_data.to_store_data end ensure ENV["http_proxy"] = nil diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 4f2d7b85..cb89830a 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -3,8 +3,6 @@ require "spec_helper" describe LaunchDarkly::StreamProcessor do - factory = DataItemFactory.new(true) # true = enable the usual preprocessing logic - subject { LaunchDarkly::StreamProcessor } let(:config) { LaunchDarkly::Config.new } let(:processor) { subject.new("sdk_key", config) } @@ -18,16 +16,16 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(factory.flag(key: "asdf")) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(factory.segment(key: "segkey")) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf")) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(Segments.from_hash(key: "segkey")) end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(factory.flag(key: "asdf", version: 1)) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf", version: 1)) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(factory.segment(key: "asdf", version: 1)) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(Segments.from_hash(key: "asdf", version: 1)) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) From 6204833e68f1564a161ad31e948d11a6e5703c66 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 19:12:39 -0700 Subject: [PATCH 264/292] lint --- lib/ldclient-rb/impl/model/segment.rb | 6 +++--- lib/ldclient-rb/impl/model/serialization.rb | 4 ++-- spec/model_builders.rb | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index 36d873fc..1b283fef 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ 
b/lib/ldclient-rb/impl/model/segment.rb @@ -62,9 +62,9 @@ def [](key) end def ==(other) - other.is_a?(Segment) && other.data == self.data - end - + other.is_a?(Segment) && other.data == self.data + end + def as_json(*) # parameter is unused, but may be passed if we're using the json gem @data end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 9f3e4c28..5751be64 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -35,9 +35,9 @@ def self.serialize(kind, item) # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. def self.make_all_store_data(received_data, logger = nil) - return { + { FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) }, - SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) } + SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) }, } end end diff --git a/spec/model_builders.rb b/spec/model_builders.rb index 72a69c27..7ec784ef 100644 --- a/spec/model_builders.rb +++ b/spec/model_builders.rb @@ -240,18 +240,18 @@ def segment(data) def to_store_data { LaunchDarkly::FEATURES => @flags, - LaunchDarkly::SEGMENTS => @segments + LaunchDarkly::SEGMENTS => @segments, } end def to_hash { flags: @flags, - segments: @segments + segments: @segments, } end - def to_json + def to_json(*) to_hash.to_json end end From fd25f46921da4cc18c1b4ca246019114e908778d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 19:24:55 -0700 Subject: [PATCH 265/292] remove [] override methods in places where we don't need them --- lib/ldclient-rb/impl/model/clause.rb | 4 ---- lib/ldclient-rb/impl/model/feature_flag.rb | 6 ------ 2 files changed, 10 deletions(-) diff --git a/lib/ldclient-rb/impl/model/clause.rb b/lib/ldclient-rb/impl/model/clause.rb index 2e30a004..c342c965 100644 --- a/lib/ldclient-rb/impl/model/clause.rb +++ b/lib/ldclient-rb/impl/model/clause.rb @@ -25,10 +25,6 @@ def initialize(data) # @return [Boolean] attr_reader :negate - def [](key) - @data[key] - end - def as_json @data end diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 2a094e51..43b8b41d 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -117,12 +117,6 @@ def initialize(data, flag, logger) # @return [LaunchDarkly::EvaluationDetail] attr_reader :match_result - # This method allows us to read properties of the object as if it's just a hash; we can remove it if we - # migrate entirely to using attributes of the class - def [](key) - @data[key] - end - def as_json @data end From 1a6a1d04da0cb7aee4c129f6dca5ad678c843e33 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 19:46:27 -0700 Subject: [PATCH 266/292] comments --- lib/ldclient-rb/impl/model/clause.rb | 2 ++ lib/ldclient-rb/impl/model/feature_flag.rb | 8 ++---- lib/ldclient-rb/impl/model/segment.rb | 2 ++ lib/ldclient-rb/impl/model/serialization.rb | 29 ++++++++++++++++++++- 4 files changed, 34 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/impl/model/clause.rb b/lib/ldclient-rb/impl/model/clause.rb index c342c965..61d8dbf2 100644 --- a/lib/ldclient-rb/impl/model/clause.rb +++ b/lib/ldclient-rb/impl/model/clause.rb @@ -1,4 +1,6 @@ +# See serialization.rb for implementation notes on the data model classes. 
+ module LaunchDarkly module Impl module Model diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 43b8b41d..20310729 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -1,6 +1,8 @@ require "ldclient-rb/impl/evaluator_helpers" require "ldclient-rb/impl/model/clause" +# See serialization.rb for implementation notes on the data model classes. + module LaunchDarkly module Impl module Model @@ -141,12 +143,6 @@ def initialize(data, rule_index, flag) # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations] attr_reader :match_results - # This method allows us to read properties of the object as if it's just a hash; we can remove it if we - # migrate entirely to using attributes of the class - def [](key) - @data[key] - end - def as_json @data end diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index 1b283fef..06b778d9 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -1,6 +1,8 @@ require "ldclient-rb/impl/model/clause" require "ldclient-rb/impl/model/preprocessed_data" +# See serialization.rb for implementation notes on the data model classes. + module LaunchDarkly module Impl module Model diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 5751be64..40fe4faf 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -2,12 +2,38 @@ require "ldclient-rb/impl/model/preprocessed_data" require "ldclient-rb/impl/model/segment" +# General implementation notes about the data model classes in LaunchDarkly::Impl::Model-- +# +# As soon as we receive flag/segment JSON data from LaunchDarkly (or, read it from a database), we +# transform it into the model classes FeatureFlag, Segment, etc. The constructor of each of these +# classes takes a hash (the parsed JSON), and transforms it into an internal representation that +# is more efficient for evaluations. +# +# Validation works as follows: +# - A property value that is of the correct type, but is invalid for other reasons (for example, +# if a flag rule refers to variation index 5, but there are only 2 variations in the flag), does +# not prevent the flag from being parsed and stored. It does cause a warning to be logged, if a +# logger was passed to the constructor. +# - If a value's that is completely invalid for the schema, the constructor may throw an +# exception, causing the whole data set to be rejected. This is consistent with the behavior of +# the strongly-typed SDKs. +# +# Currently, the model classes also retain the original hash of the parsed JSON. This is because +# we may need to re-serialize them to JSON, and building the JSON on the fly would be very +# inefficient, so each model class has a to_json method that just returns the same Hash. If we +# are able in the future to either use a custom streaming serializer, or pass the JSON data +# straight through from LaunchDarkly to a database instead of re-serializing, we could stop +# retaining this data. + module LaunchDarkly module Impl module Model # Abstraction of deserializing a feature flag or segment that was read from a data store or # received from LaunchDarkly. # + # SDK code outside of Impl::Model should use this method instead of calling the model class + # constructors directly, so as not to rely on implementation details. 
+ # # @param kind [Hash] normally either FEATURES or SEGMENTS # @param input [object] a JSON string or a parsed hash (or a data model object, in which case # we'll just return the original object) @@ -28,7 +54,8 @@ def self.deserialize(kind, input, logger = nil) end # Abstraction of serializing a feature flag or segment that will be written to a data store. - # Currently we just call to_json. + # Currently we just call to_json, but SDK code outside of Impl::Model should use this method + # instead of to_json, so as not to rely on implementation details. def self.serialize(kind, item) item.to_json end From 351b3ec38e8f782a2dbb7fcb22071e199d418f31 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 20:36:33 -0700 Subject: [PATCH 267/292] migrate some more of the model to be non-hash classes --- lib/ldclient-rb/impl/evaluator.rb | 2 +- lib/ldclient-rb/impl/evaluator_bucketing.rb | 27 ++++--- lib/ldclient-rb/impl/model/feature_flag.rb | 81 ++++++++++++++++----- lib/ldclient-rb/impl/model/segment.rb | 5 +- lib/ldclient-rb/integrations/test_data.rb | 2 +- spec/impl/evaluator_bucketing_spec.rb | 50 ++++++------- 6 files changed, 105 insertions(+), 62 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index c592a2ba..c8f354d2 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -107,7 +107,7 @@ def eval_internal(flag, context, state) # Check custom rules flag.rules.each do |rule| if rule_match_context(rule, context, state) - return get_value_for_variation_or_rollout(flag, rule, context, rule.match_results) + return get_value_for_variation_or_rollout(flag, rule.variation_or_rollout, context, rule.match_results) end end diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 37b061f0..1f577527 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -5,29 +5,28 @@ module EvaluatorBucketing # Applies either a fixed variation or a rollout for a rule (or the fallthrough rule). # # @param flag [Object] the feature flag - # @param rule [Object] the rule - # @param context [LDContext] the context properties + # @param vr [LaunchDarkly::Impl::Model::VariationOrRollout] the variation/rollout properties + # @param context [LaunchDarkly::LDContext] the context properties # @return [Array<[Number, nil], Boolean>] the variation index, or nil if there is an error - def self.variation_index_for_context(flag, rule, context) - - variation = rule[:variation] + def self.variation_index_for_context(flag, vr, context) + variation = vr.variation return variation, false unless variation.nil? # fixed variation - rollout = rule[:rollout] + rollout = vr.rollout return nil, false if rollout.nil? - variations = rollout[:variations] + variations = rollout.variations if !variations.nil? && variations.length > 0 # percentage rollout - rollout_is_experiment = rollout[:kind] == "experiment" - bucket_by = rollout_is_experiment ? nil : rollout[:bucketBy] + rollout_is_experiment = rollout.is_experiment + bucket_by = rollout_is_experiment ? nil : rollout.bucket_by bucket_by = 'key' if bucket_by.nil? - seed = rollout[:seed] - bucket = bucket_context(context, rollout[:contextKind], flag[:key], bucket_by, flag[:salt], seed) # may not be present + seed = rollout.seed + bucket = bucket_context(context, rollout.context_kind, flag.key, bucket_by, flag.salt, seed) # may not be present in_experiment = rollout_is_experiment && !bucket.nil? 
sum = 0 variations.each do |variate| - sum += variate[:weight].to_f / 100000.0 + sum += variate.weight.to_f / 100000.0 if bucket.nil? || bucket < sum - return variate[:variation], in_experiment && !variate[:untracked] + return variate.variation, in_experiment && !variate.untracked end end # The context's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -36,7 +35,7 @@ def self.variation_index_for_context(flag, rule, context) # this case (or changing the scaling, which would potentially change the results for *all* contexts), we # will simply put the context in the last bucket. last_variation = variations[-1] - [last_variation[:variation], in_experiment && !last_variation[:untracked]] + [last_variation.variation, in_experiment && !last_variation.untracked] else # the rule isn't well-formed [nil, false] end diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 20310729..5b364adb 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -18,11 +18,9 @@ def initialize(data, logger = nil) return if @deleted @variations = data[:variations] || [] @on = !!data[:on] - @fallthrough = data[:fallthrough] + fallthrough = data[:fallthrough] || {} + @fallthrough = VariationOrRollout.new(fallthrough[:variation], fallthrough[:rollout]) @off_variation = data[:offVariation] - @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger) - @fallthrough_results = Preprocessor.precompute_multi_variation_results(self, - EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) @prerequisites = (data[:prerequisites] || []).map do |prereq_data| Prerequisite.new(prereq_data, self, logger) end @@ -32,6 +30,10 @@ def initialize(data, logger = nil) @rules = (data[:rules] || []).map.with_index do |rule_data, index| FlagRule.new(rule_data, index, self) end + @salt = data[:salt] + @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger) + @fallthrough_results = Preprocessor.precompute_multi_variation_results(self, + EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) end # @return [Hash] @@ -48,7 +50,7 @@ def initialize(data, logger = nil) attr_reader :on # @return [Integer|nil] attr_reader :off_variation - # @return [Hash] + # @return [LaunchDarkly::Impl::Model::VariationOrRollout] attr_reader :fallthrough # @return [LaunchDarkly::EvaluationDetail] attr_reader :off_result @@ -60,9 +62,12 @@ def initialize(data, logger = nil) attr_reader :targets # @return [Array] attr_reader :rules + # @return [String] + attr_reader :salt - # This method allows us to read properties of the object as if it's just a hash; we can remove it if we - # migrate entirely to using attributes of the class + # This method allows us to read properties of the object as if it's just a hash. Currently this is + # necessary because some data store logic is still written to expect hashes; we can remove it once + # we migrate entirely to using attributes of the class. 
def [](key) @data[key] end @@ -98,10 +103,6 @@ def initialize(data, flag, logger) attr_reader :variation # @return [LaunchDarkly::EvaluationDetail] attr_reader :failure_result - - def as_json - @data - end end class Target @@ -118,10 +119,6 @@ def initialize(data, flag, logger) attr_reader :values # @return [LaunchDarkly::EvaluationDetail] attr_reader :match_result - - def as_json - @data - end end class FlagRule @@ -130,6 +127,7 @@ def initialize(data, rule_index, flag) @clauses = (data[:clauses] || []).map do |clause_data| Clause.new(clause_data) end + @variation_or_rollout = VariationOrRollout.new(data[:variation], data[:rollout]) rule_id = data[:id] match_reason = EvaluationReason::rule_match(rule_index, rule_id) match_reason_in_experiment = EvaluationReason::rule_match(rule_index, rule_id, true) @@ -142,10 +140,59 @@ def initialize(data, rule_index, flag) attr_reader :clauses # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations] attr_reader :match_results + # @return [LaunchDarkly::Impl::Model::VariationOrRollout] + attr_reader :variation_or_rollout + end - def as_json - @data + class VariationOrRollout + def initialize(variation, rollout_data) + @variation = variation + @rollout = rollout_data.nil? ? nil : Rollout.new(rollout_data) end + + # @return [Integer|nil] + attr_reader :variation + # @return [Rollout|nil] currently we do not have a model class for the rollout + attr_reader :rollout + end + + class Rollout + def initialize(data) + @context_kind = data[:contextKind] + @variations = (data[:variations] || []).map { |v| WeightedVariation.new(v) } + @bucket_by = data[:bucketBy] + @kind = data[:kind] + @is_experiment = @kind == "experiment" + @seed = data[:seed] + end + + # @return [String|nil] + attr_reader :context_kind + # @return [Array] + attr_reader :variations + # @return [String|nil] + attr_reader :bucket_by + # @return [String|nil] + attr_reader :kind + # @return [Boolean] + attr_reader :is_experiment + # @return [Integer|nil] + attr_reader :seed + end + + class WeightedVariation + def initialize(data) + @variation = data[:variation] + @weight = data[:weight] + @untracked = !!data[:untracked] + end + + # @return [Integer] + attr_reader :variation + # @return [Integer] + attr_reader :weight + # @return [Boolean] + attr_reader :untracked end # Clause is defined in its own file because clauses are used by both flags and segments diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index 06b778d9..47bc9ab1 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -57,8 +57,9 @@ def initialize(data, logger = nil) # @return [String] attr_reader :salt - # This method allows us to read properties of the object as if it's just a hash; we can remove it if we - # migrate entirely to using attributes of the class + # This method allows us to read properties of the object as if it's just a hash. Currently this is + # necessary because some data store logic is still written to expect hashes; we can remove it once + # we migrate entirely to using attributes of the class. 
def [](key) @data[key] end diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index f8f0ced4..cf38f01d 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -121,7 +121,7 @@ def update(flag_builder) if @current_flags[flag_key] then version = @current_flags[flag_key][:version] end - new_flag = Impl::Model::FeatureFlag.new(flag_builder.build(version+1)) + new_flag = Impl::Model.deserialize(FEATURES, flag_builder.build(version+1)) @current_flags[flag_key] = new_flag end update_item(FEATURES, new_flag) diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index e30479ba..b8f27d20 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -1,3 +1,4 @@ +require "model_builders" require "spec_helper" describe LaunchDarkly::Impl::EvaluatorBucketing do @@ -114,18 +115,17 @@ bad_variation_a = 0 matched_variation = 1 bad_variation_b = 2 - rule = { - rollout: { + vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, + { variations: [ { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) }, ], - }, - } - flag = { key: flag_key, salt: salt } + }) + flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) expect(result_variation).to be matched_variation expect(inExperiment).to be(false) end @@ -138,16 +138,15 @@ bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value - rule = { - rollout: { + vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, + { variations: [ { variation: 0, weight: bucket_value }, ], - }, - } - flag = { key: flag_key, salt: salt } + }) + flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) expect(result_variation).to be 0 expect(inExperiment).to be(false) end @@ -163,9 +162,8 @@ salt = "salt" seed = 61 - - rule = { - rollout: { + vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, + { seed: seed, kind: 'experiment', variations: [ @@ -173,17 +171,16 @@ { variation: 2, weight: 20000, untracked: false }, { variation: 0, weight: 70000 , untracked: true }, ], - }, - } - flag = { key: flag_key, salt: salt } + }) + flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user1) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user1) expect(result_variation).to be(0) expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user2) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user2) expect(result_variation).to be(2) expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user3) + result_variation, inExperiment = 
subject.variation_index_for_context(flag, vr, user3) expect(result_variation).to be(0) expect(inExperiment).to be(false) end @@ -197,18 +194,17 @@ bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value - rule = { - rollout: { + vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, + { seed: seed, kind: 'experiment', variations: [ { variation: 0, weight: bucket_value, untracked: false }, ], - }, - } - flag = { key: flag_key, salt: salt } + }) + flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) expect(result_variation).to be 0 expect(inExperiment).to be(true) end From d4ff5bc73cab03ae5ccc3dfeff570af8ffcb0410 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 2 Nov 2022 20:40:07 -0700 Subject: [PATCH 268/292] lint --- spec/impl/evaluator_bucketing_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index b8f27d20..0415376d 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -202,7 +202,7 @@ { variation: 0, weight: bucket_value, untracked: false }, ], }) - flag = Flags.from_hash({ key: flag_key, salt: salt }) + flag = Flags.from_hash({ key: flag_key, salt: salt }) result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) expect(result_variation).to be 0 From 203a8e5de0085ddb4461677c805d1a12b6b15e9f Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 8 Nov 2022 14:31:32 -0500 Subject: [PATCH 269/292] Anonymous cannot be nil in new context format (#216) The legacy user format allowed anonymous to be missing or explicitly provided but set to nil. The new context format requires anonymous to either not be set, or if it is explicitly set, it must be a boolean value. 
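For illustration only (not part of this change), a minimal sketch of the resulting behavior, assuming the SDK is loaded with require "ldclient-rb" and using the same LDContext.create calls exercised in the spec change that follows:

    require "ldclient-rb"

    # Legacy user hashes (no :kind) may still set anonymous to nil.
    LaunchDarkly::LDContext.create({ key: "user-key", anonymous: nil }).valid?
    # => true

    # New-format contexts may omit anonymous, or set it to a boolean value...
    LaunchDarkly::LDContext.create({ key: "user-key", kind: "user", anonymous: true }).valid?
    # => true

    # ...but explicitly passing nil now makes the context invalid.
    LaunchDarkly::LDContext.create({ key: "user-key", kind: "user", anonymous: nil }).valid?
    # => false

Whether nil counts as "not set" therefore depends on whether the hash is in the legacy user format or the new context format.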
--- lib/ldclient-rb/context.rb | 6 +++--- lib/ldclient-rb/impl/context.rb | 5 +++-- spec/context_spec.rb | 1 + 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 91740335..4d15c8d2 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -345,7 +345,7 @@ def self.create_multi(contexts) end anonymous = data[:anonymous] - unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) + unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous, true) return create_invalid_context("The anonymous value was set to a non-boolean value.") end @@ -399,8 +399,8 @@ def self.create_multi(contexts) return create_invalid_context("The name value was set to a non-string value.") end - anonymous = data[:anonymous] - unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous) + anonymous = data.fetch(:anonymous, false) + unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous, false) return create_invalid_context("The anonymous value was set to a non-boolean value.") end diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb index e309ec73..1f0734bb 100644 --- a/lib/ldclient-rb/impl/context.rb +++ b/lib/ldclient-rb/impl/context.rb @@ -44,10 +44,11 @@ def self.validate_name(name) # # @param anonymous [any] + # @param allow_nil [Boolean] # @return [Boolean] # - def self.validate_anonymous(anonymous) - return true if anonymous.nil? + def self.validate_anonymous(anonymous, allow_nil) + return true if anonymous.nil? && allow_nil [true, false].include? anonymous end end diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 10251b40..cf052f8d 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -115,6 +115,7 @@ it "anonymous is required to be a boolean or nil" do expect(subject.create({ key: "key", kind: "user" }).valid?).to be true + expect(subject.create({ key: "key", kind: "user", anonymous: nil }).valid?).to be false expect(subject.create({ key: "key", kind: "user", anonymous: true }).valid?).to be true expect(subject.create({ key: "key", kind: "user", anonymous: false }).valid?).to be true expect(subject.create({ key: "key", kind: "user", anonymous: 0 }).valid?).to be false From 56bf82d220ea1d9b3fbdac6979bffc0e5f635285 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 8 Nov 2022 16:04:08 -0500 Subject: [PATCH 270/292] Tweak error message language and style (#217) Our previous error messages suffered from a couple drawbacks: - The messages were complete sentences, limiting our ability to compose error messages - The messages were overly broad in many cases - The messages unnecessarily required string interpolation that rarely provided much value These new messages are more succinct and are written as small clauses which can be used in conjunction with other error messages more easily. 
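For illustration only (not part of this change), a small sketch of the new return style, using the internal Impl::Context validators changed below; the raise at the end is a hypothetical caller, not SDK behavior:

    require "ldclient-rb"

    # Each validator now returns nil on success, or a short error clause on
    # failure, instead of a boolean, so callers can compose the clause into a
    # larger message of their own.
    err = LaunchDarkly::Impl::Context.validate_kind("multi")
    # err == '"multi" is not a valid context kind'

    # Hypothetical caller-side composition (not SDK code):
    raise ArgumentError, "cannot build evaluation context: #{err}" unless err.nil?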
--- lib/ldclient-rb/context.rb | 70 +++++++++++++++++++-------------- lib/ldclient-rb/impl/context.rb | 46 ++++++++++++++++------ spec/impl/context_spec.rb | 24 +++++------ 3 files changed, 87 insertions(+), 53 deletions(-) diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 4d15c8d2..8b1532c5 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -21,6 +21,22 @@ class LDContext KIND_DEFAULT = "user" KIND_MULTI = "multi" + ERR_NOT_HASH = 'context data is not a hash' + private_constant :ERR_NOT_HASH + ERR_KEY_EMPTY = 'context key must not be null or empty' + private_constant :ERR_KEY_EMPTY + ERR_KIND_MULTI_NON_CONTEXT_ARRAY = 'context data must be an array of valid LDContexts' + private_constant :ERR_KIND_MULTI_NON_CONTEXT_ARRAY + ERR_KIND_MULTI_CANNOT_CONTAIN_MULTI = 'multi-kind context cannot contain another multi-kind context' + private_constant :ERR_KIND_MULTI_CANNOT_CONTAIN_MULTI + ERR_KIND_MULTI_WITH_NO_KINDS = 'multi-context must contain at least one kind' + private_constant :ERR_KIND_MULTI_WITH_NO_KINDS + ERR_KIND_MULTI_DUPLICATES = 'multi-kind context cannot have same kind more than once' + private_constant :ERR_KIND_MULTI_DUPLICATES + ERR_CUSTOM_NON_HASH = 'context custom must be a hash' + private_constant :ERR_CUSTOM_NON_HASH + ERR_PRIVATE_NON_ARRAY = 'context private attributes must be an array' + # @return [String, nil] Returns the key for this context attr_reader :key @@ -262,7 +278,7 @@ def self.with_key(key, kind = KIND_DEFAULT) # @return [LDContext] # def self.create(data) - return create_invalid_context("Cannot create an LDContext. Provided data is not a hash.") unless data.is_a?(Hash) + return create_invalid_context(ERR_NOT_HASH) unless data.is_a?(Hash) return create_legacy_context(data) unless data.has_key?(:kind) kind = data[:kind] @@ -298,19 +314,19 @@ def self.create(data) # @return [LDContext] # def self.create_multi(contexts) - return create_invalid_context("Multi-kind context requires an array of LDContexts") unless contexts.is_a?(Array) - return create_invalid_context("Multi-kind context requires at least one context") if contexts.empty? + return create_invalid_context(ERR_KIND_MULTI_NON_CONTEXT_ARRAY) unless contexts.is_a?(Array) + return create_invalid_context(ERR_KIND_MULTI_WITH_NO_KINDS) if contexts.empty? kinds = Set.new contexts.each do |context| if !context.is_a?(LDContext) - return create_invalid_context("Provided context is not an instance of LDContext") + return create_invalid_context(ERR_KIND_MULTI_NON_CONTEXT_ARRAY) elsif !context.valid? - return create_invalid_context("Provided context #{context.key} is invalid") + return create_invalid_context(ERR_KIND_MULTI_NON_CONTEXT_ARRAY) elsif context.multi_kind? - return create_invalid_context("Provided context #{context.key} is a multi-kind context") + return create_invalid_context(ERR_KIND_MULTI_CANNOT_CONTAIN_MULTI) elsif kinds.include? context.kind - return create_invalid_context("Kind #{context.kind} cannot occur twice in the same multi-kind context") + return create_invalid_context(ERR_KIND_MULTI_DUPLICATES) end kinds.add(context.kind) @@ -337,21 +353,19 @@ def self.create_multi(contexts) key = data[:key] # Legacy users are allowed to have "" as a key but they cannot have nil as a key. - return create_invalid_context("The key for the context was not valid") if key.nil? + return create_invalid_context(ERR_KEY_EMPTY) if key.nil? 
name = data[:name] - unless LaunchDarkly::Impl::Context.validate_name(name) - return create_invalid_context("The name value was set to a non-string value.") - end + name_error = LaunchDarkly::Impl::Context.validate_name(name) + return create_invalid_context(name_error) unless name_error.nil? anonymous = data[:anonymous] - unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous, true) - return create_invalid_context("The anonymous value was set to a non-boolean value.") - end + anonymous_error = LaunchDarkly::Impl::Context.validate_anonymous(anonymous, true) + return create_invalid_context(anonymous_error) unless anonymous_error.nil? custom = data[:custom] unless custom.nil? || custom.is_a?(Hash) - return create_invalid_context("The custom value was set to a non-hash value.") + return create_invalid_context(ERR_CUSTOM_NON_HASH) end # We only need to create an attribute hash if one of these keys exist. @@ -369,7 +383,7 @@ def self.create_multi(contexts) private_attributes = data[:privateAttributeNames] if private_attributes && !private_attributes.is_a?(Array) - return create_invalid_context("The provided private attributes are not an array") + return create_invalid_context(ERR_PRIVATE_NON_ARRAY) end new(key.to_s, KIND_DEFAULT, name, anonymous, attributes, private_attributes) @@ -382,32 +396,28 @@ def self.create_multi(contexts) # private_class_method def self.create_single_context(data, kind) unless data.is_a?(Hash) - return create_invalid_context("The provided data was not a hash") + return create_invalid_context(ERR_NOT_HASH) end - unless LaunchDarkly::Impl::Context.validate_kind(kind) - return create_invalid_context("The kind (#{kind || 'nil'}) was not valid for the provided context.") - end + kind_error = LaunchDarkly::Impl::Context.validate_kind(kind) + return create_invalid_context(kind_error) unless kind_error.nil? key = data[:key] - unless LaunchDarkly::Impl::Context.validate_key(key) - return create_invalid_context("The key (#{key || 'nil'}) was not valid for the provided context.") - end + key_error = LaunchDarkly::Impl::Context.validate_key(key) + return create_invalid_context(key_error) unless key_error.nil? name = data[:name] - unless LaunchDarkly::Impl::Context.validate_name(name) - return create_invalid_context("The name value was set to a non-string value.") - end + name_error = LaunchDarkly::Impl::Context.validate_name(name) + return create_invalid_context(name_error) unless name_error.nil? anonymous = data.fetch(:anonymous, false) - unless LaunchDarkly::Impl::Context.validate_anonymous(anonymous, false) - return create_invalid_context("The anonymous value was set to a non-boolean value.") - end + anonymous_error = LaunchDarkly::Impl::Context.validate_anonymous(anonymous, false) + return create_invalid_context(anonymous_error) unless anonymous_error.nil? 
meta = data.fetch(:_meta, {}) private_attributes = meta[:privateAttributes] if private_attributes && !private_attributes.is_a?(Array) - return create_invalid_context("The provided private attributes are not an array") + return create_invalid_context(ERR_PRIVATE_NON_ARRAY) end # We only need to create an attribute hash if there are keys set outside diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb index 1f0734bb..ed80f032 100644 --- a/lib/ldclient-rb/impl/context.rb +++ b/lib/ldclient-rb/impl/context.rb @@ -1,6 +1,18 @@ module LaunchDarkly module Impl module Context + ERR_KIND_NON_STRING = 'context kind must be a string' + ERR_KIND_CANNOT_BE_KIND = '"kind" is not a valid context kind' + ERR_KIND_CANNOT_BE_MULTI = '"multi" is not a valid context kind' + ERR_KIND_INVALID_CHARS = 'context kind contains disallowed characters' + + ERR_KEY_NON_STRING = 'context key must be a string' + ERR_KEY_EMPTY = 'context key must not be empty' + + ERR_NAME_NON_STRING = 'context name must be a string' + + ERR_ANONYMOUS_NON_BOOLEAN = 'context anonymous must be a boolean' + # # We allow consumers of this SDK to provide us with either a Hash or an # instance of an LDContext. This is convenient for them but not as much @@ -16,40 +28,52 @@ def self.make_context(context) LDContext.create(context) end + # + # Returns an error message if the kind is invalid; nil otherwise. # # @param kind [any] - # @return [Boolean] + # @return [String, nil] # def self.validate_kind(kind) - return false unless kind.is_a?(String) - kind.match?(/^[\w.-]+$/) && kind != "kind" && kind != "multi" + return ERR_KIND_NON_STRING unless kind.is_a?(String) + return ERR_KIND_CANNOT_BE_KIND if kind == "kind" + return ERR_KIND_CANNOT_BE_MULTI if kind == "multi" + return ERR_KIND_INVALID_CHARS unless kind.match?(/^[\w.-]+$/) end + # + # Returns an error message if the key is invalid; nil otherwise. # # @param key [any] - # @return [Boolean] + # @return [String, nil] # def self.validate_key(key) - return false unless key.is_a?(String) - key != "" + return ERR_KEY_NON_STRING unless key.is_a?(String) + return ERR_KEY_EMPTY if key == "" end + # + # Returns an error message if the name is invalid; nil otherwise. # # @param name [any] - # @return [Boolean] + # @return [String, nil] # def self.validate_name(name) - name.nil? || name.is_a?(String) + return ERR_NAME_NON_STRING unless name.nil? || name.is_a?(String) end + # + # Returns an error message if anonymous is invalid; nil otherwise. # # @param anonymous [any] # @param allow_nil [Boolean] - # @return [Boolean] + # @return [String, nil] # def self.validate_anonymous(anonymous, allow_nil) - return true if anonymous.nil? && allow_nil - [true, false].include? anonymous + return nil if anonymous.nil? && allow_nil + return nil if [true, false].include? 
anonymous + + ERR_ANONYMOUS_NON_BOOLEAN end end end diff --git a/spec/impl/context_spec.rb b/spec/impl/context_spec.rb index 3e87f1f6..4dba4df1 100644 --- a/spec/impl/context_spec.rb +++ b/spec/impl/context_spec.rb @@ -5,27 +5,27 @@ it "can validate kind correctly" do test_cases = [ - [:user, false, "Kind is not a string"], - ["kind", false, "Kind cannot be 'kind'"], - ["multi", false, "Kind cannot be 'multi'"], - ["user@type", false, "Kind cannot include invalid characters"], - ["org", true, "Some kinds are valid"], + [:user, LaunchDarkly::Impl::Context::ERR_KIND_NON_STRING], + ["kind", LaunchDarkly::Impl::Context::ERR_KIND_CANNOT_BE_KIND], + ["multi", LaunchDarkly::Impl::Context::ERR_KIND_CANNOT_BE_MULTI], + ["user@type", LaunchDarkly::Impl::Context::ERR_KIND_INVALID_CHARS], + ["org", nil], ] - test_cases.each do |input, expected, _descr| + test_cases.each do |input, expected| expect(subject.validate_kind(input)).to eq(expected) end end it "can validate a key correctly" do test_cases = [ - [:key, false, "Key is not a string"], - ["", false, "Key cannot be ''"], - ["key", true, "Some keys are valid"], + [:key, LaunchDarkly::Impl::Context::ERR_KEY_NON_STRING], + ["", LaunchDarkly::Impl::Context::ERR_KEY_EMPTY], + ["key", nil], ] - test_cases.each do |input, expected, _descr| - expect(subject.validate_kind(input)).to eq(expected) + test_cases.each do |input, expected| + expect(subject.validate_key(input)).to eq(expected) end end -end +end \ No newline at end of file From 4018f314d637325d4190e7127adb60c82bc825ec Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Nov 2022 13:42:32 -0800 Subject: [PATCH 271/292] copyedit Co-authored-by: Matthew M. Keeler --- lib/ldclient-rb/impl/model/serialization.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 40fe4faf..3bc3029d 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -14,7 +14,7 @@ # if a flag rule refers to variation index 5, but there are only 2 variations in the flag), does # not prevent the flag from being parsed and stored. It does cause a warning to be logged, if a # logger was passed to the constructor. -# - If a value's that is completely invalid for the schema, the constructor may throw an +# - If a value is completely invalid for the schema, the constructor may throw an # exception, causing the whole data set to be rejected. This is consistent with the behavior of # the strongly-typed SDKs. # From bbc442af79e2b75b82410d5f48bd463963fe688d Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Thu, 8 Dec 2022 13:40:55 -0600 Subject: [PATCH 272/292] Implement prerequisite cycle detection (#219) --- Makefile | 1 - lib/ldclient-rb/impl/evaluator.rb | 133 ++++++++++++++++--- spec/impl/evaluator_prereq_spec.rb | 202 +++++++++++++++++++++++++++++ spec/impl/evaluator_spec.rb | 149 --------------------- 4 files changed, 314 insertions(+), 171 deletions(-) create mode 100644 spec/impl/evaluator_prereq_spec.rb diff --git a/Makefile b/Makefile index a48d64b2..14fc588c 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,6 @@ TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ -skip 'evaluation/bucketing/bucket by non-key attribute/in rollouts/string value/complex attribute reference' \ -skip 'evaluation/parameterized/attribute references' \ -skip 'evaluation/parameterized/bad attribute reference errors' \ - -skip 'evaluation/parameterized/prerequisites' \ -skip 'evaluation/parameterized/segment recursion' \ -skip 'evaluation/parameterized/target match/context targets' \ -skip 'evaluation/parameterized/target match/multi-kind' \ diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index c8f354d2..7809c2f7 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -14,6 +14,67 @@ module Impl :detail # the EvaluationDetail representing the evaluation result ) + class EvaluationException < StandardError + def initialize(msg, error_kind) + super(msg) + @error_kind = error_kind + end + + # @return [Symbol] + attr_reader :error_kind + end + + # + # A helper class for managing cycle detection. + # + # Each time a method sees a new flag or segment, they can push that + # object's key onto the stack. Once processing for that object has + # finished, you can call pop to remove it. + # + # Because the most common use case would be a flag or segment without ANY + # prerequisites, this stack has a small optimization in place-- the stack + # is not created until absolutely necessary. + # + class EvaluatorStack + # @param original [String] + def initialize(original) + @original = original + # @type [Array, nil] + @stack = nil + end + + # @param key [String] + def push(key) + # No need to store the key if we already have a record in our instance + # variable. + return if @original == key + + # The common use case is that flags/segments won't have prereqs, so we + # don't allocate the stack memory until we absolutely must. + if @stack.nil? + @stack = [] + end + + @stack.push(key) + end + + def pop + return if @stack.nil? || @stack.empty? + @stack.pop + end + + # + # @param key [String] + # @return [Boolean] + # + def include?(key) + return true if key == @original + return false if @stack.nil? + + @stack.include? key + end + end + # Encapsulates the feature flag evaluation logic. The Evaluator has no knowledge of the rest of the SDK environment; # if it needs to retrieve flags or segments that are referenced by a flag, it does so through a simple function that # is provided in the constructor. 
It also produces feature requests as appropriate for any referenced prerequisite @@ -62,8 +123,21 @@ def self.error_result(errorKind, value = nil) # @param context [LaunchDarkly::LDContext] the evaluation context # @return [EvalResult] the evaluation result def evaluate(flag, context) + stack = EvaluatorStack.new(flag.key) + result = EvalResult.new - detail = eval_internal(flag, context, result) + begin + detail = eval_internal(flag, context, result, stack) + rescue EvaluationException => exn + LaunchDarkly::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) + result.detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(exn.error_kind)) + return result + rescue => exn + LaunchDarkly::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) + result.detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) + return result + end + unless result.big_segments_status.nil? # If big_segments_status is non-nil at the end of the evaluation, it means a query was done at # some point and we will want to include the status in the evaluation reason. @@ -87,12 +161,14 @@ def self.make_big_segment_ref(segment) # method is visible for testing # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag # @param context [LaunchDarkly::LDContext] the evaluation context # @param state [EvalResult] - def eval_internal(flag, context, state) + # @param stack [EvaluatorStack] + # @raise [EvaluationException] + def eval_internal(flag, context, state, stack) unless flag.on return flag.off_result end - prereq_failure_result = check_prerequisites(flag, context, state) + prereq_failure_result = check_prerequisites(flag, context, state, stack) return prereq_failure_result unless prereq_failure_result.nil? # Check context target matches @@ -122,18 +198,32 @@ def eval_internal(flag, context, state) # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag # @param context [LaunchDarkly::LDContext] the evaluation context # @param state [EvalResult] - def check_prerequisites(flag, context, state) - flag.prerequisites.each do |prerequisite| - prereq_ok = true - prereq_key = prerequisite.key - prereq_flag = @get_flag.call(prereq_key) - - if prereq_flag.nil? - @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag.key}\"" } - prereq_ok = false - else - begin - prereq_res = eval_internal(prereq_flag, context, state) + # @param stack [EvaluatorStack] + # @raise [EvaluationException] + def check_prerequisites(flag, context, state, stack) + return if flag.prerequisites.empty? + + stack.push(flag.key) + + begin + flag.prerequisites.each do |prerequisite| + prereq_ok = true + prereq_key = prerequisite.key + + if stack.include?(prereq_key) + raise LaunchDarkly::Impl::EvaluationException.new( + "prerequisite relationship to \"#{prereq_key}\" caused a circular reference; this is probably a temporary condition due to an incomplete update", + EvaluationReason::ERROR_MALFORMED_FLAG + ) + end + + prereq_flag = @get_flag.call(prereq_key) + + if prereq_flag.nil? + @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag.key}\"" } + prereq_ok = false + else + prereq_res = eval_internal(prereq_flag, context, state, stack) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. 
if !prereq_flag.on || prereq_res.variation_index != prerequisite.variation @@ -142,15 +232,16 @@ def check_prerequisites(flag, context, state) prereq_eval = PrerequisiteEvalRecord.new(prereq_flag, flag, prereq_res) state.prereq_evals = [] if state.prereq_evals.nil? state.prereq_evals.push(prereq_eval) - rescue => exn - Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag.key}\"", exn) - prereq_ok = false + end + + unless prereq_ok + return prerequisite.failure_result end end - unless prereq_ok - return prerequisite.failure_result - end + ensure + stack.pop end + nil end diff --git a/spec/impl/evaluator_prereq_spec.rb b/spec/impl/evaluator_prereq_spec.rb new file mode 100644 index 00000000..87146ac7 --- /dev/null +++ b/spec/impl/evaluator_prereq_spec.rb @@ -0,0 +1,202 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "evaluate", :evaluator_spec_base => true do + it "returns off variation if prerequisite is not found" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'badfeature', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + } + ) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build + result = e.evaluate(flag, context) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(nil) + end + + it "reuses prerequisite-failed result detail instances" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'badfeature', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + } + ) + context = LDContext.create({ key: 'x' }) + e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build + result1 = e.evaluate(flag, context) + expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') + result2 = e.evaluate(flag, context) + expect(result2.detail).to be result1.detail + end + + it "returns off variation and event if prerequisite of a prerequisite is not found" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'feature1', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + version: 1, + } + ) + flag1 = Flags.from_hash( + { + key: 'feature1', + on: true, + prerequisites: [{ key: 'feature2', variation: 1 }], # feature2 doesn't exist + fallthrough: { variation: 0 }, + variations: %w[d e], + version: 2, + } + ) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))), + ] + e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build + result = e.evaluate(flag, context) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(expected_prereqs) + end + + it "returns off variation and event if prerequisite is off" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'feature1', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + version: 1, + } + ) + flag1 = Flags.from_hash( + { + key: 'feature1', + on: false, + # 
note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: %w[d e], + version: 2, + } + ) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)), + ] + e = EvaluatorBuilder.new(logger).with_flag(flag1).build + result = e.evaluate(flag, context) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(expected_prereqs) + end + + it "returns off variation and event if prerequisite is not met" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'feature1', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + version: 1, + } + ) + flag1 = Flags.from_hash( + { + key: 'feature1', + on: true, + fallthrough: { variation: 0 }, + variations: %w[d e], + version: 2, + } + ) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)), + ] + e = EvaluatorBuilder.new(logger).with_flag(flag1).build + result = e.evaluate(flag, context) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(expected_prereqs) + end + + it "returns fallthrough variation and event if prerequisite is met and there are no rules" do + flag = Flags.from_hash( + { + key: 'feature0', + on: true, + prerequisites: [{ key: 'feature1', variation: 1 }], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: %w[a b c], + version: 1, + } + ) + flag1 = Flags.from_hash( + { + key: 'feature1', + on: true, + fallthrough: { variation: 1 }, + variations: %w[d e], + version: 2, + } + ) + context = LDContext.create({ key: 'x' }) + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) + expected_prereqs = [ + PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::fallthrough)), + ] + e = EvaluatorBuilder.new(logger).with_flag(flag1).build + result = e.evaluate(flag, context) + expect(result.detail).to eq(detail) + expect(result.prereq_evals).to eq(expected_prereqs) + end + + (1..4).each do |depth| + it "correctly detects cycles are at a depth of #{depth}" do + flags = [] + (0...depth).each do |i| + flags << Flags.from_hash( + { + key: "flagkey#{i}", + on: true, + offVariation: 0, + prerequisites: [{ key: "flagkey#{(i + 1) % depth}", variation: 0 }], + variations: [false, true], + } + ) + end + + builder = EvaluatorBuilder.new(logger) + flags.each { |flag| builder.with_flag(flag) } + + evaluator = builder.build + result = evaluator.evaluate(flags[0], LDContext.with_key('user')) + reason = EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG) + expect(result.detail.reason).to eq(reason) + end + end + end + end +end \ No newline at end of file diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 57786026..47a933d3 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -84,155 +84,6 @@ module Impl expect(result.prereq_evals).to eq(nil) end - it "returns off variation if prerequisite is not found" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - fallthrough: { 
variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) - e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result = e.evaluate(flag, context) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(nil) - end - - it "reuses prerequisite-failed result detail instances" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - }) - context = LDContext.create({ key: 'x' }) - e = EvaluatorBuilder.new(logger).with_unknown_flag('badfeature').build - result1 = e.evaluate(flag, context) - expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') - result2 = e.evaluate(flag, context) - expect(result2.detail).to be result1.detail - end - - it "returns off variation and event if prerequisite of a prerequisite is not found" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - }) - flag1 = Flags.from_hash({ - key: 'feature1', - on: true, - prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2, - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) - expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new(nil, nil, EvaluationReason::prerequisite_failed('feature2'))), - ] - e = EvaluatorBuilder.new(logger).with_flag(flag1).with_unknown_flag('feature2').build - result = e.evaluate(flag, context) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(expected_prereqs) - end - - it "returns off variation and event if prerequisite is off" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - }) - flag1 = Flags.from_hash({ - key: 'feature1', - on: false, - # note that even though it returns the desired variation, it is still off and therefore not a match - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2, - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) - expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::off)), - ] - e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, context) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(expected_prereqs) - end - - it "returns off variation and event if prerequisite is not met" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - }) - flag1 = Flags.from_hash({ - key: 'feature1', - on: true, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2, - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('b', 1, 
EvaluationReason::prerequisite_failed('feature1')) - expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('d', 0, EvaluationReason::fallthrough)), - ] - e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, context) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(expected_prereqs) - end - - it "returns fallthrough variation and event if prerequisite is met and there are no rules" do - flag = Flags.from_hash({ - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1, - }) - flag1 = Flags.from_hash({ - key: 'feature1', - on: true, - fallthrough: { variation: 1 }, - variations: ['d', 'e'], - version: 2, - }) - context = LDContext.create({ key: 'x' }) - detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) - expected_prereqs = [ - PrerequisiteEvalRecord.new(flag1, flag, EvaluationDetail.new('e', 1, EvaluationReason::fallthrough)), - ] - e = EvaluatorBuilder.new(logger).with_flag(flag1).build - result = e.evaluate(flag, context) - expect(result.detail).to eq(detail) - expect(result.prereq_evals).to eq(expected_prereqs) - end - it "returns fallthrough variation if flag is on and no rules match" do flag = Flags.from_hash({ key: 'feature0', From 22d6aa45196a81fca5f2de3488f3259394fdae68 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Thu, 8 Dec 2022 14:08:50 -0600 Subject: [PATCH 273/292] Support attribute reference lookups (#215) This adds support for slash-delimited paths in clause attributes, bucketBy, etc. It does not do anything related to private attribute redaction because none of the U2C event logic is implemented yet. --- Makefile | 3 --- lib/ldclient-rb/impl/evaluator.rb | 30 ++++++++++++++++----- lib/ldclient-rb/impl/evaluator_bucketing.rb | 7 ++++- lib/ldclient-rb/impl/model/clause.rb | 9 ++++--- lib/ldclient-rb/impl/model/feature_flag.rb | 6 ++--- lib/ldclient-rb/impl/model/segment.rb | 6 ++--- spec/impl/evaluator_bucketing_spec.rb | 9 +++++++ spec/impl/evaluator_prereq_spec.rb | 2 +- 8 files changed, 51 insertions(+), 21 deletions(-) diff --git a/Makefile b/Makefile index 14fc588c..1fcee3b3 100644 --- a/Makefile +++ b/Makefile @@ -6,9 +6,6 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # - various other "evaluation" subtests: These tests require context kind support. # - "events": These test suites will be unavailable until more of the U2C implementation is done. 
TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ - -skip 'evaluation/bucketing/bucket by non-key attribute/in rollouts/string value/complex attribute reference' \ - -skip 'evaluation/parameterized/attribute references' \ - -skip 'evaluation/parameterized/bad attribute reference errors' \ -skip 'evaluation/parameterized/segment recursion' \ -skip 'evaluation/parameterized/target match/context targets' \ -skip 'evaluation/parameterized/target match/multi-kind' \ diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 7809c2f7..faab6678 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -15,7 +15,7 @@ module Impl ) class EvaluationException < StandardError - def initialize(msg, error_kind) + def initialize(msg, error_kind = EvaluationReason::ERROR_MALFORMED_FLAG) super(msg) @error_kind = error_kind end @@ -24,6 +24,9 @@ def initialize(msg, error_kind) attr_reader :error_kind end + class InvalidReferenceException < EvaluationException + end + # # A helper class for managing cycle detection. # @@ -199,7 +202,7 @@ def eval_internal(flag, context, state, stack) # @param context [LaunchDarkly::LDContext] the evaluation context # @param state [EvalResult] # @param stack [EvaluatorStack] - # @raise [EvaluationException] + # @raise [EvaluationException] if a flag prereq cycle is detected def check_prerequisites(flag, context, state, stack) return if flag.prerequisites.empty? @@ -212,8 +215,7 @@ def check_prerequisites(flag, context, state, stack) if stack.include?(prereq_key) raise LaunchDarkly::Impl::EvaluationException.new( - "prerequisite relationship to \"#{prereq_key}\" caused a circular reference; this is probably a temporary condition due to an incomplete update", - EvaluationReason::ERROR_MALFORMED_FLAG + "prerequisite relationship to \"#{prereq_key}\" caused a circular reference; this is probably a temporary condition due to an incomplete update" ) end @@ -248,6 +250,8 @@ def check_prerequisites(flag, context, state, stack) # @param rule [LaunchDarkly::Impl::Model::FlagRule] # @param context [LaunchDarkly::LDContext] # @param state [EvalResult] + # @return [Boolean] + # @raise [InvalidReferenceException] def rule_match_context(rule, context, state) rule.clauses.each do |clause| return false unless clause_match_context(clause, context, state) @@ -259,6 +263,8 @@ def rule_match_context(rule, context, state) # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context [LaunchDarkly::LDContext] # @param state [EvalResult] + # @return [Boolean] + # @raise [InvalidReferenceException] def clause_match_context(clause, context, state) # In the case of a segment match operator, we check if the context is in any of the segments, # and possibly negate @@ -304,8 +310,11 @@ def clause_match_context(clause, context, state) # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context [LaunchDarkly::LDContext] # @return [Boolean] + # @raise [InvalidReferenceException] Raised if the clause.attribute is an invalid reference def clause_match_context_no_segments(clause, context) - if clause.attribute == "kind" + raise InvalidReferenceException.new(clause.attribute.error) unless clause.attribute.error.nil? + + if clause.attribute.depth == 1 && clause.attribute.component(0) == :kind result = clause_match_by_kind(clause, context) return clause.negate ? 
!result : result end @@ -313,7 +322,7 @@ def clause_match_context_no_segments(clause, context) matched_context = context.individual_context(clause.context_kind || LaunchDarkly::LDContext::KIND_DEFAULT) return false if matched_context.nil? - user_val = matched_context.get_value(clause.attribute) + user_val = matched_context.get_value_for_reference(clause.attribute) return false if user_val.nil? result = if user_val.is_a? Enumerable @@ -399,6 +408,7 @@ def simple_segment_match_context(segment, context, use_includes_and_excludes) # @param segment_key [String] # @param salt [String] # @return [Boolean] + # @raise [InvalidReferenceException] def segment_rule_match_context(rule, context, segment_key, salt) rule.clauses.each do |c| return false unless clause_match_context_no_segments(c, context) @@ -408,7 +418,12 @@ def segment_rule_match_context(rule, context, segment_key, salt) return true unless rule.weight # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_context(context, rule.rollout_context_kind, segment_key, rule.bucket_by || "key", salt, nil) + begin + bucket = EvaluatorBucketing.bucket_context(context, rule.rollout_context_kind, segment_key, rule.bucket_by || "key", salt, nil) + rescue InvalidReferenceException + return false + end + weight = rule.weight.to_f / 100000.0 bucket.nil? || bucket < weight end @@ -417,6 +432,7 @@ def segment_rule_match_context(rule, context, segment_key, salt) def get_value_for_variation_or_rollout(flag, vr, context, precomputed_results) index, in_experiment = EvaluatorBucketing.variation_index_for_context(flag, vr, context) + if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag.key}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 1f577527..13b5d9c6 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -8,6 +8,7 @@ module EvaluatorBucketing # @param vr [LaunchDarkly::Impl::Model::VariationOrRollout] the variation/rollout properties # @param context [LaunchDarkly::LDContext] the context properties # @return [Array<[Number, nil], Boolean>] the variation index, or nil if there is an error + # @raise [InvalidReferenceException] def self.variation_index_for_context(flag, vr, context) variation = vr.variation return variation, false unless variation.nil? # fixed variation @@ -49,11 +50,15 @@ def self.variation_index_for_context(flag, vr, context) # @param bucket_by [String|Symbol] the name of the context attribute to be used for bucketing # @param salt [String] the feature flag's or segment's salt value # @return [Float, nil] the bucket value, from 0 inclusive to 1 exclusive + # @raise [InvalidReferenceException] Raised if the clause.attribute is an invalid reference def self.bucket_context(context, context_kind, key, bucket_by, salt, seed) matched_context = context.individual_context(context_kind || LaunchDarkly::LDContext::KIND_DEFAULT) return nil if matched_context.nil? - context_value = matched_context.get_value(bucket_by) + reference = (context_kind.nil? || context_kind.empty?) ? Reference.create_literal(bucket_by) : Reference.create(bucket_by) + raise InvalidReferenceException.new(reference.error) unless reference.error.nil? + + context_value = matched_context.get_value_for_reference(reference) return 0.0 if context_value.nil? 
id_hash = bucketable_string_value(context_value) diff --git a/lib/ldclient-rb/impl/model/clause.rb b/lib/ldclient-rb/impl/model/clause.rb index 61d8dbf2..0227dc30 100644 --- a/lib/ldclient-rb/impl/model/clause.rb +++ b/lib/ldclient-rb/impl/model/clause.rb @@ -5,10 +5,13 @@ module LaunchDarkly module Impl module Model class Clause - def initialize(data) + def initialize(data, logger) @data = data @context_kind = data[:contextKind] - @attribute = data[:attribute] + @attribute = (@context_kind.nil? || @context_kind.empty?) ? Reference.create_literal(data[:attribute]) : Reference.create(data[:attribute]) + unless logger.nil? || @attribute.error.nil? + logger.error("[LDClient] Data inconsistency in feature flag: #{@attribute.error}") + end @op = data[:op].to_sym @values = data[:values] || [] @negate = !!data[:negate] @@ -18,7 +21,7 @@ def initialize(data) attr_reader :data # @return [String|nil] attr_reader :context_kind - # @return [String] + # @return [LaunchDarkly::Reference] attr_reader :attribute # @return [Symbol] attr_reader :op diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 5b364adb..bd6ef6d7 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -28,7 +28,7 @@ def initialize(data, logger = nil) Target.new(target_data, self, logger) end @rules = (data[:rules] || []).map.with_index do |rule_data, index| - FlagRule.new(rule_data, index, self) + FlagRule.new(rule_data, index, self, logger) end @salt = data[:salt] @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger) @@ -122,10 +122,10 @@ def initialize(data, flag, logger) end class FlagRule - def initialize(data, rule_index, flag) + def initialize(data, rule_index, flag, logger) @data = data @clauses = (data[:clauses] || []).map do |clause_data| - Clause.new(clause_data) + Clause.new(clause_data, logger) end @variation_or_rollout = VariationOrRollout.new(data[:variation], data[:rollout]) rule_id = data[:id] diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index 47bc9ab1..be464827 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -25,7 +25,7 @@ def initialize(data, logger = nil) SegmentTarget.new(target_data) end @rules = (data[:rules] || []).map do |rule_data| - SegmentRule.new(rule_data) + SegmentRule.new(rule_data, logger) end @unbounded = !!data[:unbounded] @generation = data[:generation] @@ -94,10 +94,10 @@ def initialize(data) end class SegmentRule - def initialize(data) + def initialize(data, logger) @data = data @clauses = (data[:clauses] || []).map do |clause_data| - Clause.new(clause_data) + Clause.new(clause_data, logger) end @weight = data[:weight] @bucket_by = data[:bucketBy] diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 0415376d..e7878d0b 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -60,6 +60,15 @@ expect(bucket).to be_within(0.0000001).of(0.10343106) end + it "treats the bucket by attribute as a reference when a context kind isn't specified" do + user = LaunchDarkly::LDContext.create({ key: "userKeyB", kind: "user", address: { street: "123 Easy St", city: "Anytown" } }) + bucket = subject.bucket_context(user, user.kind, "hashKey", "/address/street", "saltyA", nil) + expect(bucket).to be_within(0.0000001).of(0.56809287) + + bucket = subject.bucket_context(user, nil, "hashKey", 
"/address/street", "saltyA", nil) + expect(bucket).to be_within(0.0000001).of(0) + end + it "can bucket by int value (equivalent to string)" do user = LaunchDarkly::LDContext.create({ key: "userkey", diff --git a/spec/impl/evaluator_prereq_spec.rb b/spec/impl/evaluator_prereq_spec.rb index 87146ac7..3440f916 100644 --- a/spec/impl/evaluator_prereq_spec.rb +++ b/spec/impl/evaluator_prereq_spec.rb @@ -199,4 +199,4 @@ module Impl end end end -end \ No newline at end of file +end From 25f1c4f71e967ea301902fe066de25540a9af857 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Fri, 9 Dec 2022 08:30:47 -0600 Subject: [PATCH 274/292] Implement segment recursion and cycle detection (#220) Clauses in segment rules are now allowed to reference segments. To prevent an infinite recursion edge case, we implement a similar cycle detection mechanism as used on prerequisites. --- Makefile | 1 - lib/ldclient-rb/impl/evaluator.rb | 113 ++++++++++++++++++---------- spec/impl/evaluator_segment_spec.rb | 54 +++++++++++++ 3 files changed, 126 insertions(+), 42 deletions(-) diff --git a/Makefile b/Makefile index 1fcee3b3..280f7d9f 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,6 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # - various other "evaluation" subtests: These tests require context kind support. # - "events": These test suites will be unavailable until more of the U2C implementation is done. TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ - -skip 'evaluation/parameterized/segment recursion' \ -skip 'evaluation/parameterized/target match/context targets' \ -skip 'evaluation/parameterized/target match/multi-kind' \ -skip 'context type' \ diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index faab6678..967a2eee 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -27,6 +27,17 @@ def initialize(msg, error_kind = EvaluationReason::ERROR_MALFORMED_FLAG) class InvalidReferenceException < EvaluationException end + class EvaluatorState + # @param original_flag [LaunchDarkly::Impl::Model::FeatureFlag] + def initialize(original_flag) + @prereq_stack = EvaluatorStack.new(original_flag.key) + @segment_stack = EvaluatorStack.new(nil) + end + + attr_reader :prereq_stack + attr_reader :segment_stack + end + # # A helper class for managing cycle detection. # @@ -39,7 +50,7 @@ class InvalidReferenceException < EvaluationException # is not created until absolutely necessary. 
# class EvaluatorStack - # @param original [String] + # @param original [String, nil] def initialize(original) @original = original # @type [Array, nil] @@ -126,11 +137,11 @@ def self.error_result(errorKind, value = nil) # @param context [LaunchDarkly::LDContext] the evaluation context # @return [EvalResult] the evaluation result def evaluate(flag, context) - stack = EvaluatorStack.new(flag.key) + state = EvaluatorState.new(flag) result = EvalResult.new begin - detail = eval_internal(flag, context, result, stack) + detail = eval_internal(flag, context, result, state) rescue EvaluationException => exn LaunchDarkly::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) result.detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(exn.error_kind)) @@ -163,15 +174,15 @@ def self.make_big_segment_ref(segment) # method is visible for testing # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag # @param context [LaunchDarkly::LDContext] the evaluation context - # @param state [EvalResult] - # @param stack [EvaluatorStack] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @raise [EvaluationException] - def eval_internal(flag, context, state, stack) + def eval_internal(flag, context, eval_result, state) unless flag.on return flag.off_result end - prereq_failure_result = check_prerequisites(flag, context, state, stack) + prereq_failure_result = check_prerequisites(flag, context, eval_result, state) return prereq_failure_result unless prereq_failure_result.nil? # Check context target matches @@ -185,7 +196,7 @@ def eval_internal(flag, context, state, stack) # Check custom rules flag.rules.each do |rule| - if rule_match_context(rule, context, state) + if rule_match_context(rule, context, eval_result, state) return get_value_for_variation_or_rollout(flag, rule.variation_or_rollout, context, rule.match_results) end end @@ -200,20 +211,20 @@ def eval_internal(flag, context, state, stack) # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag # @param context [LaunchDarkly::LDContext] the evaluation context - # @param state [EvalResult] - # @param stack [EvaluatorStack] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @raise [EvaluationException] if a flag prereq cycle is detected - def check_prerequisites(flag, context, state, stack) + def check_prerequisites(flag, context, eval_result, state) return if flag.prerequisites.empty? - stack.push(flag.key) + state.prereq_stack.push(flag.key) begin flag.prerequisites.each do |prerequisite| prereq_ok = true prereq_key = prerequisite.key - if stack.include?(prereq_key) + if state.prereq_stack.include?(prereq_key) raise LaunchDarkly::Impl::EvaluationException.new( "prerequisite relationship to \"#{prereq_key}\" caused a circular reference; this is probably a temporary condition due to an incomplete update" ) @@ -225,15 +236,15 @@ def check_prerequisites(flag, context, state, stack) @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag.key}\"" } prereq_ok = false else - prereq_res = eval_internal(prereq_flag, context, state, stack) + prereq_res = eval_internal(prereq_flag, context, eval_result, state) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. 
if !prereq_flag.on || prereq_res.variation_index != prerequisite.variation prereq_ok = false end prereq_eval = PrerequisiteEvalRecord.new(prereq_flag, flag, prereq_res) - state.prereq_evals = [] if state.prereq_evals.nil? - state.prereq_evals.push(prereq_eval) + eval_result.prereq_evals = [] if eval_result.prereq_evals.nil? + eval_result.prereq_evals.push(prereq_eval) end unless prereq_ok @@ -241,7 +252,7 @@ def check_prerequisites(flag, context, state, stack) end end ensure - stack.pop + state.prereq_stack.pop end nil @@ -249,12 +260,12 @@ def check_prerequisites(flag, context, state, stack) # @param rule [LaunchDarkly::Impl::Model::FlagRule] # @param context [LaunchDarkly::LDContext] - # @param state [EvalResult] - # @return [Boolean] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @raise [InvalidReferenceException] - def rule_match_context(rule, context, state) + def rule_match_context(rule, context, eval_result, state) rule.clauses.each do |clause| - return false unless clause_match_context(clause, context, state) + return false unless clause_match_context(clause, context, eval_result, state) end true @@ -262,16 +273,22 @@ def rule_match_context(rule, context, state) # @param clause [LaunchDarkly::Impl::Model::Clause] # @param context [LaunchDarkly::LDContext] - # @param state [EvalResult] - # @return [Boolean] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @raise [InvalidReferenceException] - def clause_match_context(clause, context, state) + def clause_match_context(clause, context, eval_result, state) # In the case of a segment match operator, we check if the context is in any of the segments, # and possibly negate if clause.op == :segmentMatch result = clause.values.any? { |v| + if state.segment_stack.include?(v) + raise LaunchDarkly::Impl::EvaluationException.new( + "segment rule referencing segment \"#{v}\" caused a circular reference; this is probably a temporary condition due to an incomplete update" + ) + end + segment = @get_segment.call(v) - !segment.nil? && segment_match_context(segment, context, state) + !segment.nil? && segment_match_context(segment, context, eval_result, state) } clause.negate ? !result : result else @@ -335,45 +352,52 @@ def clause_match_context_no_segments(clause, context) # @param segment [LaunchDarkly::Impl::Model::Segment] # @param context [LaunchDarkly::LDContext] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @return [Boolean] - def segment_match_context(segment, context, state) - segment.unbounded ? big_segment_match_context(segment, context, state) : simple_segment_match_context(segment, context, true) + def segment_match_context(segment, context, eval_result, state) + return big_segment_match_context(segment, context, eval_result, state) if segment.unbounded + + simple_segment_match_context(segment, context, true, eval_result, state) end # @param segment [LaunchDarkly::Impl::Model::Segment] # @param context [LaunchDarkly::LDContext] + # @param eval_result [EvalResult] + # @param state [EvaluatorState] # @return [Boolean] - def big_segment_match_context(segment, context, state) + def big_segment_match_context(segment, context, eval_result, state) unless segment.generation # Big segment queries can only be done if the generation is known. If it's unset, # that probably means the data store was populated by an older SDK that doesn't know # about the generation property and therefore dropped it from the JSON data. We'll treat # that as a "not configured" condition. 
- state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + eval_result.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED return false end - unless state.big_segments_status + unless eval_result.big_segments_status result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(context.key) if result - state.big_segments_membership = result.membership - state.big_segments_status = result.status + eval_result.big_segments_membership = result.membership + eval_result.big_segments_status = result.status else - state.big_segments_membership = nil - state.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED + eval_result.big_segments_membership = nil + eval_result.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED end end segment_ref = Evaluator.make_big_segment_ref(segment) - membership = state.big_segments_membership + membership = eval_result.big_segments_membership included = membership.nil? ? nil : membership[segment_ref] return included unless included.nil? - simple_segment_match_context(segment, context, false) + simple_segment_match_context(segment, context, false, eval_result, state) end # @param segment [LaunchDarkly::Impl::Model::Segment] # @param context [LaunchDarkly::LDContext] # @param use_includes_and_excludes [Boolean] + # @param state [EvaluatorState] # @return [Boolean] - def simple_segment_match_context(segment, context, use_includes_and_excludes) + def simple_segment_match_context(segment, context, use_includes_and_excludes, eval_result, state) if use_includes_and_excludes if EvaluatorHelpers.context_key_in_target_list(context, nil, segment.included) return true @@ -396,8 +420,15 @@ def simple_segment_match_context(segment, context, use_includes_and_excludes) end end - segment.rules.each do |r| - return true if segment_rule_match_context(r, context, segment.key, segment.salt) + rules = segment.rules + state.segment_stack.push(segment.key) unless rules.empty? 
+ + begin + rules.each do |r| + return true if segment_rule_match_context(r, context, segment.key, segment.salt, eval_result, state) + end + ensure + state.segment_stack.pop end false @@ -409,9 +440,9 @@ def simple_segment_match_context(segment, context, use_includes_and_excludes) # @param salt [String] # @return [Boolean] # @raise [InvalidReferenceException] - def segment_rule_match_context(rule, context, segment_key, salt) + def segment_rule_match_context(rule, context, segment_key, salt, eval_result, state) rule.clauses.each do |c| - return false unless clause_match_context_no_segments(c, context) + return false unless clause_match_context(c, context, eval_result, state) end # If the weight is absent, this rule matches diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index a5104e7d..0aac6890 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -138,6 +138,60 @@ def test_segment_match(segment, context) segment = SegmentBuilder.new('segkey').rule(segRule).build expect(test_segment_match(segment, user)).to be false end + + (1..4).each do |depth| + it "can handle segments referencing other segments" do + context = LDContext.with_key("context") + segments = [] + (0...depth).each do |i| + builder = SegmentBuilder.new("segmentkey#{i}") + if i == depth - 1 + builder.included(context.key) + else + clause = Clauses.match_segment("segmentkey#{i + 1}") + builder.rule( + SegmentRuleBuilder.new.clause(clause) + ) + end + + segments << builder.build + end + + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment("segmentkey0")) + + builder = EvaluatorBuilder.new(logger) + segments.each { |segment| builder.with_segment(segment) } + + evaluator = builder.build + result = evaluator.evaluate(flag, context) + expect(result.detail.value).to be(true) + + end + + it "will detect cycles in segments" do + context = LDContext.with_key("context") + segments = [] + (0...depth).each do |i| + clause = Clauses.match_segment("segmentkey#{(i + 1) % depth}") + builder = SegmentBuilder.new("segmentkey#{i}") + builder.rule( + SegmentRuleBuilder.new.clause(clause) + ) + + segments << builder.build + end + + flag = Flags.boolean_flag_with_clauses(Clauses.match_segment("segmentkey0")) + + builder = EvaluatorBuilder.new(logger) + segments.each { |segment| builder.with_segment(segment) } + + evaluator = builder.build + result = evaluator.evaluate(flag, context) + reason = EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG) + expect(result.detail.reason).to eq(reason) + end + end end end end From 4375e0dd7c774567b10bc314059119e2e521d72d Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Tue, 13 Dec 2022 13:33:17 -0600 Subject: [PATCH 275/292] Update event logic to support users to context change (#221) --- Makefile | 4 +- lib/ldclient-rb/context.rb | 64 ++++- lib/ldclient-rb/events.rb | 70 +++--- lib/ldclient-rb/impl/context.rb | 14 ++ lib/ldclient-rb/impl/context_filter.rb | 145 +++++++++++ lib/ldclient-rb/impl/event_sender.rb | 2 +- lib/ldclient-rb/impl/event_summarizer.rb | 10 +- lib/ldclient-rb/impl/event_types.rb | 29 ++- lib/ldclient-rb/ldclient.rb | 117 ++++----- lib/ldclient-rb/util.rb | 15 -- spec/context_spec.rb | 20 ++ spec/event_sender_spec.rb | 2 +- spec/events_spec.rb | 296 ++++++++++------------- spec/events_test_util.rb | 4 +- spec/impl/event_summarizer_spec.rb | 32 +-- spec/ldclient_end_to_end_spec.rb | 167 +++++++------ spec/ldclient_evaluation_spec.rb | 28 +-- spec/ldclient_events_spec.rb | 260 ++++++++++---------- spec/mock_components.rb | 2 +- 19 files changed, 710 insertions(+), 571 deletions(-) create mode 100644 lib/ldclient-rb/impl/context_filter.rb diff --git a/Makefile b/Makefile index 280f7d9f..6fdbd3f5 100644 --- a/Makefile +++ b/Makefile @@ -8,9 +8,7 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ -skip 'evaluation/parameterized/target match/context targets' \ -skip 'evaluation/parameterized/target match/multi-kind' \ - -skip 'context type' \ - -skip 'big segments' \ - -skip 'events' + -skip 'big segments' build-contract-tests: @cd contract-tests && bundle _2.2.33_ install diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 8b1532c5..5d7575c4 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -40,15 +40,22 @@ class LDContext # @return [String, nil] Returns the key for this context attr_reader :key + # @return [String, nil] Returns the fully qualified key for this context + attr_reader :fully_qualified_key + # @return [String, nil] Returns the kind for this context attr_reader :kind # @return [String, nil] Returns the error associated with this LDContext if invalid attr_reader :error + # @return [Array] Returns the private attributes associated with this LDContext + attr_reader :private_attributes + # # @private # @param key [String, nil] + # @param fully_qualified_key [String, nil] # @param kind [String, nil] # @param name [String, nil] # @param anonymous [Boolean, nil] @@ -57,13 +64,18 @@ class LDContext # @param error [String, nil] # @param contexts [Array, nil] # - def initialize(key, kind, name = nil, anonymous = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) + def initialize(key, fully_qualified_key, kind, name = nil, anonymous = nil, attributes = nil, private_attributes = nil, error = nil, contexts = nil) @key = key + @fully_qualified_key = fully_qualified_key @kind = kind @name = name @anonymous = anonymous || false @attributes = attributes - @private_attributes = private_attributes + @private_attributes = [] + (private_attributes || []).each do |attribute| + reference = Reference.create(attribute) + @private_attributes << reference if reference.error.nil? + end @error = error @contexts = contexts @is_multi = !contexts.nil? @@ -84,6 +96,41 @@ def valid? @error.nil? end + # + # Returns a hash mapping each context's kind to its key. + # + # @return [Hash] + # + def keys + return {} unless valid? + return Hash[kind, key] unless multi_kind? + + @contexts.map { |c| [c.kind, c.key] }.to_h + end + + # + # Returns an array of context kinds. 
+ # + # @return [Array] + # + def kinds + return [] unless valid? + return [kind] unless multi_kind? + + @contexts.map { |c| c.kind } + end + + # + # Return an array of top level attribute keys (excluding built-in attributes) + # + # @return [Array] + # + def get_custom_attribute_names + return [] if @attributes.nil? + + @attributes.keys + end + # # get_value looks up the value of any attribute of the Context by name. # This includes only attributes that are addressable in evaluations-- not @@ -334,7 +381,11 @@ def self.create_multi(contexts) return contexts[0] if contexts.length == 1 - new(nil, "multi", nil, false, nil, nil, nil, contexts) + full_key = contexts.sort_by(&:kind) + .map { |c| LaunchDarkly::Impl::Context::canonicalize_key_for_kind(c.kind, c.key) } + .join(":") + + new(nil, full_key, "multi", nil, false, nil, nil, nil, contexts) end # @@ -342,7 +393,7 @@ def self.create_multi(contexts) # @return [LDContext] # private_class_method def self.create_invalid_context(error) - new(nil, nil, nil, false, nil, nil, error) + new(nil, nil, nil, nil, false, nil, nil, error) end # @@ -386,7 +437,7 @@ def self.create_multi(contexts) return create_invalid_context(ERR_PRIVATE_NON_ARRAY) end - new(key.to_s, KIND_DEFAULT, name, anonymous, attributes, private_attributes) + new(key.to_s, key.to_s, KIND_DEFAULT, name, anonymous, attributes, private_attributes) end # @@ -433,7 +484,8 @@ def self.create_multi(contexts) end end - new(key.to_s, kind, name, anonymous, attributes, private_attributes) + full_key = kind == LDContext::KIND_DEFAULT ? key.to_s : LaunchDarkly::Impl::Context::canonicalize_key_for_kind(kind, key.to_s) + new(key.to_s, full_key, kind, name, anonymous, attributes, private_attributes) end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 1ad70e95..8e6cc57e 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/context_filter" require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/event_sender" require "ldclient-rb/impl/event_summarizer" @@ -30,7 +31,7 @@ module LaunchDarkly module EventProcessorMethods def record_eval_event( - user, + context, key, version = nil, variation = nil, @@ -62,11 +63,7 @@ def stop end MAX_FLUSH_WORKERS = 5 - USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :ip, :country, :email, :firstName, :lastName, - :avatar, :name ] - private_constant :MAX_FLUSH_WORKERS - private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private class NullEventProcessor @@ -147,7 +144,7 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test end def record_eval_event( - user, + context, key, version = nil, variation = nil, @@ -158,7 +155,7 @@ def record_eval_event( debug_until = nil, prereq_of = nil ) - post_to_inbox(LaunchDarkly::Impl::EvalEvent.new(timestamp, user, key, version, variation, value, reason, + post_to_inbox(LaunchDarkly::Impl::EvalEvent.new(timestamp, context, key, version, variation, value, reason, default, track_events, debug_until, prereq_of)) end @@ -228,11 +225,11 @@ def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @diagnostic_accumulator = config.diagnostic_opt_out? ? 
nil : diagnostic_accumulator @event_sender = event_sender - @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) + @context_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - @deduplicated_users = 0 + @deduplicated_contexts = 0 @events_in_last_batch = 0 outbox = EventBuffer.new(config.capacity, config.logger) @@ -260,7 +257,7 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) when FlushMessage trigger_flush(outbox, flush_workers) when FlushUsersMessage - @user_keys.clear + @context_keys.clear when DiagnosticEventMessage send_and_reset_diagnostics(outbox, diagnostic_event_workers) when TestSyncMessage @@ -317,8 +314,8 @@ def dispatch_event(event, outbox) # For each user we haven't seen before, we add an index event - unless this is already # an identify event for that user. unless will_add_full_event && @config.inline_users_in_events - if !event.user.nil? && !notice_user(event.user) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) - outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.user)) + if !event.context.nil? && !notice_context(event.context) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) + outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.context)) end end @@ -326,15 +323,16 @@ def dispatch_event(event, outbox) outbox.add_event(debug_event) unless debug_event.nil? end - # Add to the set of users we've noticed, and return true if the user was already known to us. - def notice_user(user) - if user.nil? || !user.has_key?(:key) - true - else - known = @user_keys.add(user[:key].to_s) - @deduplicated_users += 1 if known - known - end + # + # Add to the set of contexts we've noticed, and return true if the context + # was already known to us. + # @param context [LaunchDarkly::LDContext] + # @return [Boolean] + # + def notice_context(context) + known = @context_keys.add(context.fully_qualified_key) + @deduplicated_contexts += 1 if known + known end def should_debug_event(event) @@ -378,8 +376,8 @@ def trigger_flush(outbox, flush_workers) def send_and_reset_diagnostics(outbox, diagnostic_event_workers) return if @diagnostic_accumulator.nil? dropped_count = outbox.get_and_clear_dropped_count - event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_users, @events_in_last_batch) - @deduplicated_users = 0 + event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_contexts, @events_in_last_batch) + @deduplicated_contexts = 0 @events_in_last_batch = 0 send_diagnostic_event(event, diagnostic_event_workers) end @@ -456,7 +454,7 @@ class EventOutputFormatter def initialize(config) @inline_users = config.inline_users_in_events - @user_filter = UserFilter.new(config) + @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) end # Transforms events into the format used for event sending. @@ -482,8 +480,7 @@ def make_output_events(events, summary) out[:variation] = event.variation unless event.variation.nil? out[:version] = event.version unless event.version.nil? out[:prereqOf] = event.prereq_of unless event.prereq_of.nil? - set_opt_context_kind(out, event.user) - set_user_or_user_key(out, event.user) + out[:contextKeys] = event.context.keys out[:reason] = event.reason unless event.reason.nil? 
out @@ -491,8 +488,8 @@ def make_output_events(events, summary) { kind: IDENTIFY_KIND, creationDate: event.timestamp, - key: event.user[:key].to_s, - user: process_user(event.user), + key: event.context.fully_qualified_key, + context: @context_filter.filter(event.context), } when LaunchDarkly::Impl::CustomEvent @@ -502,16 +499,15 @@ def make_output_events(events, summary) key: event.key, } out[:data] = event.data unless event.data.nil? - set_user_or_user_key(out, event.user) + out[:contextKeys] = event.context.keys out[:metricValue] = event.metric_value unless event.metric_value.nil? - set_opt_context_kind(out, event.user) out when LaunchDarkly::Impl::IndexEvent { kind: INDEX_KIND, creationDate: event.timestamp, - user: process_user(event.user), + context: @context_filter.filter(event.context), } when LaunchDarkly::Impl::DebugEvent @@ -520,14 +516,13 @@ def make_output_events(events, summary) kind: DEBUG_KIND, creationDate: original.timestamp, key: original.key, - user: process_user(original.user), + context: @context_filter.filter(original.context), value: original.value, } out[:default] = original.default unless original.default.nil? out[:variation] = original.variation unless original.variation.nil? out[:version] = original.version unless original.version.nil? out[:prereqOf] = original.prereq_of unless original.prereq_of.nil? - set_opt_context_kind(out, original.user) out[:reason] = original.reason unless original.reason.nil? out @@ -556,7 +551,7 @@ def make_output_events(events, summary) counters.push(c) end end - flags[flagKey] = { default: flagInfo.default, counters: counters } + flags[flagKey] = { default: flagInfo.default, counters: counters, contextKinds: flagInfo.context_kinds.to_a } end { kind: SUMMARY_KIND, @@ -572,16 +567,11 @@ def make_output_events(events, summary) private def set_user_or_user_key(out, user) if @inline_users - out[:user] = process_user(user) + out[:user] = @context_filter.filter(user) else key = user[:key] out[:userKey] = key.is_a?(String) ? key : key.to_s end end - - private def process_user(user) - filtered = @user_filter.transform_user_props(user) - Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) - end end end diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb index ed80f032..dea91e57 100644 --- a/lib/ldclient-rb/impl/context.rb +++ b/lib/ldclient-rb/impl/context.rb @@ -75,6 +75,20 @@ def self.validate_anonymous(anonymous, allow_nil) ERR_ANONYMOUS_NON_BOOLEAN end + + # + # @param kind [String] + # @param key [String] + # @return [String] + # + def self.canonicalize_key_for_kind(kind, key) + # When building a FullyQualifiedKey, ':' and '%' are percent-escaped; + # we do not use a full URL-encoding function because implementations of + # this are inconsistent across platforms. 
+ encoded = key.gsub("%", "%25").gsub(":", "%3A") + + "#{kind}:#{encoded}" + end end end end diff --git a/lib/ldclient-rb/impl/context_filter.rb b/lib/ldclient-rb/impl/context_filter.rb new file mode 100644 index 00000000..510fe28c --- /dev/null +++ b/lib/ldclient-rb/impl/context_filter.rb @@ -0,0 +1,145 @@ +module LaunchDarkly + module Impl + class ContextFilter + # + # @param all_attributes_private [Boolean] + # @param private_attributes [Array] + # + def initialize(all_attributes_private, private_attributes) + @all_attributes_private = all_attributes_private + + @private_attributes = [] + private_attributes.each do |attribute| + reference = LaunchDarkly::Reference.create(attribute) + @private_attributes << reference if reference.error.nil? + end + end + + # + # Return a hash representation of the provided context with attribute + # redaction applied. + # + # @param context [LaunchDarkly::LDContext] + # @return [Hash] + # + def filter(context) + return filter_single_context(context, true) unless context.multi_kind? + + filtered = {kind: 'multi'} + (0...context.individual_context_count).each do |i| + c = context.individual_context(i) + next if c.nil? + + filtered[c.kind] = filter_single_context(c, false) + end + + filtered + end + + # + # Apply redaction rules for a single context. + # + # @param context [LaunchDarkly::LDContext] + # @param include_kind [Boolean] + # @return [Hash] + # + private def filter_single_context(context, include_kind) + filtered = {key: context.key} + + filtered[:kind] = context.kind if include_kind + filtered[:anonymous] = true if context.get_value(:anonymous) + + redacted = [] + private_attributes = @private_attributes.concat(context.private_attributes) + + name = context.get_value(:name) + if !name.nil? && !check_whole_attribute_private(:name, private_attributes, redacted) + filtered[:name] = name + end + + context.get_custom_attribute_names.each do |attribute| + unless check_whole_attribute_private(attribute, private_attributes, redacted) + value = context.get_value(attribute) + filtered[attribute] = redact_json_value(nil, attribute, value, private_attributes, redacted) + end + end + + filtered[:_meta] = {redactedAttributes: redacted} unless redacted.empty? + + filtered + end + + # + # Check if an entire attribute should be redacted. + # + # @param attribute [Symbol] + # @param private_attributes [Array] + # @param redacted [Array] + # @return [Boolean] + # + private def check_whole_attribute_private(attribute, private_attributes, redacted) + if @all_attributes_private + redacted << attribute + return true + end + + private_attributes.each do |private_attribute| + if private_attribute.component(0) == attribute && private_attribute.depth == 1 + redacted << attribute + return true + end + end + + false + end + + # + # Apply redaction rules to the provided value. 
+ # + # @param parent_path [Array, nil] + # @param name [String] + # @param value [any] + # @param private_attributes [Array] + # @param redacted [Array] + # @return [any] + # + private def redact_json_value(parent_path, name, value, private_attributes, redacted) + return value unless value.is_a?(Hash) + + ret = {} + current_path = parent_path.clone || [] + current_path << name + + value.each do |k, v| + was_redacted = false + private_attributes.each do |private_attribute| + next unless private_attribute.depth == (current_path.count + 1) + + component = private_attribute.component(current_path.count) + next unless component == k + + match = true + (0...current_path.count).each do |i| + unless private_attribute.component(i) == current_path[i] + match = false + break + end + end + + if match + redacted << private_attribute.raw_path.to_sym + was_redacted = true + break + end + end + + unless was_redacted + ret[k] = redact_json_value(current_path, k, v, private_attributes, redacted) + end + end + + ret + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 49552f39..76395a1c 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -8,7 +8,7 @@ module Impl EventSenderResult = Struct.new(:success, :must_shutdown, :time_from_server) class EventSender - CURRENT_SCHEMA_VERSION = 3 + CURRENT_SCHEMA_VERSION = 4 DEFAULT_RETRY_INTERVAL = 1 def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETRY_INTERVAL) diff --git a/lib/ldclient-rb/impl/event_summarizer.rb b/lib/ldclient-rb/impl/event_summarizer.rb index 8a66a4d9..32703e69 100644 --- a/lib/ldclient-rb/impl/event_summarizer.rb +++ b/lib/ldclient-rb/impl/event_summarizer.rb @@ -1,10 +1,11 @@ require "ldclient-rb/impl/event_types" +require "set" module LaunchDarkly module Impl EventSummary = Struct.new(:start_date, :end_date, :counters) - EventSummaryFlagInfo = Struct.new(:default, :versions) + EventSummaryFlagInfo = Struct.new(:default, :versions, :context_kinds) EventSummaryFlagVariationCounter = Struct.new(:value, :count) @@ -26,20 +27,25 @@ def summarize_event(event) counters_for_flag = @counters[event.key] if counters_for_flag.nil? - counters_for_flag = EventSummaryFlagInfo.new(event.default, Hash.new) + counters_for_flag = EventSummaryFlagInfo.new(event.default, Hash.new, Set.new) @counters[event.key] = counters_for_flag end + counters_for_flag_version = counters_for_flag.versions[event.version] if counters_for_flag_version.nil? counters_for_flag_version = Hash.new counters_for_flag.versions[event.version] = counters_for_flag_version end + + counters_for_flag.context_kinds.merge(event.context.kinds) + variation_counter = counters_for_flag_version[event.variation] if variation_counter.nil? counters_for_flag_version[event.variation] = EventSummaryFlagVariationCounter.new(event.value, 1) else variation_counter.count = variation_counter.count + 1 end + time = event.timestamp unless time.nil? 
@start_date = time if @start_date == 0 || time < @start_date diff --git a/lib/ldclient-rb/impl/event_types.rb b/lib/ldclient-rb/impl/event_types.rb index d2152767..1be03eb8 100644 --- a/lib/ldclient-rb/impl/event_types.rb +++ b/lib/ldclient-rb/impl/event_types.rb @@ -1,20 +1,23 @@ module LaunchDarkly module Impl class Event - def initialize(timestamp, user) + # @param timestamp [Integer] + # @param context [LaunchDarkly::LDContext] + def initialize(timestamp, context) @timestamp = timestamp - @user = user + @context = context end + # @return [Integer] attr_reader :timestamp - attr_reader :kind - attr_reader :user + # @return [LaunchDarkly::LDContext] + attr_reader :context end class EvalEvent < Event - def initialize(timestamp, user, key, version = nil, variation = nil, value = nil, reason = nil, default = nil, + def initialize(timestamp, context, key, version = nil, variation = nil, value = nil, reason = nil, default = nil, track_events = false, debug_until = nil, prereq_of = nil) - super(timestamp, user) + super(timestamp, context) @key = key @version = version @variation = variation @@ -39,14 +42,14 @@ def initialize(timestamp, user, key, version = nil, variation = nil, value = nil end class IdentifyEvent < Event - def initialize(timestamp, user) - super(timestamp, user) + def initialize(timestamp, context) + super(timestamp, context) end end class CustomEvent < Event - def initialize(timestamp, user, key, data = nil, metric_value = nil) - super(timestamp, user) + def initialize(timestamp, context, key, data = nil, metric_value = nil) + super(timestamp, context) @key = key @data = data unless data.nil? @metric_value = metric_value unless metric_value.nil? @@ -58,14 +61,14 @@ def initialize(timestamp, user, key, data = nil, metric_value = nil) end class IndexEvent < Event - def initialize(timestamp, user) - super(timestamp, user) + def initialize(timestamp, context) + super(timestamp, context) end end class DebugEvent < Event def initialize(eval_event) - super(eval_event.timestamp, eval_event.user) + super(eval_event.timestamp, eval_event.context) @eval_event = eval_event end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 688f6136..3cdd8f27 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -153,31 +153,7 @@ def initialized? end # - # TODO: TKTK - # - # Determines the variation of a feature flag to present to a user. - # - # At a minimum, the user hash should contain a `:key`, which should be the unique - # identifier for your user (or, for an anonymous user, a session identifier or - # cookie). - # - # Other supported user attributes include IP address, country code, and an arbitrary hash of - # custom attributes. For more about the supported user properties and how they work in - # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/home/flags/targeting-users). - # - # The optional `:privateAttributeNames` user property allows you to specify a list of - # attribute names that should not be sent back to LaunchDarkly. - # [Private attributes](https://docs.launchdarkly.com/home/users/attributes#creating-private-user-attributes) - # can also be configured globally in {Config}. 
- # - # @example Basic user hash - # {key: "my-user-id"} - # - # @example More complete user hash - # {key: "my-user-id", ip: "127.0.0.1", country: "US", custom: {customer_rank: 1000}} - # - # @example User with a private attribute - # {key: "my-user-id", email: "email@example.com", privateAttributeNames: ["email"]} + # Determines the variation of a feature flag to present for a context. # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard @@ -185,16 +161,14 @@ def initialized? # @param default the default value of the flag; this is used if there is an error # condition making it impossible to find or evaluate the flag # - # @return the variation to show the user, or the default value if there's an an error + # @return the variation for the provided context, or the default value if there's an an error # def variation(key, context, default) evaluate_internal(key, context, default, false).value end # - # TODO: TKTK - # - # Determines the variation of a feature flag for a user, like {#variation}, but also + # Determines the variation of a feature flag for a context, like {#variation}, but also # provides additional information about how this value was calculated. # # The return value of `variation_detail` is an {EvaluationDetail} object, which has @@ -221,27 +195,32 @@ def variation_detail(key, context, default) end # - # Registers the user. This method simply creates an analytics event containing the user - # properties, so that LaunchDarkly will know about that user if it does not already. + # Registers the context. This method simply creates an analytics event containing the context + # properties, so that LaunchDarkly will know about that context if it does not already. # - # Calling {#variation} or {#variation_detail} also sends the user information to + # Calling {#variation} or {#variation_detail} also sends the context information to # LaunchDarkly (if events are enabled), so you only need to use {#identify} if you - # want to identify the user without evaluating a flag. + # want to identify the context without evaluating a flag. # # Note that event delivery is asynchronous, so the event may not actually be sent # until later; see {#flush}. # - # @param user [Hash] The user to register; this can have all the same user properties - # described in {#variation} + # @param context [Hash, LDContext] a hash or object describing the context to register # @return [void] # - def identify(user) - if !user || user[:key].nil? || user[:key].empty? - @config.logger.warn("Identify called with nil user or empty user key!") + def identify(context) + context = LaunchDarkly::Impl::Context.make_context(context) + unless context.valid? + @config.logger.warn("Identify called with invalid context: #{context.error}") + return + end + + if context.key == "" + @config.logger.warn("Identify called with empty key") return end - sanitize_user(user) - @event_processor.record_identify_event(user) + + @event_processor.record_identify_event(context) end # @@ -257,8 +236,7 @@ def identify(user) # for the latest status. 
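    # Illustrative usage (example values):
    #
    #   context = LaunchDarkly::LDContext.create({ key: "user-key" })
    #   client.track("completed-purchase", context, { price: 320 }, 99.77)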
# # @param event_name [String] The name of the event - # @param user [Hash] The user to register; this can have all the same user properties - # described in {#variation} + # @param context [Hash, LDContext] a hash or object describing the context to track # @param data [Hash] An optional hash containing any additional data associated with the event # @param metric_value [Number] A numeric value used by the LaunchDarkly experimentation # feature in numeric custom metrics. Can be omitted if this event is used by only @@ -266,13 +244,14 @@ def identify(user) # for Data Export. # @return [void] # - def track(event_name, user, data = nil, metric_value = nil) - if !user || user[:key].nil? - @config.logger.warn("Track called with nil user or nil user key!") + def track(event_name, context, data = nil, metric_value = nil) + context = LaunchDarkly::Impl::Context.make_context(context) + unless context.valid? + @config.logger.warn("Track called with invalid context: #{context.error}") return end - sanitize_user(user) - @event_processor.record_custom_event(user, event_name, data, metric_value) + + @event_processor.record_custom_event(context, event_name, data, metric_value) end # @@ -413,8 +392,7 @@ def evaluate_internal(key, context, default, with_reasons) else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) - # TODO: Address when working on u2c events - # record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) + record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) return detail end end @@ -424,8 +402,7 @@ def evaluate_internal(key, context, default, with_reasons) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_FLAG_NOT_FOUND, default) - # TODO: Address when working on u2c events - # record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) + record_unknown_flag_eval(key, context, default, detail.reason, with_reasons) return detail end @@ -433,30 +410,27 @@ def evaluate_internal(key, context, default, with_reasons) res = @evaluator.evaluate(feature, context) unless res.prereq_evals.nil? res.prereq_evals.each do |prereq_eval| - # TODO: Address when working on u2c events - # record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, context, prereq_eval.detail, with_reasons) + record_prereq_flag_eval(prereq_eval.prereq_flag, prereq_eval.prereq_of_flag, context, prereq_eval.detail, with_reasons) end end detail = res.detail if detail.default_value? 
detail = EvaluationDetail.new(default, nil, detail.reason) end - # TODO: Address when working on u2c events - # record_flag_eval(feature, context, detail, default, with_reasons) + record_flag_eval(feature, context, detail, default, with_reasons) detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) - # TODO: Address when working on u2c events - # record_flag_eval_error(feature, context, default, detail.reason, with_reasons) + record_flag_eval_error(feature, context, default, detail.reason, with_reasons) detail end end - private def record_flag_eval(flag, user, detail, default, with_reasons) + private def record_flag_eval(flag, context, detail, default, with_reasons) add_experiment_data = experiment?(flag, detail.reason) @event_processor.record_eval_event( - user, + context, flag[:key], flag[:version], detail.variation_index, @@ -469,10 +443,10 @@ def evaluate_internal(key, context, default, with_reasons) ) end - private def record_prereq_flag_eval(prereq_flag, prereq_of_flag, user, detail, with_reasons) + private def record_prereq_flag_eval(prereq_flag, prereq_of_flag, context, detail, with_reasons) add_experiment_data = experiment?(prereq_flag, detail.reason) @event_processor.record_eval_event( - user, + context, prereq_flag[:key], prereq_flag[:version], detail.variation_index, @@ -485,13 +459,20 @@ def evaluate_internal(key, context, default, with_reasons) ) end - private def record_flag_eval_error(flag, user, default, reason, with_reasons) - @event_processor.record_eval_event(user, flag[:key], flag[:version], nil, default, with_reasons ? reason : nil, default, + private def record_flag_eval_error(flag, context, default, reason, with_reasons) + @event_processor.record_eval_event(context, flag[:key], flag[:version], nil, default, with_reasons ? reason : nil, default, flag[:trackEvents], flag[:debugEventsUntilDate], nil) end - private def record_unknown_flag_eval(flag_key, user, default, reason, with_reasons) - @event_processor.record_eval_event(user, flag_key, nil, nil, default, with_reasons ? reason : nil, default, + # + # @param flag_key [String] + # @param context [LaunchDarkly::LDContext] + # @param default [any] + # @param reason [LaunchDarkly::EvaluationReason] + # @param with_reasons [Boolean] + # + private def record_unknown_flag_eval(flag_key, context, default, reason, with_reasons) + @event_processor.record_eval_event(context, flag_key, nil, nil, default, with_reasons ? reason : nil, default, false, nil, nil) end @@ -514,12 +495,6 @@ def evaluate_internal(key, context, default, with_reasons) end false end - - private def sanitize_user(user) - if user[:key] - user[:key] = user[:key].to_s - end - end end # diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 343bbc98..df4ae191 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,21 +4,6 @@ module LaunchDarkly # @private module Util - def self.stringify_attrs(hash, attrs) - return hash if hash.nil? - ret = hash - changed = false - attrs.each do |attr| - value = hash[attr] - if !value.nil? 
&& !value.is_a?(String) - ret = hash.clone unless changed - ret[attr] = value.to_s - changed = true - end - end - ret - end - def self.new_http_client(uri_s, config) http_client_options = {} if config.socket_factory diff --git a/spec/context_spec.rb b/spec/context_spec.rb index cf052f8d..729f49d8 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -56,6 +56,10 @@ expect(subject.create({ key: "", name: 0 }).valid?).to be false end + it "creates the correct fully qualified key" do + expect(subject.create({ key: "user-key" }).fully_qualified_key).to eq("user-key") + end + it "requires privateAttributeNames to be an array" do context = { key: "user-key", @@ -137,6 +141,11 @@ } expect(subject.create(context).valid?).to be false end + + it "creates the correct fully qualified key" do + expect(subject.create({ key: "user-key", kind: "user" }).fully_qualified_key).to eq("user-key") + expect(subject.create({ key: "org-key", kind: "org" }).fully_qualified_key).to eq("org:org-key") + end end describe "multi-kind contexts" do @@ -193,6 +202,17 @@ expect(invalid_context.valid?).to be false expect(multi_context.valid?).to be false end + + it "creates the correct fully qualified key" do + user_context = subject.create({ key: "a-user-key" }) + org_context = subject.create({ key: "b-org-key", kind: "org" }) + user_first = subject.create_multi([user_context, org_context]) + org_first = subject.create_multi([org_context, user_context]) + + # Verify we are sorting contexts by kind when generating the canonical key + expect(user_first.fully_qualified_key).to eq("org:b-org-key:user:a-user-key") + expect(org_first.fully_qualified_key).to eq("org:b-org-key:user:a-user-key") + end end end diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 6b7e323f..a8325ff1 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -43,7 +43,7 @@ def with_sender_and_server "authorization" => [ sdk_key ], "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "x-launchdarkly-event-schema" => [ "3" ], + "x-launchdarkly-event-schema" => [ "4" ], "x-launchdarkly-tags" => [ "application-id/id application-version/version" ], "connection" => [ "Keep-Alive" ], }) diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 1e875074..adfe7e26 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -11,12 +11,7 @@ let(:starting_timestamp) { 1000 } let(:default_config_opts) { { diagnostic_opt_out: true, logger: $null_log } } let(:default_config) { LaunchDarkly::Config.new(default_config_opts) } - let(:user) { { key: "userkey", name: "Red" } } - let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } - let(:numeric_user) { { key: 1, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, - avatar: 8, name: 9, anonymous: false, custom: { age: 99 } } } - let(:stringified_numeric_user) { { key: '1', ip: '3', country: '4', email: '5', firstName: '6', - lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } + let(:context) { LaunchDarkly::LDContext.create({ kind: "user", key: "userkey", name: "Red" }) } def with_processor_and_sender(config) sender = FakeEventSender.new @@ -38,112 +33,61 @@ def with_processor_and_sender(config) it "queues identify event" do with_processor_and_sender(default_config) do |ep, sender| - ep.record_identify_event(user) + ep.record_identify_event(context) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(eq(identify_event(user))) + expect(output).to 
contain_exactly(eq(identify_event(default_config, context))) end end - it "filters user in identify event" do + it "filters context in identify event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| - ep.record_identify_event(user) + ep.record_identify_event(context) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(eq(identify_event(filtered_user))) - end - end - - it "stringifies built-in user attributes in identify event" do - with_processor_and_sender(default_config) do |ep, sender| - ep.record_identify_event(numeric_user) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(eq(identify_event(stringified_numeric_user))) + expect(output).to contain_exactly(eq(identify_event(config, context))) end end it "queues individual feature event with index event" do with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), - eq(feature_event(flag, user, 1, 'value')), + eq(index_event(default_config, context)), + eq(feature_event(flag, context, 1, 'value')), include(:kind => "summary") ) end end - it "filters user in index event" do + it "filters context in index event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(index_event(filtered_user)), - eq(feature_event(flag, user, 1, 'value')), - include(:kind => "summary") - ) - end - end - - it "stringifies built-in user attributes in index event" do - with_processor_and_sender(default_config) do |ep, sender| - flag = { key: "flagkey", version: 11 } - ep.record_eval_event(numeric_user, 'flagkey', 11, 1, 'value', nil, nil, true) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(index_event(stringified_numeric_user)), - eq(feature_event(flag, stringified_numeric_user, 1, 'value')), - include(:kind => "summary") - ) - end - end - - it "can include inline user in feature event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) - with_processor_and_sender(config) do |ep, sender| - flag = { key: "flagkey", version: 11 } - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(feature_event(flag, user, 1, 'value', true)), - include(:kind => "summary") - ) - end - end - - it "stringifies built-in user attributes in feature event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) - with_processor_and_sender(config) do |ep, sender| - flag = { key: "flagkey", version: 11 } - ep.record_eval_event(numeric_user, 'flagkey', 11, 1, 'value', nil, nil, true) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(feature_event(flag, stringified_numeric_user, 1, 'value', true)), + eq(index_event(config, context)), + 
eq(feature_event(flag, context, 1, 'value')), include(:kind => "summary") ) end end - it "filters user in feature event" do + it "filters context in feature event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(feature_event(flag, filtered_user, 1, 'value', true)), + eq(feature_event(flag, context, 1, 'value', true)), include(:kind => "summary") ) end @@ -152,12 +96,11 @@ def with_processor_and_sender(config) it "still generates index event if inline_users is true but feature event was not tracked" do config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| - flag = { key: "flagkey", version: 11 } - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, false) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), + eq(index_event(config, context)), include(:kind => "summary") ) end @@ -167,12 +110,12 @@ def with_processor_and_sender(config) with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, future_time) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, false, future_time) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), - eq(debug_event(flag, user, 1, 'value')), + eq(index_event(default_config, context)), + eq(debug_event(default_config, flag, context, 1, 'value')), include(:kind => "summary") ) end @@ -182,13 +125,13 @@ def with_processor_and_sender(config) with_processor_and_sender(default_config) do |ep, sender| flag = { key: "flagkey", version: 11 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, true, future_time) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, true, future_time) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), - eq(feature_event(flag, user, 1, 'value')), - eq(debug_event(flag, user, 1, 'value')), + eq(index_event(default_config, context)), + eq(feature_event(flag, context, 1, 'value')), + eq(debug_event(default_config, flag, context, 1, 'value')), include(:kind => "summary") ) end @@ -202,14 +145,14 @@ def with_processor_and_sender(config) # Send and flush an event we don't care about, just to set the last server time sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) - ep.record_identify_event(user) + ep.record_identify_event(context) flush_and_get_events(ep, sender) # Now send an event with debug mode on, with a "debug until" time that is further in # the future than the server time, but in the past compared to the client. 
flag = { key: "flagkey", version: 11 } debug_until = (server_time.to_f * 1000).to_i + 1000 - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) # Should get a summary event only, not a full feature event output = flush_and_get_events(ep, sender) @@ -226,14 +169,14 @@ def with_processor_and_sender(config) # Send and flush an event we don't care about, just to set the last server time sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) - ep.record_identify_event(user) + ep.record_identify_event(context) flush_and_get_events(ep, sender) # Now send an event with debug mode on, with a "debug until" time that is further in # the future than the server time, but in the past compared to the client. flag = { key: "flagkey", version: 11 } debug_until = (server_time.to_f * 1000).to_i - 1000 - ep.record_eval_event(user, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) + ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, false, debug_until) # Should get a summary event only, not a full feature event output = flush_and_get_events(ep, sender) @@ -243,19 +186,19 @@ def with_processor_and_sender(config) end end - it "generates only one index event for multiple events with same user" do + it "generates only one index event for multiple events with same context" do with_processor_and_sender(default_config) do |ep, sender| flag1 = { key: "flagkey1", version: 11 } flag2 = { key: "flagkey2", version: 22 } future_time = (Time.now.to_f * 1000).to_i + 1000000 - ep.record_eval_event(user, 'flagkey1', 11, 1, 'value', nil, nil, true) - ep.record_eval_event(user, 'flagkey2', 22, 1, 'value', nil, nil, true) + ep.record_eval_event(context, 'flagkey1', 11, 1, 'value', nil, nil, true) + ep.record_eval_event(context, 'flagkey2', 22, 1, 'value', nil, nil, true) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), - eq(feature_event(flag1, user, 1, 'value', false, starting_timestamp)), - eq(feature_event(flag2, user, 1, 'value', false, starting_timestamp + 1)), + eq(index_event(default_config, context)), + eq(feature_event(flag1, context, 1, 'value', false, starting_timestamp)), + eq(feature_event(flag2, context, 1, 'value', false, starting_timestamp + 1)), include(:kind => "summary") ) end @@ -263,27 +206,26 @@ def with_processor_and_sender(config) it "summarizes non-tracked events" do with_processor_and_sender(default_config) do |ep, sender| - flag1 = { key: "flagkey1", version: 11 } - flag2 = { key: "flagkey2", version: 22 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - ep.record_eval_event(user, 'flagkey1', 11, 1, 'value1', nil, 'default1', false) - ep.record_eval_event(user, 'flagkey2', 22, 2, 'value2', nil, 'default2', false) + ep.record_eval_event(context, 'flagkey1', 11, 1, 'value1', nil, 'default1', false) + ep.record_eval_event(context, 'flagkey2', 22, 2, 'value2', nil, 'default2', false) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), + eq(index_event(default_config, context)), eq({ kind: "summary", startDate: starting_timestamp, endDate: starting_timestamp + 1, features: { flagkey1: { + contextKinds: ["user"], default: "default1", counters: [ { version: 11, variation: 1, value: "value1", count: 1 }, ], }, flagkey2: { + contextKinds: ["user"], default: "default2", counters: [ { version: 22, variation: 2, value: "value2", count: 
1 }, @@ -295,72 +237,48 @@ def with_processor_and_sender(config) end end - it "queues custom event with user" do + it "queues custom event with context" do with_processor_and_sender(default_config) do |ep, sender| - ep.record_custom_event(user, 'eventkey', { thing: 'stuff' }, 1.5) + ep.record_custom_event(context, 'eventkey', { thing: 'stuff' }, 1.5) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(index_event(user)), - eq(custom_event(user, 'eventkey', { thing: 'stuff' }, 1.5)) + eq(index_event(default_config, context)), + eq(custom_event(context, 'eventkey', { thing: 'stuff' }, 1.5)) ) end end - it "can include inline user in custom event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) - with_processor_and_sender(config) do |ep, sender| - ep.record_custom_event(user, 'eventkey') - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(custom_event(user, 'eventkey', nil, nil, true)) - ) - end - end - - it "filters user in custom event" do + it "filters context in custom event" do config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) with_processor_and_sender(config) do |ep, sender| - ep.record_custom_event(user, 'eventkey') + ep.record_custom_event(context, 'eventkey') output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(custom_event(filtered_user, 'eventkey', nil, nil, true)) - ) - end - end - - it "stringifies built-in user attributes in custom event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) - with_processor_and_sender(config) do |ep, sender| - ep.record_custom_event(numeric_user, 'eventkey', nil, nil) - - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(custom_event(stringified_numeric_user, 'eventkey', nil, nil, true)) + eq(custom_event(context, 'eventkey', nil, nil, true)) ) end end it "treats nil value for custom the same as an empty hash" do with_processor_and_sender(default_config) do |ep, sender| - user_with_nil_custom = { key: "userkey", custom: nil } + user_with_nil_custom = LaunchDarkly::LDContext.create({ key: "userkey", custom: nil }) ep.record_identify_event(user_with_nil_custom) output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly(eq(identify_event(user_with_nil_custom))) + expect(output).to contain_exactly(eq(identify_event(default_config, user_with_nil_custom))) end end it "does a final flush when shutting down" do with_processor_and_sender(default_config) do |ep, sender| - ep.record_identify_event(user) + ep.record_identify_event(context) ep.stop output = sender.analytics_payloads.pop - expect(output).to contain_exactly(eq(identify_event(user))) + expect(output).to contain_exactly(eq(identify_event(default_config, context))) end end @@ -375,10 +293,10 @@ def with_processor_and_sender(config) it "stops posting events after unrecoverable error" do with_processor_and_sender(default_config) do |ep, sender| sender.result = LaunchDarkly::Impl::EventSenderResult.new(false, true, nil) - e = ep.record_identify_event(user) + e = ep.record_identify_event(context) flush_and_get_events(ep, sender) - ep.record_identify_event(user) + ep.record_identify_event(context) ep.flush ep.wait_until_inactive expect(sender.analytics_payloads.empty?).to be true @@ -432,7 +350,7 @@ def with_diagnostic_processor_and_sender(config) init_event = sender.diagnostic_payloads.pop 
3.times do - ep.record_identify_event(user) + ep.record_identify_event(context) end flush_and_get_events(ep, sender) @@ -445,13 +363,13 @@ def with_diagnostic_processor_and_sender(config) end end - it "counts deduplicated users" do + it "counts deduplicated contexts" do with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| - init_event = sender.diagnostic_payloads.pop + sender.diagnostic_payloads.pop - ep.record_custom_event(user, 'event1') - ep.record_custom_event(user, 'event2') - events = flush_and_get_events(ep, sender) + ep.record_custom_event(context, 'event1') + ep.record_custom_event(context, 'event2') + flush_and_get_events(ep, sender) periodic_event = sender.diagnostic_payloads.pop expect(periodic_event).to include({ @@ -462,66 +380,102 @@ def with_diagnostic_processor_and_sender(config) end end - def index_event(user, timestamp = starting_timestamp) - { + # + # @param config [LaunchDarkly::Config] + # @param context [LaunchDarkly::LDContext] + # @param timestamp [Integer] + # @return [Hash] + # + def index_event(config, context, timestamp = starting_timestamp) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + out = { kind: "index", creationDate: timestamp, - user: user, + context: context_filter.filter(context), } + JSON.parse(out.to_json, symbolize_names: true) end - def identify_event(user, timestamp = starting_timestamp) - { + # + # @param config [LaunchDarkly::Config] + # @param context [LaunchDarkly::LDContext] + # @param timestamp [Integer] + # @return [Hash] + # + def identify_event(config, context, timestamp = starting_timestamp) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + out = { kind: "identify", creationDate: timestamp, - key: user[:key], - user: user, + key: context.fully_qualified_key, + context: context_filter.filter(context), } - end - - def feature_event(flag, user, variation, value, inline_user = false, timestamp = starting_timestamp) + JSON.parse(out.to_json, symbolize_names: true) + end + + # + # @param flag [Hash] + # @param context [LaunchDarkly::LDContext] + # @param variation [Integer] + # @param value [any] + # @param timestamp [Integer] + # @return [Hash] + # + def feature_event(flag, context, variation, value, inline_user = false, timestamp = starting_timestamp) out = { kind: 'feature', creationDate: timestamp, + contextKeys: context.keys, key: flag[:key], variation: variation, version: flag[:version], value: value, } - if inline_user - out[:user] = user - else - out[:userKey] = user[:key] - end - out - end - - def debug_event(flag, user, variation, value, timestamp = starting_timestamp) - { + JSON.parse(out.to_json, symbolize_names: true) + end + + # + # @param config [LaunchDarkly::Config] + # @param flag [Hash] + # @param context [LaunchDarkly::LDContext] + # @param variation [Integer] + # @param value [any] + # @param timestamp [Integer] + # @return [Hash] + # + def debug_event(config, flag, context, variation, value, timestamp = starting_timestamp) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + out = { kind: 'debug', creationDate: timestamp, key: flag[:key], variation: variation, version: flag[:version], value: value, - user: user, + context: context_filter.filter(context), } - end - - def custom_event(user, key, data, metric_value, inline_user = false, timestamp = starting_timestamp) + JSON.parse(out.to_json, 
symbolize_names: true) + end + + # + # @param context [LaunchDarkly::LDContext] + # @param key [String] + # @param data [any] + # @param metric_value [any] + # @param timestamp [Integer] + # @return [Hash] + # + def custom_event(context, key, data, metric_value, inline_user = false, timestamp = starting_timestamp) out = { kind: "custom", creationDate: timestamp, + contextKeys: context.keys, key: key, } out[:data] = data unless data.nil? - if inline_user - out[:user] = user - else - out[:userKey] = user[:key] - end out[:metricValue] = metric_value unless metric_value.nil? - out + + JSON.parse(out.to_json, symbolize_names: true) end def flush_and_get_events(ep, sender) diff --git a/spec/events_test_util.rb b/spec/events_test_util.rb index 45c27795..90fab2a8 100644 --- a/spec/events_test_util.rb +++ b/spec/events_test_util.rb @@ -1,8 +1,8 @@ require "ldclient-rb/impl/event_types" -def make_eval_event(timestamp, user, key, version = nil, variation = nil, value = nil, reason = nil, +def make_eval_event(timestamp, context, key, version = nil, variation = nil, value = nil, reason = nil, default = nil, track_events = false, debug_until = nil, prereq_of = nil) - LaunchDarkly::Impl::EvalEvent.new(timestamp, user, key, version, variation, value, reason, + LaunchDarkly::Impl::EvalEvent.new(timestamp, context, key, version, variation, value, reason, default, track_events, debug_until, prereq_of) end diff --git a/spec/impl/event_summarizer_spec.rb b/spec/impl/event_summarizer_spec.rb index 2d824d91..d3eb953a 100644 --- a/spec/impl/event_summarizer_spec.rb +++ b/spec/impl/event_summarizer_spec.rb @@ -2,18 +2,19 @@ require "events_test_util" require "spec_helper" +require "set" module LaunchDarkly module Impl describe EventSummarizer do subject { EventSummarizer } - let(:user) { { key: "key" } } + let(:context) { LaunchDarkly::LDContext.create({ key: "key" }) } it "does not add identify event to summary" do es = subject.new snapshot = es.snapshot - es.summarize_event({ kind: "identify", user: user }) + es.summarize_event({ kind: "identify", context: context }) expect(es.snapshot).to eq snapshot end @@ -21,7 +22,7 @@ module Impl it "does not add custom event to summary" do es = subject.new snapshot = es.snapshot - es.summarize_event({ kind: "custom", key: "whatever", user: user }) + es.summarize_event({ kind: "custom", key: "whatever", context: context }) expect(es.snapshot).to eq snapshot end @@ -29,9 +30,9 @@ module Impl it "tracks start and end dates" do es = subject.new flag = { key: "key" } - event1 = make_eval_event(2000, user, 'flag1') - event2 = make_eval_event(1000, user, 'flag1') - event3 = make_eval_event(1500, user, 'flag1') + event1 = make_eval_event(2000, context, 'flag1') + event2 = make_eval_event(1000, context, 'flag1') + event3 = make_eval_event(1500, context, 'flag1') es.summarize_event(event1) es.summarize_event(event2) es.summarize_event(event3) @@ -45,11 +46,11 @@ module Impl es = subject.new flag1 = { key: "key1", version: 11 } flag2 = { key: "key2", version: 22 } - event1 = make_eval_event(0, user, 'key1', 11, 1, 'value1', nil, 'default1') - event2 = make_eval_event(0, user, 'key1', 11, 2, 'value2', nil, 'default1') - event3 = make_eval_event(0, user, 'key2', 22, 1, 'value99', nil, 'default2') - event4 = make_eval_event(0, user, 'key1', 11, 1, 'value99', nil, 'default1') - event5 = make_eval_event(0, user, 'badkey', nil, nil, 'default3', nil, 'default3') + event1 = make_eval_event(0, context, 'key1', 11, 1, 'value1', nil, 'default1') + event2 = make_eval_event(0, context, 'key1', 
11, 2, 'value2', nil, 'default1') + event3 = make_eval_event(0, context, 'key2', 22, 1, 'value99', nil, 'default2') + event4 = make_eval_event(0, context, 'key1', 11, 1, 'value99', nil, 'default1') + event5 = make_eval_event(0, context, 'badkey', nil, nil, 'default3', nil, 'default3') [event1, event2, event3, event4, event5].each { |e| es.summarize_event(e) } data = es.snapshot @@ -60,21 +61,24 @@ module Impl 1 => EventSummaryFlagVariationCounter.new('value1', 2), 2 => EventSummaryFlagVariationCounter.new('value2', 1), }, - } + }, + Set.new(["user"]) ), 'key2' => EventSummaryFlagInfo.new( 'default2', { 22 => { 1 => EventSummaryFlagVariationCounter.new('value99', 1), }, - } + }, + Set.new(["user"]) ), 'badkey' => EventSummaryFlagInfo.new( 'default3', { nil => { nil => EventSummaryFlagVariationCounter.new('default3', 1), }, - } + }, + Set.new(["user"]) ), } expect(data.counters).to eq expectedCounters diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index d17a54bf..9f9de608 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ b/spec/ldclient_end_to_end_spec.rb @@ -21,7 +21,7 @@ module LaunchDarkly with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be true - expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_context, false)).to be true end end end @@ -32,94 +32,91 @@ module LaunchDarkly with_client(test_config(stream: false, data_source: nil, base_uri: poll_server.base_uri.to_s)) do |client| expect(client.initialized?).to be false - expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_user, false)).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], basic_context, false)).to be false end end end - # TODO: Fix for u2c events - # it "sends event without diagnostics" do - # with_server do |events_server| - # events_server.setup_ok_response("/bulk", "") - # - # config = test_config( - # send_events: true, - # events_uri: events_server.base_uri.to_s, - # diagnostic_opt_out: true - # ) - # with_client(config) do |client| - # client.identify(basic_user) - # client.flush - # - # req, body = events_server.await_request_with_body - # expect(req.header['authorization']).to eq [ sdk_key ] - # expect(req.header['connection']).to eq [ "Keep-Alive" ] - # data = JSON.parse(body) - # expect(data.length).to eq 1 - # expect(data[0]["kind"]).to eq "identify" - # end - # end - # end - - # TODO: Fix for u2c events - # it "sends diagnostic event" do - # with_server do |events_server| - # events_server.setup_ok_response("/bulk", "") - # events_server.setup_ok_response("/diagnostic", "") - # - # config = test_config( - # send_events: true, - # events_uri: events_server.base_uri.to_s - # ) - # with_client(config) do |client| - # client.identify(basic_user) - # client.flush - # - # req0, body0 = events_server.await_request_with_body - # req1, body1 = events_server.await_request_with_body - # req = req0.path == "/diagnostic" ? req0 : req1 - # body = req0.path == "/diagnostic" ? 
body0 : body1 - # expect(req.header['authorization']).to eq [ sdk_key ] - # expect(req.header['connection']).to eq [ "Keep-Alive" ] - # data = JSON.parse(body) - # expect(data["kind"]).to eq "diagnostic-init" - # end - # end - # end - - # TODO: Fix for u2c events - # it "can use socket factory" do - # with_server do |poll_server| - # with_server do |events_server| - # events_server.setup_ok_response("/bulk", "") - # poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") - # - # config = test_config( - # stream: false, - # data_source: nil, - # send_events: true, - # base_uri: "http://fake-polling-server", - # events_uri: "http://fake-events-server", - # diagnostic_opt_out: true, - # socket_factory: SocketFactoryFromHash.new({ - # "fake-polling-server" => poll_server.port, - # "fake-events-server" => events_server.port, - # }) - # ) - # with_client(config) do |client| - # client.identify(basic_user) - # client.flush - # - # req, body = events_server.await_request_with_body - # expect(req.header['authorization']).to eq [ sdk_key ] - # expect(req.header['connection']).to eq [ "Keep-Alive" ] - # data = JSON.parse(body) - # expect(data.length).to eq 1 - # expect(data[0]["kind"]).to eq "identify" - # end - # end - # end - # end + it "sends event without diagnostics" do + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s, + diagnostic_opt_out: true + ) + with_client(config) do |client| + client.identify(basic_context) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + + it "sends diagnostic event" do + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + events_server.setup_ok_response("/diagnostic", "") + + config = test_config( + send_events: true, + events_uri: events_server.base_uri.to_s + ) + with_client(config) do |client| + client.identify(basic_context) + client.flush + + req0, body0 = events_server.await_request_with_body + req1, body1 = events_server.await_request_with_body + req = req0.path == "/diagnostic" ? req0 : req1 + body = req0.path == "/diagnostic" ? 
body0 : body1 + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data["kind"]).to eq "diagnostic-init" + end + end + end + + it "can use socket factory" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = test_config( + stream: false, + data_source: nil, + send_events: true, + base_uri: "http://fake-polling-server", + events_uri: "http://fake-events-server", + diagnostic_opt_out: true, + socket_factory: SocketFactoryFromHash.new({ + "fake-polling-server" => poll_server.port, + "fake-events-server" => events_server.port, + }) + ) + with_client(config) do |client| + client.identify(basic_context) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ sdk_key ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + end # TODO: TLS tests with self-signed cert end diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index cd4e5981..4ba71994 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -9,14 +9,14 @@ module LaunchDarkly context "variation" do it "returns the default value if the client is offline" do with_client(test_config(offline: true)) do |offline_client| - result = offline_client.variation("doesntmatter", basic_user, "default") + result = offline_client.variation("doesntmatter", basic_context, "default") expect(result).to eq "default" end end it "returns the default value for an unknown feature" do with_client(test_config) do |client| - expect(client.variation("badkey", basic_user, "default")).to eq "default" + expect(client.variation("badkey", basic_context, "default")).to eq "default" end end @@ -25,7 +25,7 @@ module LaunchDarkly td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) with_client(test_config(data_source: td)) do |client| - expect(client.variation("flagkey", basic_user, "default")).to eq "value" + expect(client.variation("flagkey", basic_context, "default")).to eq "value" end end @@ -38,13 +38,13 @@ module LaunchDarkly }) with_client(test_config(data_source: td)) do |client| - expect(client.variation("flagkey", basic_user, "default")).to eq "default" + expect(client.variation("flagkey", basic_context, "default")).to eq "default" end end it "can evaluate a flag that references a segment" do td = Integrations::TestData.data_source - segment = SegmentBuilder.new("segmentkey").included(basic_user.key).build + segment = SegmentBuilder.new("segmentkey").included(basic_context.key).build td.use_preconfigured_segment(segment) td.use_preconfigured_flag( FlagBuilder.new("flagkey").on(true).variations(true, false).rule( @@ -52,7 +52,7 @@ module LaunchDarkly ).build) with_client(test_config(data_source: td)) do |client| - expect(client.variation("flagkey", basic_user, false)).to be true + expect(client.variation("flagkey", basic_context, false)).to be true end end @@ -66,11 +66,11 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_user.key, segment, true) + segstore.setup_segment_for_user(basic_context.key, segment, true) big_seg_config = BigSegmentsConfig.new(store: segstore) 
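          # The segment above is unbounded ("big"), so membership is resolved
          # through the configured big segment store rather than the segment's
          # own include/exclude lists; MockBigSegmentStore stands in for that
          # store in this test.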
with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| - expect(client.variation("flagkey", basic_user, false)).to be true + expect(client.variation("flagkey", basic_context, false)).to be true end end end @@ -81,7 +81,7 @@ module LaunchDarkly it "returns the default value if the client is offline" do with_client(test_config(offline: true)) do |offline_client| - result = offline_client.variation_detail("doesntmatter", basic_user, "default") + result = offline_client.variation_detail("doesntmatter", basic_context, "default") expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_CLIENT_NOT_READY)) expect(result).to eq expected end @@ -89,7 +89,7 @@ module LaunchDarkly it "returns the default value for an unknown feature" do with_client(test_config) do |client| - result = client.variation_detail("badkey", basic_user, "default") + result = client.variation_detail("badkey", basic_context, "default") expected = EvaluationDetail.new("default", nil, EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)) expect(result).to eq expected end @@ -100,7 +100,7 @@ module LaunchDarkly td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) with_client(test_config(data_source: td)) do |client| - result = client.variation_detail("flagkey", basic_user, "default") + result = client.variation_detail("flagkey", basic_context, "default") expected = EvaluationDetail.new("value", 0, EvaluationReason::off) expect(result).to eq expected end @@ -115,7 +115,7 @@ module LaunchDarkly }) with_client(test_config(data_source: td)) do |client| - result = client.variation_detail("flagkey", basic_user, "default") + result = client.variation_detail("flagkey", basic_context, "default") expected = EvaluationDetail.new("default", nil, EvaluationReason::off) expect(result).to eq expected expect(result.default_value?).to be true @@ -132,12 +132,12 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_user.key, segment, true) + segstore.setup_segment_for_user(basic_context.key, segment, true) segstore.setup_metadata(Time.now) big_seg_config = BigSegmentsConfig.new(store: segstore) with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| - result = client.variation_detail("flagkey", basic_user, false) + result = client.variation_detail("flagkey", basic_context, false) expect(result.value).to be true expect(result.reason.big_segments_status).to eq(BigSegmentsStatus::HEALTHY) end diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 0a311bf6..9dfcae2c 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -5,7 +5,6 @@ require "model_builders" require "spec_helper" -# TODO: Fix all these commented out tests when addressing u2c events module LaunchDarkly describe "LDClient events tests" do def event_processor(client) @@ -19,26 +18,28 @@ def event_processor(client) end context "evaluation events - variation" do - # it "unknown flag" do - # with_client(test_config) do |client| - # expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'badkey', nil, nil, 'default', nil, 'default', false, nil, nil - # ) - # client.variation("badkey", basic_user, "default") - # end - # end - - # it "known flag" do - # td = Integrations::TestData.data_source - # td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - # - # with_client(test_config(data_source: td)) do |client| - # 
expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil - # ) - # client.variation("flagkey", basic_user, "default") - # end - # end + it "unknown flag" do + with_client(test_config) do |client| + context = basic_context + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'badkey', nil, nil, 'default', nil, 'default', false, nil, nil + ) + client.variation("badkey", context, "default") + end + end + + it "known flag" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + context = basic_context + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'flagkey', 1, 0, 'value', nil, 'default', false, nil, nil + ) + client.variation("flagkey", context, "default") + end + end it "does not send event, and logs error, if user is nil" do td = Integrations::TestData.data_source @@ -53,79 +54,83 @@ def event_processor(client) end end - # it "does not send event, and logs warning, if user key is nil" do - # td = Integrations::TestData.data_source - # td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) - # - # logger = double().as_null_object - # keyless_user = { key: nil } - # - # with_client(test_config(data_source: td, logger: logger)) do |client| - # expect(event_processor(client)).not_to receive(:record_eval_event) - # expect(logger).to receive(:warn) - # client.variation("flagkey", keyless_user, "default") - # end - # end - - # it "sets trackEvents and reason if trackEvents is set for matched rule" do - # td = Integrations::TestData.data_source - # td.use_preconfigured_flag( - # FlagBuilder.new("flagkey").version(100).on(true).variations("value") - # .rule(RuleBuilder.new.variation(0).id("id").track_events(true) - # .clause(Clauses.match_user(basic_user))) - # .build - # ) - # - # with_client(test_config(data_source: td)) do |client| - # expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::rule_match(0, 'id'), - # 'default', true, nil, nil - # ) - # client.variation("flagkey", basic_user, "default") - # end - # end - - # it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do - # td = Integrations::TestData.data_source - # td.use_preconfigured_flag( - # FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0) - # .track_events_fallthrough(true).build - # ) - # - # with_client(test_config(data_source: td)) do |client| - # expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::fallthrough, - # 'default', true, nil, nil - # ) - # client.variation("flagkey", basic_user, "default") - # end - # end + it "does not send event, and logs error, if user key is nil" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + + logger = double().as_null_object + keyless_user = { key: nil } + + with_client(test_config(data_source: td, logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:record_eval_event) + expect(logger).to receive(:error) + client.variation("flagkey", keyless_user, "default") + end + end + + it "sets trackEvents and reason if trackEvents is set for matched rule" 
do + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value") + .rule(RuleBuilder.new.variation(0).id("id").track_events(true) + .clause(Clauses.match_user(basic_context))) + .build + ) + + context = basic_context + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::rule_match(0, 'id'), + 'default', true, nil, nil + ) + client.variation("flagkey", context, "default") + end + end + + it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + td = Integrations::TestData.data_source + td.use_preconfigured_flag( + FlagBuilder.new("flagkey").version(100).on(true).variations("value").fallthrough_variation(0) + .track_events_fallthrough(true).build + ) + + context = basic_context + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'flagkey', 100, 0, 'value', LaunchDarkly::EvaluationReason::fallthrough, + 'default', true, nil, nil + ) + client.variation("flagkey", context, "default") + end + end end context "evaluation events - variation_detail" do - # it "unknown flag" do - # with_client(test_config) do |client| - # expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'badkey', nil, nil, 'default', - # LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND), - # 'default', false, nil, nil - # ) - # client.variation_detail("badkey", basic_user, "default") - # end - # end - - # it "known flag" do - # td = Integrations::TestData.data_source - # td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) - # - # with_client(test_config(data_source: td)) do |client| - # expect(event_processor(client)).to receive(:record_eval_event).with( - # basic_user, 'flagkey', 1, 0, 'value', LaunchDarkly::EvaluationReason::off, - # 'default', false, nil, nil - # ) - # client.variation_detail("flagkey", basic_user, "default") - # end - # end + it "unknown flag" do + with_client(test_config) do |client| + context = basic_context + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'badkey', nil, nil, 'default', + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND), + 'default', false, nil, nil + ) + client.variation_detail("badkey", context, "default") + end + end + + it "known flag" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + context = basic_context + with_client(test_config(data_source: td)) do |client| + expect(event_processor(client)).to receive(:record_eval_event).with( + context, 'flagkey', 1, 0, 'value', LaunchDarkly::EvaluationReason::off, + 'default', false, nil, nil + ) + client.variation_detail("flagkey", context, "default") + end + end it "does not send event, and logs error, if user is nil" do td = Integrations::TestData.data_source @@ -140,27 +145,28 @@ def event_processor(client) end end - # it "does not send event, and logs warning, if user key is nil" do - # td = Integrations::TestData.data_source - # td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) - # - # logger = double().as_null_object - # - # with_client(test_config(data_source: td, logger: logger)) do |client| - # 
expect(event_processor(client)).not_to receive(:record_eval_event) - # expect(logger).to receive(:warn) - # client.variation_detail("flagkey", { key: nil }, "default") - # end - # end + it "does not send event, and logs warning, if user key is nil" do + td = Integrations::TestData.data_source + td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) + + logger = double().as_null_object + + with_client(test_config(data_source: td, logger: logger)) do |client| + expect(event_processor(client)).not_to receive(:record_eval_event) + expect(logger).to receive(:error) + client.variation_detail("flagkey", { key: nil }, "default") + end + end end context "identify" do - # it "queues up an identify event" do - # with_client(test_config) do |client| - # expect(event_processor(client)).to receive(:record_identify_event).with(basic_user) - # client.identify(basic_user) - # end - # end + it "queues up an identify event" do + context = basic_context + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:record_identify_event).with(context) + client.identify(context) + end + end it "does not send event, and logs warning, if user is nil" do logger = double().as_null_object @@ -184,33 +190,23 @@ def event_processor(client) end context "track" do - # it "queues up an custom event" do - # with_client(test_config) do |client| - # expect(event_processor(client)).to receive(:record_custom_event).with( - # basic_user, 'custom_event_name', 42, nil - # ) - # client.track("custom_event_name", basic_user, 42) - # end - # end - - # it "can include a metric value" do - # with_client(test_config) do |client| - # expect(event_processor(client)).to receive(:record_custom_event).with( - # basic_user, 'custom_event_name', nil, 1.5 - # ) - # client.track("custom_event_name", basic_user, nil, 1.5) - # end - # end - - it "sanitizes the user in the event" do - numeric_key_user = { key: 33 } - sanitized_user = { key: "33" } + it "queues up an custom event" do + context = basic_context + with_client(test_config) do |client| + expect(event_processor(client)).to receive(:record_custom_event).with( + context, 'custom_event_name', 42, nil + ) + client.track("custom_event_name", context, 42) + end + end + it "can include a metric value" do + context = basic_context with_client(test_config) do |client| expect(event_processor(client)).to receive(:record_custom_event).with( - sanitized_user, 'custom_event_name', nil, nil + context, 'custom_event_name', nil, 1.5 ) - client.track("custom_event_name", numeric_key_user, nil) + client.track("custom_event_name", context, nil, 1.5) end end diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 6bed7621..44849812 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -34,7 +34,7 @@ def with_client(config) end end -def basic_user +def basic_context LaunchDarkly::LDContext::create({ "key": "user-key" }) end From 8b7ee10f802f227394acde3cc770788ae87a3acd Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Tue, 13 Dec 2022 14:16:39 -0600 Subject: [PATCH 276/292] Add legacy user-type support to the contract tests (#222) --- contract-tests/client_entity.rb | 10 +++++----- contract-tests/service.rb | 1 + 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 062e9933..0c4cee9a 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -76,12 +76,12 @@ def evaluate(params) response = {} if params[:detail] - detail = @client.variation_detail(params[:flagKey], params[:context], params[:defaultValue]) + detail = @client.variation_detail(params[:flagKey], params[:context] || params[:user], params[:defaultValue]) response[:value] = detail.value response[:variationIndex] = detail.variation_index response[:reason] = detail.reason else - response[:value] = @client.variation(params[:flagKey], params[:context], params[:defaultValue]) + response[:value] = @client.variation(params[:flagKey], params[:context] || params[:user], params[:defaultValue]) end response @@ -93,15 +93,15 @@ def evaluate_all(params) opts[:with_reasons] = params[:withReasons] || false opts[:details_only_for_tracked_flags] = params[:detailsOnlyForTrackedFlags] || false - @client.all_flags_state(params[:context], opts) + @client.all_flags_state(params[:context] || params[:user], opts) end def track(params) - @client.track(params[:eventKey], params[:context], params[:data], params[:metricValue]) + @client.track(params[:eventKey], params[:context] || params[:user], params[:data], params[:metricValue]) end def identify(params) - @client.identify(params[:context]) + @client.identify(params[:context] || params[:user]) end def flush_events diff --git a/contract-tests/service.rb b/contract-tests/service.rb index f327c874..cb670354 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -30,6 +30,7 @@ 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', + 'user-type', 'tags', ], }.to_json From ba4ec8c4f00412e2dfe127586926fd2ed7b45532 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 13 Dec 2022 14:23:12 -0600 Subject: [PATCH 277/292] Remove inline user configuration option (#223) --- contract-tests/client_entity.rb | 1 - lib/ldclient-rb/config.rb | 10 ------- lib/ldclient-rb/events.rb | 24 +++-------------- lib/ldclient-rb/impl/context.rb | 2 ++ lib/ldclient-rb/impl/diagnostic_events.rb | 1 - spec/diagnostic_events_spec.rb | 2 -- spec/events_spec.rb | 32 +++++++---------------- 7 files changed, 16 insertions(+), 56 deletions(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 0c4cee9a..842880de 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -31,7 +31,6 @@ def initialize(log, config) opts[:all_attributes_private] = !!events[:allAttributesPrivate] opts[:private_attribute_names] = events[:globalPrivateAttributes] opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) unless events[:flushIntervalMs].nil? - opts[:inline_users_in_events] = events[:inlineUsers] || false else opts[:send_events] = false end diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index f3e483aa..a60166c3 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -34,7 +34,6 @@ class Config # @option opts [Boolean] :send_events (true) See {#send_events}. # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. 
# @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. - # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. @@ -65,7 +64,6 @@ def initialize(opts = {}) @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval - @inline_users_in_events = opts[:inline_users_in_events] || false @data_source = opts[:data_source] @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? @@ -249,14 +247,6 @@ def offline? # attr_reader :user_keys_flush_interval - # - # Whether to include full user details in every analytics event. By default, events will only - # include the user key, except for one "index" event that provides the full details for the user. - # The only reason to change this is if you are using the Analytics Data Stream. - # @return [Boolean] - # - attr_reader :inline_users_in_events - # # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, # the client uses its standard polling or streaming implementation; this is customizable for diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 8e6cc57e..062117a8 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -311,12 +311,10 @@ def dispatch_event(event, outbox) will_add_full_event = true end - # For each user we haven't seen before, we add an index event - unless this is already - # an identify event for that user. - unless will_add_full_event && @config.inline_users_in_events - if !event.context.nil? && !notice_context(event.context) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) - outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.context)) - end + # For each context we haven't seen before, we add an index event - unless this is already + # an identify event for that context. + if !event.context.nil? && !notice_context(event.context) && !event.is_a?(LaunchDarkly::Impl::IdentifyEvent) + outbox.add_event(LaunchDarkly::Impl::IndexEvent.new(event.timestamp, event.context)) end outbox.add_event(event) if will_add_full_event @@ -453,7 +451,6 @@ class EventOutputFormatter ANONYMOUS_USER_CONTEXT_KIND = 'anonymousUser' def initialize(config) - @inline_users = config.inline_users_in_events @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) end @@ -560,18 +557,5 @@ def make_output_events(events, summary) features: flags, } end - - private def set_opt_context_kind(out, user) - out[:contextKind] = ANONYMOUS_USER_CONTEXT_KIND if !user.nil? && user[:anonymous] - end - - private def set_user_or_user_key(out, user) - if @inline_users - out[:user] = @context_filter.filter(user) - else - key = user[:key] - out[:userKey] = key.is_a?(String) ? 
key : key.to_s - end - end end end diff --git a/lib/ldclient-rb/impl/context.rb b/lib/ldclient-rb/impl/context.rb index dea91e57..e1b9e7a0 100644 --- a/lib/ldclient-rb/impl/context.rb +++ b/lib/ldclient-rb/impl/context.rb @@ -1,3 +1,5 @@ +require "erb" + module LaunchDarkly module Impl module Context diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index da5aa03e..7c76ba3a 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -73,7 +73,6 @@ def self.make_config_data(config) diagnosticRecordingIntervalMillis: self.seconds_to_millis(config.diagnostic_recording_interval), eventsCapacity: config.capacity, eventsFlushIntervalMillis: self.seconds_to_millis(config.flush_interval), - inlineUsersInEvents: config.inline_users_in_events, pollingIntervalMillis: self.seconds_to_millis(config.poll_interval), socketTimeoutMillis: self.seconds_to_millis(config.read_timeout), streamingDisabled: !config.stream?, diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index d8dc752c..9ede42e6 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -31,7 +31,6 @@ def expected_default_config diagnosticRecordingIntervalMillis: Config.default_diagnostic_recording_interval * 1000, eventsCapacity: Config.default_capacity, eventsFlushIntervalMillis: Config.default_flush_interval * 1000, - inlineUsersInEvents: false, pollingIntervalMillis: Config.default_poll_interval * 1000, socketTimeoutMillis: Config.default_read_timeout * 1000, streamingDisabled: false, @@ -64,7 +63,6 @@ def expected_default_config [ { diagnostic_recording_interval: 9999 }, { diagnosticRecordingIntervalMillis: 9999000 } ], [ { capacity: 4000 }, { eventsCapacity: 4000 } ], [ { flush_interval: 46 }, { eventsFlushIntervalMillis: 46000 } ], - [ { inline_users_in_events: true }, { inlineUsersInEvents: true } ], [ { poll_interval: 999 }, { pollingIntervalMillis: 999000 } ], [ { read_timeout: 46 }, { socketTimeoutMillis: 46000 } ], [ { stream: false }, { streamingDisabled: true } ], diff --git a/spec/events_spec.rb b/spec/events_spec.rb index adfe7e26..292629c5 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -80,27 +80,15 @@ def with_processor_and_sender(config) end it "filters context in feature event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| flag = { key: "flagkey", version: 11 } ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, true) - output = flush_and_get_events(ep, sender) - expect(output).to contain_exactly( - eq(feature_event(flag, context, 1, 'value', true)), - include(:kind => "summary") - ) - end - end - - it "still generates index event if inline_users is true but feature event was not tracked" do - config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) - with_processor_and_sender(config) do |ep, sender| - ep.record_eval_event(context, 'flagkey', 11, 1, 'value', nil, nil, false) - output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( eq(index_event(config, context)), + eq(feature_event(flag, context, 1, 'value')), include(:kind => "summary") ) end @@ -197,8 +185,8 @@ def with_processor_and_sender(config) output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( 
eq(index_event(default_config, context)), - eq(feature_event(flag1, context, 1, 'value', false, starting_timestamp)), - eq(feature_event(flag2, context, 1, 'value', false, starting_timestamp + 1)), + eq(feature_event(flag1, context, 1, 'value', starting_timestamp)), + eq(feature_event(flag2, context, 1, 'value', starting_timestamp + 1)), include(:kind => "summary") ) end @@ -250,13 +238,14 @@ def with_processor_and_sender(config) end it "filters context in custom event" do - config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) with_processor_and_sender(config) do |ep, sender| ep.record_custom_event(context, 'eventkey') output = flush_and_get_events(ep, sender) expect(output).to contain_exactly( - eq(custom_event(context, 'eventkey', nil, nil, true)) + eq(index_event(config, context)), + eq(custom_event(context, 'eventkey', nil, nil)) ) end end @@ -421,7 +410,7 @@ def identify_event(config, context, timestamp = starting_timestamp) # @param timestamp [Integer] # @return [Hash] # - def feature_event(flag, context, variation, value, inline_user = false, timestamp = starting_timestamp) + def feature_event(flag, context, variation, value, timestamp = starting_timestamp) out = { kind: 'feature', creationDate: timestamp, @@ -462,13 +451,12 @@ def debug_event(config, flag, context, variation, value, timestamp = starting_ti # @param key [String] # @param data [any] # @param metric_value [any] - # @param timestamp [Integer] # @return [Hash] # - def custom_event(context, key, data, metric_value, inline_user = false, timestamp = starting_timestamp) + def custom_event(context, key, data, metric_value) out = { kind: "custom", - creationDate: timestamp, + creationDate: starting_timestamp, contextKeys: context.keys, key: key, } From d96cc9b7f323c3860df7eb23e837d5805f31cc6b Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 13 Dec 2022 14:46:20 -0600 Subject: [PATCH 278/292] Add context_ configuration options (#224) These new context_ configuration options are meant to replace the historic user_ options. If both are provided, the context_ variant will take precedence. --- lib/ldclient-rb/config.rb | 64 ++++++++++++++++++----- lib/ldclient-rb/events.rb | 8 +-- lib/ldclient-rb/impl/diagnostic_events.rb | 4 +- spec/config_spec.rb | 26 +++++++++ spec/diagnostic_events_spec.rb | 8 +-- 5 files changed, 87 insertions(+), 23 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index a60166c3..b3e64694 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -13,6 +13,14 @@ class Config # # Constructor for creating custom LaunchDarkly configurations. # + # `user_keys_capacity` and `user_keys_flush_interval` are deprecated + # configuration options. They exist to maintain backwards compatibility + # with previous configurations. Newer code should prefer their replacement + # options -- `context_keys_capacity` and `context_keys_flush_interval`. + # + # In the event both the user and context variations are provided, the + # context specific configuration option will take precedence. + # # @param opts [Hash] the configuration options # @option opts [Logger] :logger See {#logger}. # @option opts [String] :base_uri ("https://sdk.launchdarkly.com") See {#base_uri}. @@ -33,7 +41,9 @@ class Config # @option opts [Array] :private_attribute_names See {#private_attribute_names}. 
# @option opts [Boolean] :send_events (true) See {#send_events}. # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. + # @option opts [Integer] :context_keys_capacity (1000) See {#context_keys_capacity}. # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. + # @option opts [Float] :context_keys_flush_interval (300) See {#context_keys_flush_interval}. # @option opts [Object] :data_source See {#data_source}. # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. @@ -62,8 +72,8 @@ def initialize(opts = {}) @all_attributes_private = opts[:all_attributes_private] || false @private_attribute_names = opts[:private_attribute_names] || [] @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events - @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity - @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval + @context_keys_capacity = opts[:context_keys_capacity] || opts[:user_keys_capacity] || Config.default_context_keys_capacity + @context_keys_flush_interval = opts[:context_keys_flush_interval] || opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @data_source = opts[:data_source] @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? @@ -233,19 +243,35 @@ def offline? attr_reader :send_events # - # The number of user keys that the event processor can remember at any one time. This reduces the - # amount of duplicate user details sent in analytics events. + # The number of context keys that the event processor can remember at any one time. This reduces the + # amount of duplicate context details sent in analytics events. + # @return [Integer] + # @see #context_keys_flush_interval + # + attr_reader :context_keys_capacity + + # + # @deprecated Backwards compatibility alias for #context_keys_capacity. + # # @return [Integer] - # @see #user_keys_flush_interval + # @see #context_keys_flush_interval # - attr_reader :user_keys_capacity + alias :user_keys_capacity :context_keys_capacity # - # The interval in seconds at which the event processor will reset its set of known user keys. + # The interval in seconds at which the event processor will reset its set of known context keys. # @return [Float] - # @see #user_keys_capacity + # @see #context_keys_capacity # - attr_reader :user_keys_flush_interval + attr_reader :context_keys_flush_interval + + # + # @deprecated Backwards compatibility alias for #context_keys_flush_interval. + # + # @return [Integer] + # @see #context_keys_flush_interval + # + alias :user_keys_flush_interval :context_keys_flush_interval # # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, @@ -483,21 +509,33 @@ def self.default_send_events end # - # The default value for {#user_keys_capacity}. + # The default value for {#context_keys_capacity}. # @return [Integer] 1000 # - def self.default_user_keys_capacity + def self.default_context_keys_capacity 1000 end # - # The default value for {#user_keys_flush_interval}. + # The default value for {#context_keys_flush_interval}. 
# @return [Float] 300 # - def self.default_user_keys_flush_interval + def self.default_context_keys_flush_interval 300 end + class << self + # + # @deprecated Backwards compatibility alias for #default_context_keys_capacity + # + alias :default_user_keys_capacity :default_context_keys_capacity + + # + # @deprecated Backwards compatibility alias for #default_context_keys_flush_interval + # + alias :default_user_keys_flush_interval :default_context_keys_flush_interval + end + # # The default value for {#diagnostic_recording_interval}. # @return [Float] 900 diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 062117a8..b189f904 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -117,10 +117,10 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test post_to_inbox(FlushMessage.new) end @flush_task.execute - @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do + @contexts_flush_task = Concurrent::TimerTask.new(execution_interval: config.context_keys_flush_interval) do post_to_inbox(FlushUsersMessage.new) end - @users_flush_task.execute + @contexts_flush_task.execute if !diagnostic_accumulator.nil? interval = test_properties && test_properties.has_key?(:diagnostic_recording_interval) ? test_properties[:diagnostic_recording_interval] : @@ -176,7 +176,7 @@ def stop # final shutdown, which includes a final flush, is done synchronously if @stopped.make_true @flush_task.shutdown - @users_flush_task.shutdown + @contexts_flush_task.shutdown @diagnostic_event_task.shutdown unless @diagnostic_event_task.nil? # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox # is full; an orderly shutdown can't happen unless these messages are received. @@ -225,7 +225,7 @@ def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @diagnostic_accumulator = config.diagnostic_opt_out? ? 
nil : diagnostic_accumulator @event_sender = event_sender - @context_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) + @context_keys = SimpleLRUCacheSet.new(config.context_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index 7c76ba3a..7bc26047 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -76,8 +76,8 @@ def self.make_config_data(config) pollingIntervalMillis: self.seconds_to_millis(config.poll_interval), socketTimeoutMillis: self.seconds_to_millis(config.read_timeout), streamingDisabled: !config.stream?, - userKeysCapacity: config.user_keys_capacity, - userKeysFlushIntervalMillis: self.seconds_to_millis(config.user_keys_flush_interval), + userKeysCapacity: config.context_keys_capacity, + userKeysFlushIntervalMillis: self.seconds_to_millis(config.context_keys_flush_interval), usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY') || ENV.has_key?('HTTPS_PROXY'), usingRelayDaemon: config.use_ldd?, } diff --git a/spec/config_spec.rb b/spec/config_spec.rb index 692e9257..2196bcad 100644 --- a/spec/config_spec.rb +++ b/spec/config_spec.rb @@ -93,4 +93,30 @@ end end end + + describe "context and user aliases" do + it "default values are aliased correctly" do + expect(LaunchDarkly::Config.default_context_keys_capacity).to eq LaunchDarkly::Config.default_user_keys_capacity + expect(LaunchDarkly::Config.default_context_keys_flush_interval).to eq LaunchDarkly::Config.default_user_keys_flush_interval + end + + it "context options are reflected in user options" do + config = subject.new(context_keys_capacity: 50, context_keys_flush_interval: 25) + expect(config.context_keys_capacity).to eq config.user_keys_capacity + expect(config.context_keys_flush_interval).to eq config.user_keys_flush_interval + end + + it "context options can be set by user options" do + config = subject.new(user_keys_capacity: 50, user_keys_flush_interval: 25) + expect(config.context_keys_capacity).to eq config.user_keys_capacity + expect(config.context_keys_flush_interval).to eq config.user_keys_flush_interval + end + + it "context options take precedence" do + config = subject.new(context_keys_capacity: 100, user_keys_capacity: 50, context_keys_flush_interval: 100, user_keys_flush_interval: 50) + + expect(config.context_keys_capacity).to eq 100 + expect(config.context_keys_flush_interval).to eq 100 + end + end end diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index 9ede42e6..786e3764 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -34,8 +34,8 @@ def expected_default_config pollingIntervalMillis: Config.default_poll_interval * 1000, socketTimeoutMillis: Config.default_read_timeout * 1000, streamingDisabled: false, - userKeysCapacity: Config.default_user_keys_capacity, - userKeysFlushIntervalMillis: Config.default_user_keys_flush_interval * 1000, + userKeysCapacity: Config.default_context_keys_capacity, + userKeysFlushIntervalMillis: Config.default_context_keys_flush_interval * 1000, usingProxy: false, usingRelayDaemon: false, } @@ -66,8 +66,8 @@ def expected_default_config [ { poll_interval: 999 }, { pollingIntervalMillis: 999000 } ], [ { read_timeout: 46 }, { socketTimeoutMillis: 46000 } ], [ { stream: false }, { streamingDisabled: 
true } ], - [ { user_keys_capacity: 999 }, { userKeysCapacity: 999 } ], - [ { user_keys_flush_interval: 999 }, { userKeysFlushIntervalMillis: 999000 } ], + [ { context_keys_capacity: 999 }, { userKeysCapacity: 999 } ], + [ { context_keys_flush_interval: 999 }, { userKeysFlushIntervalMillis: 999000 } ], [ { use_ldd: true }, { usingRelayDaemon: true } ], ] changes_and_expected.each do |config_values, expected_values| From f0f54771a5af542d97bdad279ce4114b603ff542 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 13 Dec 2022 15:16:34 -0600 Subject: [PATCH 279/292] Add support for flag context targets (#225) --- Makefile | 2 - lib/ldclient-rb/impl/evaluator.rb | 77 ++++++++++++++++------ lib/ldclient-rb/impl/model/feature_flag.rb | 11 ++++ 3 files changed, 67 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index 6fdbd3f5..91992b7c 100644 --- a/Makefile +++ b/Makefile @@ -6,8 +6,6 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # - various other "evaluation" subtests: These tests require context kind support. # - "events": These test suites will be unavailable until more of the U2C implementation is done. TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ - -skip 'evaluation/parameterized/target match/context targets' \ - -skip 'evaluation/parameterized/target match/multi-kind' \ -skip 'big segments' build-contract-tests: diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 967a2eee..7a2c5835 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -170,14 +170,12 @@ def self.make_big_segment_ref(segment) # method is visible for testing "#{segment.key}.g#{segment.generation}" end - private - # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] the flag # @param context [LaunchDarkly::LDContext] the evaluation context # @param eval_result [EvalResult] # @param state [EvaluatorState] # @raise [EvaluationException] - def eval_internal(flag, context, eval_result, state) + private def eval_internal(flag, context, eval_result, state) unless flag.on return flag.off_result end @@ -186,13 +184,8 @@ def eval_internal(flag, context, eval_result, state) return prereq_failure_result unless prereq_failure_result.nil? # Check context target matches - flag.targets.each do |target| - target.values.each do |value| - if value == context.key - return target.match_result - end - end - end + target_result = check_targets(context, flag) + return target_result unless target_result.nil? # Check custom rules flag.rules.each do |rule| @@ -214,7 +207,7 @@ def eval_internal(flag, context, eval_result, state) # @param eval_result [EvalResult] # @param state [EvaluatorState] # @raise [EvaluationException] if a flag prereq cycle is detected - def check_prerequisites(flag, context, eval_result, state) + private def check_prerequisites(flag, context, eval_result, state) return if flag.prerequisites.empty? 
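        # Push this flag onto the prerequisite evaluation stack; if the same key
        # is seen again while its prerequisites are being evaluated, that is the
        # prerequisite cycle mentioned above, and evaluation raises rather than
        # recursing indefinitely.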
state.prereq_stack.push(flag.key) @@ -263,7 +256,7 @@ def check_prerequisites(flag, context, eval_result, state) # @param eval_result [EvalResult] # @param state [EvaluatorState] # @raise [InvalidReferenceException] - def rule_match_context(rule, context, eval_result, state) + private def rule_match_context(rule, context, eval_result, state) rule.clauses.each do |clause| return false unless clause_match_context(clause, context, eval_result, state) end @@ -276,7 +269,7 @@ def rule_match_context(rule, context, eval_result, state) # @param eval_result [EvalResult] # @param state [EvaluatorState] # @raise [InvalidReferenceException] - def clause_match_context(clause, context, eval_result, state) + private def clause_match_context(clause, context, eval_result, state) # In the case of a segment match operator, we check if the context is in any of the segments, # and possibly negate if clause.op == :segmentMatch @@ -328,7 +321,7 @@ def clause_match_context(clause, context, eval_result, state) # @param context [LaunchDarkly::LDContext] # @return [Boolean] # @raise [InvalidReferenceException] Raised if the clause.attribute is an invalid reference - def clause_match_context_no_segments(clause, context) + private def clause_match_context_no_segments(clause, context) raise InvalidReferenceException.new(clause.attribute.error) unless clause.attribute.error.nil? if clause.attribute.depth == 1 && clause.attribute.component(0) == :kind @@ -355,7 +348,7 @@ def clause_match_context_no_segments(clause, context) # @param eval_result [EvalResult] # @param state [EvaluatorState] # @return [Boolean] - def segment_match_context(segment, context, eval_result, state) + private def segment_match_context(segment, context, eval_result, state) return big_segment_match_context(segment, context, eval_result, state) if segment.unbounded simple_segment_match_context(segment, context, true, eval_result, state) @@ -366,7 +359,7 @@ def segment_match_context(segment, context, eval_result, state) # @param eval_result [EvalResult] # @param state [EvaluatorState] # @return [Boolean] - def big_segment_match_context(segment, context, eval_result, state) + private def big_segment_match_context(segment, context, eval_result, state) unless segment.generation # Big segment queries can only be done if the generation is known. If it's unset, # that probably means the data store was populated by an older SDK that doesn't know @@ -397,7 +390,7 @@ def big_segment_match_context(segment, context, eval_result, state) # @param use_includes_and_excludes [Boolean] # @param state [EvaluatorState] # @return [Boolean] - def simple_segment_match_context(segment, context, use_includes_and_excludes, eval_result, state) + private def simple_segment_match_context(segment, context, use_includes_and_excludes, eval_result, state) if use_includes_and_excludes if EvaluatorHelpers.context_key_in_target_list(context, nil, segment.included) return true @@ -440,7 +433,7 @@ def simple_segment_match_context(segment, context, use_includes_and_excludes, ev # @param salt [String] # @return [Boolean] # @raise [InvalidReferenceException] - def segment_rule_match_context(rule, context, segment_key, salt, eval_result, state) + private def segment_rule_match_context(rule, context, segment_key, salt, eval_result, state) rule.clauses.each do |c| return false unless clause_match_context(c, context, eval_result, state) end @@ -459,9 +452,7 @@ def segment_rule_match_context(rule, context, segment_key, salt, eval_result, st bucket.nil? 
|| bucket < weight end - private - - def get_value_for_variation_or_rollout(flag, vr, context, precomputed_results) + private def get_value_for_variation_or_rollout(flag, vr, context, precomputed_results) index, in_experiment = EvaluatorBucketing.variation_index_for_context(flag, vr, context) if index.nil? @@ -470,6 +461,50 @@ def get_value_for_variation_or_rollout(flag, vr, context, precomputed_results) end precomputed_results.for_variation(index, in_experiment) end + + # @param [LaunchDarkly::LDContext] context + # @param [LaunchDarkly::Impl::Model::FeatureFlag] flag + # @return [LaunchDarkly::EvaluationDetail, nil] + private def check_targets(context, flag) + user_targets = flag.targets + context_targets = flag.context_targets + + if context_targets.empty? + unless user_targets.empty? + user_context = context.individual_context(LDContext::KIND_DEFAULT) + return nil if user_context.nil? + + user_targets.each do |target| + if target.values.include?(user_context.key) # rubocop:disable Performance/InefficientHashSearch + return target.match_result + end + end + end + + return nil + end + + context_targets.each do |target| + if target.kind == LDContext::KIND_DEFAULT + user_context = context.individual_context(LDContext::KIND_DEFAULT) + next if user_context.nil? + + user_key = user_context.key + user_targets.each do |user_target| + if user_target.variation == target.variation + if user_target.values.include?(user_key) # rubocop:disable Performance/InefficientHashSearch + return target.match_result + end + break + end + end + elsif EvaluatorHelpers.context_key_in_target_list(context, target.kind, target.values) + return target.match_result + end + end + + nil + end end end end diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index bd6ef6d7..4852115f 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -27,6 +27,9 @@ def initialize(data, logger = nil) @targets = (data[:targets] || []).map do |target_data| Target.new(target_data, self, logger) end + @context_targets = (data[:contextTargets] || []).map do |target_data| + Target.new(target_data, self, logger) + end @rules = (data[:rules] || []).map.with_index do |rule_data, index| FlagRule.new(rule_data, index, self, logger) end @@ -60,6 +63,8 @@ def initialize(data, logger = nil) attr_reader :prerequisites # @return [Array] attr_reader :targets + # @return [Array] + attr_reader :context_targets # @return [Array] attr_reader :rules # @return [String] @@ -107,16 +112,22 @@ def initialize(data, flag, logger) class Target def initialize(data, flag, logger) + @kind = data[:contextKind] || LDContext::KIND_DEFAULT @data = data @values = data[:values] || [] + @variation = data[:variation] @match_result = EvaluatorHelpers.evaluation_detail_for_variation(flag, data[:variation], EvaluationReason::target_match, logger) end + # @return [String] + attr_reader :kind # @return [Hash] attr_reader :data # @return [Array] attr_reader :values + # @return [Integer] + attr_reader :variation # @return [LaunchDarkly::EvaluationDetail] attr_reader :match_result end From ec3470a0be4dab4edc170bac36c1a852d2882af0 Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Wed, 14 Dec 2022 15:59:34 -0500 Subject: [PATCH 280/292] Bump diplomat --- launchdarkly-server-sdk.gemspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 95e73677..985bebdd 100644 --- a/launchdarkly-server-sdk.gemspec 
+++ b/launchdarkly-server-sdk.gemspec @@ -24,8 +24,8 @@ Gem::Specification.new do |spec| spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" spec.add_development_dependency "bundler", "2.2.33" spec.add_development_dependency "rspec", "~> 3.10" - spec.add_development_dependency "diplomat", "~> 2.4.2" spec.add_development_dependency "redis", "~> 4.2" + spec.add_development_dependency "diplomat", "~> 2.6" spec.add_development_dependency "connection_pool", "~> 2.2.3" spec.add_development_dependency "rspec_junit_formatter", "~> 0.4" spec.add_development_dependency "timecop", "~> 0.9" From 0f865c1eca0fdb74b847ef87f3d9bc1099d28f6e Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Wed, 14 Dec 2022 16:02:03 -0500 Subject: [PATCH 281/292] Bump redis --- launchdarkly-server-sdk.gemspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 985bebdd..cdae86e8 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -24,8 +24,8 @@ Gem::Specification.new do |spec| spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" spec.add_development_dependency "bundler", "2.2.33" spec.add_development_dependency "rspec", "~> 3.10" - spec.add_development_dependency "redis", "~> 4.2" spec.add_development_dependency "diplomat", "~> 2.6" + spec.add_development_dependency "redis", "~> 5.0" spec.add_development_dependency "connection_pool", "~> 2.2.3" spec.add_development_dependency "rspec_junit_formatter", "~> 0.4" spec.add_development_dependency "timecop", "~> 0.9" From 26fb9f3447cfb34516666b59b61226315c0e59c3 Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Wed, 14 Dec 2022 16:07:58 -0500 Subject: [PATCH 282/292] Remove oga --- launchdarkly-server-sdk.gemspec | 2 -- 1 file changed, 2 deletions(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index cdae86e8..9563c06f 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -33,8 +33,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "webrick", "~> 1.7" spec.add_development_dependency "rubocop", "~> 1.37" spec.add_development_dependency "rubocop-performance", "~> 1.15" - # required by dynamodb - spec.add_development_dependency "oga", "~> 2.2" spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" From e1b6201326080ae5c307c822ed80d3ff66d2d6de Mon Sep 17 00:00:00 2001 From: Matthew Keeler Date: Wed, 14 Dec 2022 16:09:05 -0500 Subject: [PATCH 283/292] Bump connection_pool --- launchdarkly-server-sdk.gemspec | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 9563c06f..78a1afcd 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -26,7 +26,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "rspec", "~> 3.10" spec.add_development_dependency "diplomat", "~> 2.6" spec.add_development_dependency "redis", "~> 5.0" - spec.add_development_dependency "connection_pool", "~> 2.2.3" + spec.add_development_dependency "connection_pool", "~> 2.3" spec.add_development_dependency "rspec_junit_formatter", "~> 0.4" spec.add_development_dependency "timecop", "~> 0.9" spec.add_development_dependency "listen", "~> 3.3" # see file_data_source.rb From 5e7f57b3939b70f9753e790542e73a833839b08d Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Tue, 20 Dec 2022 08:44:52 -0600 Subject: [PATCH 284/292] Favor set for faster target lookups (#228) A few of our internal models maintain arrays of values. These arrays can frequently be checked to see if they contain specific values. Since set lookups are much faster than array lookups, this commit changes the internal structure to a set for the values stored in Target and SegmentTarget. --- lib/ldclient-rb/impl/evaluator_helpers.rb | 2 +- lib/ldclient-rb/impl/model/feature_flag.rb | 5 +++-- lib/ldclient-rb/impl/model/segment.rb | 5 +++-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_helpers.rb b/lib/ldclient-rb/impl/evaluator_helpers.rb index 60ef2240..842d734f 100644 --- a/lib/ldclient-rb/impl/evaluator_helpers.rb +++ b/lib/ldclient-rb/impl/evaluator_helpers.rb @@ -33,7 +33,7 @@ def self.evaluation_detail_for_variation(flag, index, reason, logger = nil) # # @param context [LaunchDarkly::LDContext] # @param kind [String, nil] - # @param keys [Array] + # @param keys [Enumerable] # @return [Boolean] # def self.context_key_in_target_list(context, kind, keys) diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 4852115f..2f89905c 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -1,5 +1,6 @@ require "ldclient-rb/impl/evaluator_helpers" require "ldclient-rb/impl/model/clause" +require "set" # See serialization.rb for implementation notes on the data model classes. @@ -114,7 +115,7 @@ class Target def initialize(data, flag, logger) @kind = data[:contextKind] || LDContext::KIND_DEFAULT @data = data - @values = data[:values] || [] + @values = Set.new(data[:values] || []) @variation = data[:variation] @match_result = EvaluatorHelpers.evaluation_detail_for_variation(flag, data[:variation], EvaluationReason::target_match, logger) @@ -124,7 +125,7 @@ def initialize(data, flag, logger) attr_reader :kind # @return [Hash] attr_reader :data - # @return [Array] + # @return [Set] attr_reader :values # @return [Integer] attr_reader :variation diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index be464827..49c8f799 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -1,5 +1,6 @@ require "ldclient-rb/impl/model/clause" require "ldclient-rb/impl/model/preprocessed_data" +require "set" # See serialization.rb for implementation notes on the data model classes. @@ -82,14 +83,14 @@ class SegmentTarget def initialize(data) @data = data @context_kind = data[:contextKind] - @values = data[:values] || [] + @values = Set.new(data[:values] || []) end # @return [Hash] attr_reader :data # @return [String] attr_reader :context_kind - # @return [Array] + # @return [Set] attr_reader :values end From 4f1437e038822267811d9f2a7a665c088910f086 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Tue, 20 Dec 2022 08:45:30 -0600 Subject: [PATCH 285/292] Add secure mode hash to contract tests (#229) --- contract-tests/client_entity.rb | 4 ++++ contract-tests/service.rb | 4 ++++ lib/ldclient-rb/ldclient.rb | 16 +++++++++++----- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 842880de..d77af65a 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -95,6 +95,10 @@ def evaluate_all(params) @client.all_flags_state(params[:context] || params[:user], opts) end + def secure_mode_hash(params) + @client.secure_mode_hash(params[:context] || params[:user]) + end + def track(params) @client.track(params[:eventKey], params[:context] || params[:user], params[:data], params[:metricValue]) end diff --git a/contract-tests/service.rb b/contract-tests/service.rb index cb670354..c542f6a2 100644 --- a/contract-tests/service.rb +++ b/contract-tests/service.rb @@ -30,6 +30,7 @@ 'all-flags-with-reasons', 'all-flags-client-side-only', 'all-flags-details-only-for-tracked-flags', + 'secure-mode-hash', 'user-type', 'tags', ], @@ -85,6 +86,9 @@ when "evaluateAll" response = {:state => client.evaluate_all(params[:evaluateAll])} return [200, nil, response.to_json] + when "secureModeHash" + response = {:result => client.secure_mode_hash(params[:secureModeHash])} + return [200, nil, response.to_json] when "customEvent" client.track(params[:customEvent]) return 201 diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 3cdd8f27..ffd97440 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -120,14 +120,20 @@ def flush end # - # Creates a hash string that can be used by the JavaScript SDK to identify a user. + # Creates a hash string that can be used by the JavaScript SDK to identify a context. # For more information, see [Secure mode](https://docs.launchdarkly.com/sdk/features/secure-mode#ruby). # - # @param user [Hash] the user properties - # @return [String] a hash string + # @param context [Hash, LDContext] + # @return [String, nil] a hash string or nil if the provided context was invalid # - def secure_mode_hash(user) - OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) + def secure_mode_hash(context) + context = Impl::Context::make_context(context) + unless context.valid? + @config.logger.warn("secure_mode_hash called with invalid context: #{context.error}") + return nil + end + + OpenSSL::HMAC.hexdigest("sha256", @sdk_key, context.fully_qualified_key) end # From 5d485387225c8485942c040cfe53c163abfdfb73 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 20 Dec 2022 09:51:24 -0600 Subject: [PATCH 286/292] Update big segment support for users to context (#226) To support the users to context change for big segments, this commit makes the following changes: - Introduces a new `Segment.unboundedContextKind` attribute. This will default to `LDContext::KIND_DEFAULT` and is only referenced when `Segment.unbounded` is true. - With the creation of multi-kind contexts, a single evaluation may result in multiple queries to the big segment store. This is reflected in the changes to the `EvalResult` processing. 
--- Makefile | 6 +--- contract-tests/big_segment_store_fixture.rb | 4 +-- lib/ldclient-rb/impl/evaluator.rb | 33 ++++++++++++++----- .../impl/integrations/dynamodb_impl.rb | 4 +-- .../impl/integrations/redis_impl.rb | 6 ++-- lib/ldclient-rb/impl/model/segment.rb | 3 ++ lib/ldclient-rb/interfaces.rb | 28 ++++++++-------- spec/mock_components.rb | 4 +-- 8 files changed, 52 insertions(+), 36 deletions(-) diff --git a/Makefile b/Makefile index 91992b7c..07676969 100644 --- a/Makefile +++ b/Makefile @@ -2,11 +2,7 @@ TEMP_TEST_OUTPUT=/tmp/contract-test-service.log # TEST_HARNESS_PARAMS can be set to add -skip parameters for any contract tests that cannot yet pass # Explanation of current skips: -# - "evaluation/parameterized/prerequisites": Can't pass yet because prerequisite cycle detection is not implemented. -# - various other "evaluation" subtests: These tests require context kind support. -# - "events": These test suites will be unavailable until more of the U2C implementation is done. -TEST_HARNESS_PARAMS := $(TEST_HARNESS_PARAMS) \ - -skip 'big segments' +TEST_HARNESS_PARAMS= build-contract-tests: @cd contract-tests && bundle _2.2.33_ install diff --git a/contract-tests/big_segment_store_fixture.rb b/contract-tests/big_segment_store_fixture.rb index 5681afed..b22552ad 100644 --- a/contract-tests/big_segment_store_fixture.rb +++ b/contract-tests/big_segment_store_fixture.rb @@ -11,8 +11,8 @@ def get_metadata LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(json['lastUpToDate']) end - def get_membership(user_hash) - response = HTTP.post("#{@uri}/getMembership", :json => {:userHash => user_hash}) + def get_membership(context_hash) + response = HTTP.post("#{@uri}/getMembership", :json => {:contextHash => context_hash}) json = response.parse(:json) json['values'] diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 7a2c5835..ce626cd2 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -368,20 +368,37 @@ def self.make_big_segment_ref(segment) # method is visible for testing eval_result.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED return false end - unless eval_result.big_segments_status - result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(context.key) + + matched_context = context.individual_context(segment.unbounded_context_kind) + return false if matched_context.nil? + + membership = eval_result.big_segments_membership.nil? ? nil : eval_result.big_segments_membership[matched_context.key] + + if membership.nil? + # Note that this query is just by key; the context kind doesn't matter because any given + # Big Segment can only reference one context kind. So if segment A for the "user" kind + # includes a "user" context with key X, and segment B for the "org" kind includes an "org" + # context with the same key X, it is fine to say that the membership for key X is + # segment A and segment B-- there is no ambiguity. + result = @get_big_segments_membership.nil? ? nil : @get_big_segments_membership.call(matched_context.key) if result - eval_result.big_segments_membership = result.membership eval_result.big_segments_status = result.status + + membership = result.membership + eval_result.big_segments_membership = {} if eval_result.big_segments_membership.nil? 
+ eval_result.big_segments_membership[matched_context.key] = membership else - eval_result.big_segments_membership = nil eval_result.big_segments_status = BigSegmentsStatus::NOT_CONFIGURED end end - segment_ref = Evaluator.make_big_segment_ref(segment) - membership = eval_result.big_segments_membership - included = membership.nil? ? nil : membership[segment_ref] - return included unless included.nil? + + membership_result = nil + unless membership.nil? + segment_ref = Evaluator.make_big_segment_ref(segment) + membership_result = membership.nil? ? nil : membership[segment_ref] + end + + return membership_result unless membership_result.nil? simple_segment_match_context(segment, context, false, eval_result, state) end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index fc5543c3..b2660a3b 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -251,12 +251,12 @@ def get_metadata LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp) end - def get_membership(user_hash) + def get_membership(context_hash) data = @client.get_item( table_name: @table_name, key: { PARTITION_KEY => @prefix + KEY_USER_DATA, - SORT_KEY => user_hash, + SORT_KEY => context_hash, }) return nil unless data.item excluded_refs = data.item[ATTR_EXCLUDED] || [] diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 783af2a2..a9d55579 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -270,10 +270,10 @@ def get_metadata Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i) end - def get_membership(user_hash) + def get_membership(context_hash) with_connection do |redis| - included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + user_hash) - excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + user_hash) + included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + context_hash) + excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + context_hash) if !included_refs && !excluded_refs nil else diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index 49c8f799..d78036a7 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -29,6 +29,7 @@ def initialize(data, logger = nil) SegmentRule.new(rule_data, logger) end @unbounded = !!data[:unbounded] + @unbounded_context_kind = data[:unboundedContextKind] || LDContext::KIND_DEFAULT @generation = data[:generation] @salt = data[:salt] end @@ -53,6 +54,8 @@ def initialize(data, logger = nil) attr_reader :rules # @return [Boolean] attr_reader :unbounded + # @return [String] + attr_reader :unbounded_context_kind # @return [Integer|nil] attr_reader :generation # @return [String] diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index b62a90fb..64120dd5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -163,30 +163,30 @@ def get_metadata end # - # Queries the store for a snapshot of the current segment state for a specific user. + # Queries the store for a snapshot of the current segment state for a specific context. 
# - # The user_hash is a base64-encoded string produced by hashing the user key as defined by + # The context_hash is a base64-encoded string produced by hashing the context key as defined by # the Big Segments specification; the store implementation does not need to know the details # of how this is done, because it deals only with already-hashed keys, but the string can be # assumed to only contain characters that are valid in base64. # - # The return value should be either a Hash, or nil if the user is not referenced in any big + # The return value should be either a Hash, or nil if the context is not referenced in any big # segments. Each key in the Hash is a "segment reference", which is how segments are # identified in Big Segment data. This string is not identical to the segment key-- the SDK # will add other information. The store implementation should not be concerned with the - # format of the string. Each value in the Hash is true if the user is explicitly included in - # the segment, false if the user is explicitly excluded from the segment-- and is not also + # format of the string. Each value in the Hash is true if the context is explicitly included in + # the segment, false if the context is explicitly excluded from the segment-- and is not also # explicitly included (that is, if both an include and an exclude existed in the data, the - # include would take precedence). If the user's status in a particular segment is undefined, + # include would take precedence). If the context's status in a particular segment is undefined, # there should be no key or value for that segment. # # This Hash may be cached by the SDK, so it should not be modified after it is created. It # is a snapshot of the segment membership state at one point in time. # - # @param user_hash [String] - # @return [Hash] true/false values for Big Segments that reference this user + # @param context_hash [String] + # @return [Hash] true/false values for Big Segments that reference this context # - def get_membership(user_hash) + def get_membership(context_hash) end # @@ -216,7 +216,7 @@ def initialize(last_up_to_date) # # Information about the status of a Big Segment store, provided by {BigSegmentStoreStatusProvider}. # - # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific type of segments. For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # class BigSegmentStoreStatus @@ -226,11 +226,11 @@ def initialize(available, stale) end # True if the Big Segment store is able to respond to queries, so that the SDK can evaluate - # whether a user is in a segment or not. + # whether a context is in a segment or not. # # If this property is false, the store is not able to make queries (for instance, it may not have # a valid database connection). In this case, the SDK will treat any reference to a Big Segment - # as if no users are included in that segment. Also, the {EvaluationReason} associated with + # as if no contexts are included in that segment. Also, the {EvaluationReason} associated with # with any flag evaluation that references a Big Segment when the store is not available will # have a `big_segments_status` of `STORE_ERROR`. # @@ -259,14 +259,14 @@ def ==(other) # # The Big Segment store is the component that receives information about Big Segments, normally # from a database populated by the LaunchDarkly Relay Proxy. Big Segments are a specific type - # of user segments. 
For more information, read the LaunchDarkly documentation: + # of segments. For more information, read the LaunchDarkly documentation: # https://docs.launchdarkly.com/home/users/big-segments # # An implementation of this interface is returned by {LDClient#big_segment_store_status_provider}. # Application code never needs to implement this interface. # # There are two ways to interact with the status. One is to simply get the current status; if its - # `available` property is true, then the SDK is able to evaluate user membership in Big Segments, + # `available` property is true, then the SDK is able to evaluate context membership in Big Segments, # and the `stale`` property indicates whether the data might be out of date. # # The other way is to subscribe to status change notifications. Applications may wish to know if diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 44849812..9570e0b2 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -62,8 +62,8 @@ def get_metadata @metadata end - def get_membership(user_hash) - @memberships[user_hash] + def get_membership(context_hash) + @memberships[context_hash] end def stop From 0046fdc0f7bfac3bb2f8d93e22269a497e12d540 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Tue, 20 Dec 2022 16:17:17 -0600 Subject: [PATCH 287/292] Drop support for ruby 2.6 (#227) Ruby 2.6 went EOL in March 2022. We originally didn't drop support for it as doing so would require dropping support for jRuby as well. However, jRuby recently released 9.4 which is Ruby 2.7+ compatible. --- .circleci/config.yml | 24 +++++++++--------------- .rubocop.yml | 1 + launchdarkly-server-sdk.gemspec | 4 ++-- spec/http_util.rb | 4 ++-- 4 files changed, 14 insertions(+), 19 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b1cc31db..6b6de30d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,7 +2,7 @@ version: 2.1 orbs: rubocop: hanachin/rubocop@0.0.6 - win: circleci/windows@4.1.1 + win: circleci/windows@5.0 workflows: version: 2 @@ -12,9 +12,6 @@ workflows: after-install-rubocop: - run: gem install rubocop-performance - build-test-windows - - build-test-linux: - name: Ruby 2.6 - docker-image: cimg/ruby:2.6 - build-test-linux: name: Ruby 2.7 docker-image: cimg/ruby:2.7 @@ -25,14 +22,13 @@ workflows: name: Ruby 3.1 docker-image: cimg/ruby:3.1 - build-test-linux: - name: JRuby 9.3 - docker-image: jruby:9.3-jdk + name: JRuby 9.4 + docker-image: jruby:9.4-jdk jruby: true jobs: build-test-windows: - executor: - name: win/default + executor: win/default steps: - checkout @@ -52,15 +48,14 @@ jobs: - run: name: "Setup Consul" command: | - iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.14.3/consul_1.14.3_windows_amd64.zip mkdir consul Expand-Archive -Path consul.zip -DestinationPath consul - sc.exe create "Consul" binPath="C:/Users/circleci/project/consul/consul.exe agent -dev" - run: name: "Run Consul" background: true working_directory: consul - command: sc.exe start "Consul" + command: .\consul.exe agent -dev -client 0.0.0.0 - run: name: "Setup Redis" @@ -74,12 +69,11 @@ jobs: name: "Run Redis" background: true working_directory: redis - command: | - ./redis-server --service-start + command: ./redis-server --service-start - run: ruby -v - - run: choco install msys2 --allow-downgrade -y --version 20200903.0.0 - - run: ridk.cmd exec pacman -S --noconfirm --needed base-devel mingw-w64-x86_64-toolchain + 
- run: choco install msys2 -y + - run: ridk.cmd install 3 # Install MINGW dev toolchain - run: gem install bundler -v 2.2.33 - run: bundle _2.2.33_ install diff --git a/.rubocop.yml b/.rubocop.yml index 94e5e93d..fe9b24ab 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -2,6 +2,7 @@ require: - rubocop-performance AllCops: + TargetRubyVersion: 2.7 Include: - lib/**/*.rb - spec/**/*.rb diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 78a1afcd..e45f9206 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -1,6 +1,6 @@ # coding: utf-8 -lib = File.expand_path("../lib", __FILE__) +lib = File.expand_path("lib", __dir__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) require "ldclient-rb/version" require "rake" @@ -19,7 +19,7 @@ Gem::Specification.new do |spec| spec.files = FileList["lib/**/*", "README.md", "LICENSE.txt"] spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.require_paths = ["lib"] - spec.required_ruby_version = ">= 2.6.0" + spec.required_ruby_version = ">= 2.7.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" spec.add_development_dependency "bundler", "2.2.33" diff --git a/spec/http_util.rb b/spec/http_util.rb index 99b1710f..32cfd0fe 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -127,6 +127,6 @@ def initialize(ports = {}) end def open(uri, timeout) - TCPSocket.new 'localhost', @ports[uri] + TCPSocket.new '127.0.0.1', @ports[uri] end -end \ No newline at end of file +end From ae2dce9d5f1b35352d5c59afb2083006b08f7b05 Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Wed, 21 Dec 2022 07:44:08 -0600 Subject: [PATCH 288/292] Update remaining references from user to contexts (#231) There are multiple places throughout the code where we are still referencing users. I have tried to update all the places where a rename seems reasonable or appropriate. There is still some work to do in the test flag builders, but that will be done in a subsequent commit. 
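Where a public name changes (for example the BigSegmentsConfig readers in the
diff below), the old user_* reader is kept as a deprecated alias of the new
context_* reader so existing callers keep working. A minimal sketch of that
backwards-compatibility pattern, with a made-up class name:

class RenamedConfig
  def initialize(context_cache_size: 1000)
    @context_cache_size = context_cache_size
  end

  # New, context-oriented reader.
  attr_reader :context_cache_size

  # Old name kept as a backwards-compatibility alias.
  alias :user_cache_size :context_cache_size
end

cfg = RenamedConfig.new(context_cache_size: 500)
cfg.context_cache_size  # => 500
cfg.user_cache_size     # => 500, via the alias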
--- contract-tests/client_entity.rb | 6 +- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/config.rb | 51 ++++++--- lib/ldclient-rb/context.rb | 4 - lib/ldclient-rb/evaluation_detail.rb | 12 +- lib/ldclient-rb/events.rb | 25 ++--- lib/ldclient-rb/flags_state.rb | 4 +- lib/ldclient-rb/impl/big_segments.rb | 12 +- lib/ldclient-rb/impl/evaluator.rb | 24 ++-- lib/ldclient-rb/impl/evaluator_operators.rb | 52 ++++----- lib/ldclient-rb/impl/event_summarizer.rb | 2 +- .../impl/integrations/dynamodb_impl.rb | 4 +- .../impl/integrations/redis_impl.rb | 14 +-- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/file_data.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- lib/ldclient-rb/ldclient.rb | 6 +- lib/ldclient-rb/user_filter.rb | 52 --------- spec/big_segment_store_spec_base.rb | 20 ++-- spec/context_spec.rb | 4 +- spec/events_test_util.rb | 8 +- spec/impl/big_segments_spec.rb | 84 +++++++------- spec/impl/context_spec.rb | 2 +- spec/impl/evaluator_big_segments_spec.rb | 34 +++--- spec/impl/evaluator_bucketing_spec.rb | 104 +++++++++--------- spec/impl/evaluator_segment_spec.rb | 66 +++++------ spec/impl/evaluator_spec_base.rb | 10 +- spec/integrations/dynamodb_stores_spec.rb | 6 +- spec/integrations/redis_stores_spec.rb | 6 +- spec/ldclient_evaluation_spec.rb | 8 +- spec/ldclient_events_spec.rb | 18 +-- spec/mock_components.rb | 9 +- spec/model_builders.rb | 6 +- spec/user_filter_spec.rb | 91 --------------- 34 files changed, 303 insertions(+), 448 deletions(-) delete mode 100644 lib/ldclient-rb/user_filter.rb delete mode 100644 spec/user_filter_spec.rb diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index d77af65a..487477cb 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -39,14 +39,14 @@ def initialize(log, config) big_segments = config[:bigSegments] store = BigSegmentStoreFixture.new(config[:bigSegments][:callbackUri]) - user_cache_time = big_segments[:userCacheTimeMs].nil? ? nil : big_segments[:userCacheTimeMs] / 1_000 + context_cache_time = big_segments[:userCacheTimeMs].nil? ? nil : big_segments[:userCacheTimeMs] / 1_000 status_poll_interval_ms = big_segments[:statusPollIntervalMs].nil? ? nil : big_segments[:statusPollIntervalMs] / 1_000 stale_after_ms = big_segments[:staleAfterMs].nil? ? nil : big_segments[:staleAfterMs] / 1_000 opts[:big_segments] = LaunchDarkly::BigSegmentsConfig.new( store: store, - user_cache_size: big_segments[:userCacheSize], - user_cache_time: user_cache_time, + context_cache_size: big_segments[:userCacheSize], + context_cache_time: context_cache_time, status_poll_interval: status_poll_interval_ms, stale_after: stale_after_ms ) diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 8ca766e5..37359e5a 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -20,7 +20,6 @@ module LaunchDarkly require "ldclient-rb/newrelic" require "ldclient-rb/stream" require "ldclient-rb/polling" -require "ldclient-rb/user_filter" require "ldclient-rb/simple_lru_cache" require "ldclient-rb/non_blocking_thread_pool" require "ldclient-rb/events" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index b3e64694..b54afa24 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -213,7 +213,7 @@ def offline? attr_reader :feature_store # - # True if all user attributes (other than the key) should be considered private. This means + # True if all context attributes (other than the key) should be considered private. 
This means # that the attribute values will not be sent to LaunchDarkly in analytics events and will not # appear on the LaunchDarkly dashboard. # @return [Boolean] @@ -222,13 +222,14 @@ def offline? attr_reader :all_attributes_private # - # A list of user attribute names that should always be considered private. This means that the + # A list of context attribute names that should always be considered private. This means that the # attribute values will not be sent to LaunchDarkly in analytics events and will not appear on # the LaunchDarkly dashboard. # - # You can also specify the same behavior for an individual flag evaluation by storing an array - # of attribute names in the `:privateAttributeNames` property (note camelcase name) of the - # user object. + # You can also specify the same behavior for an individual flag evaluation + # by providing the context object with a list of private attributes. + # + # @see https://docs.launchdarkly.com/sdk/features/user-context-config#using-private-attributes # # @return [Array] # @see #all_attributes_private @@ -291,7 +292,7 @@ def offline? # # Configuration options related to Big Segments. # - # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific type of segments. For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # # @return [BigSegmentsConfig] @@ -556,7 +557,7 @@ def self.minimum_diagnostic_recording_interval # # Configuration options related to Big Segments. # - # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific type of segments. For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # # If your application uses Big Segments, you will need to create a `BigSegmentsConfig` that at a @@ -570,8 +571,8 @@ def self.minimum_diagnostic_recording_interval # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # class BigSegmentsConfig - DEFAULT_USER_CACHE_SIZE = 1000 - DEFAULT_USER_CACHE_TIME = 5 + DEFAULT_CONTEXT_CACHE_SIZE = 1000 + DEFAULT_CONTEXT_CACHE_TIME = 5 DEFAULT_STATUS_POLL_INTERVAL = 5 DEFAULT_STALE_AFTER = 2 * 60 @@ -579,15 +580,15 @@ class BigSegmentsConfig # Constructor for setting Big Segments options. # # @param store [LaunchDarkly::Interfaces::BigSegmentStore] the data store implementation - # @param user_cache_size [Integer] See {#user_cache_size}. - # @param user_cache_time [Float] See {#user_cache_time}. + # @param context_cache_size [Integer] See {#context_cache_size}. + # @param context_cache_time [Float] See {#context_cache_time}. # @param status_poll_interval [Float] See {#status_poll_interval}. # @param stale_after [Float] See {#stale_after}. # - def initialize(store:, user_cache_size: nil, user_cache_time: nil, status_poll_interval: nil, stale_after: nil) + def initialize(store:, context_cache_size: nil, context_cache_time: nil, status_poll_interval: nil, stale_after: nil) @store = store - @user_cache_size = user_cache_size.nil? ? DEFAULT_USER_CACHE_SIZE : user_cache_size - @user_cache_time = user_cache_time.nil? ? DEFAULT_USER_CACHE_TIME : user_cache_time + @context_cache_size = context_cache_size.nil? ? DEFAULT_CONTEXT_CACHE_SIZE : context_cache_size + @context_cache_time = context_cache_time.nil? ? DEFAULT_CONTEXT_CACHE_TIME : context_cache_time @status_poll_interval = status_poll_interval.nil? ? 
DEFAULT_STATUS_POLL_INTERVAL : status_poll_interval @stale_after = stale_after.nil? ? DEFAULT_STALE_AFTER : stale_after end @@ -597,14 +598,28 @@ def initialize(store:, user_cache_size: nil, user_cache_time: nil, status_poll_i # @return [LaunchDarkly::Interfaces::BigSegmentStore] attr_reader :store - # The maximum number of users whose Big Segment state will be cached by the SDK at any given time. + # The maximum number of contexts whose Big Segment state will be cached by the SDK at any given time. + # @return [Integer] + attr_reader :context_cache_size + + # + # @deprecated Backwards compatibility alias for #context_cache_size + # # @return [Integer] - attr_reader :user_cache_size + # + alias :user_cache_size :context_cache_size - # The maximum length of time (in seconds) that the Big Segment state for a user will be cached + # The maximum length of time (in seconds) that the Big Segment state for a context will be cached # by the SDK. # @return [Float] - attr_reader :user_cache_time + attr_reader :context_cache_time + + # + # @deprecated Backwards compatibility alias for #context_cache_time + # + # @return [Float] + # + alias :user_cache_time :context_cache_time # The interval (in seconds) at which the SDK will poll the Big Segment store to make sure it is # available and to determine how long ago it was updated. diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index 5d7575c4..6941e521 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -6,8 +6,6 @@ module LaunchDarkly # LDContext is a collection of attributes that can be referenced in flag # evaluations and analytics events. # - # (TKTK - some conceptual text here, and/or a link to a docs page) - # # To create an LDContext of a single kind, such as a user, you may use # {LDContext#create} or {LDContext#with_key}. # @@ -319,8 +317,6 @@ def self.with_key(key, kind = KIND_DEFAULT) # {https://docs.launchdarkly.com/sdk/features/user-config SDK # documentation}. # - # TKTK: Update this link once we know what the new one will be. - # # @param data [Hash] # @return [LDContext] # diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 616faf7b..5d57aaff 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -71,13 +71,13 @@ class EvaluationReason # Value for {#kind} indicating that the flag was off and therefore returned its configured off value. OFF = :OFF - # Value for {#kind} indicating that the flag was on but the user did not match any targets or rules. + # Value for {#kind} indicating that the flag was on but the context did not match any targets or rules. FALLTHROUGH = :FALLTHROUGH - # Value for {#kind} indicating that the user key was specifically targeted for this flag. + # Value for {#kind} indicating that the context key was specifically targeted for this flag. TARGET_MATCH = :TARGET_MATCH - # Value for {#kind} indicating that the user matched one of the flag's rules. + # Value for {#kind} indicating that the context matched one of the flag's rules. RULE_MATCH = :RULE_MATCH # Value for {#kind} indicating that the flag was considered off because it had at least one @@ -100,8 +100,8 @@ class EvaluationReason # a rule specified a nonexistent variation. An error message will always be logged in this case. ERROR_MALFORMED_FLAG = :MALFORMED_FLAG - # Value for {#error_kind} indicating that the caller passed `nil` for the user parameter, or the - # user lacked a key. 
+ # Value for {#error_kind} indicating that the caller passed `nil` for the context parameter, or the + # context was invalid. ERROR_USER_NOT_SPECIFIED = :USER_NOT_SPECIFIED # Value for {#error_kind} indicating that an unexpected exception stopped flag evaluation. An error @@ -141,7 +141,7 @@ class EvaluationReason # querying at least one Big Segment. Otherwise it returns `nil`. Possible values are defined by # {BigSegmentsStatus}. # - # Big Segments are a specific kind of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific kind of context segments. For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # @return [Symbol] attr_reader :big_segments_status diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index b189f904..73a3b136 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -21,7 +21,7 @@ # On a separate worker thread, EventDispatcher consumes events from the inbox. These are considered # "input events" because they may or may not actually be sent to LaunchDarkly; most flag evaluation # events are not sent, but are counted and the counters become part of a single summary event. -# EventDispatcher updates those counters, creates "index" events for any users that have not been seen +# EventDispatcher updates those counters, creates "index" events for any contexts that have not been seen # recently, and places any events that will be sent to LaunchDarkly into the "outbox" queue. # # When it is time to flush events to LaunchDarkly, the contents of the outbox are handed off to @@ -44,11 +44,11 @@ def record_eval_event( ) end - def record_identify_event(user) + def record_identify_event(context) end def record_custom_event( - user, + context, key, data = nil, metric_value = nil @@ -75,7 +75,7 @@ class FlushMessage end # @private - class FlushUsersMessage + class FlushContextsMessage end # @private @@ -118,7 +118,7 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test end @flush_task.execute @contexts_flush_task = Concurrent::TimerTask.new(execution_interval: config.context_keys_flush_interval) do - post_to_inbox(FlushUsersMessage.new) + post_to_inbox(FlushContextsMessage.new) end @contexts_flush_task.execute if !diagnostic_accumulator.nil? @@ -159,12 +159,12 @@ def record_eval_event( default, track_events, debug_until, prereq_of)) end - def record_identify_event(user) - post_to_inbox(LaunchDarkly::Impl::IdentifyEvent.new(timestamp, user)) + def record_identify_event(context) + post_to_inbox(LaunchDarkly::Impl::IdentifyEvent.new(timestamp, context)) end - def record_custom_event(user, key, data = nil, metric_value = nil) - post_to_inbox(LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value)) + def record_custom_event(context, key, data = nil, metric_value = nil) + post_to_inbox(LaunchDarkly::Impl::CustomEvent.new(timestamp, context, key, data, metric_value)) end def flush @@ -211,10 +211,6 @@ def wait_until_inactive end end end - - private def user_to_context_kind(user) - (user.nil? || !user[:anonymous]) ? 
'user' : 'anonymousUser' - end end # @private @@ -256,7 +252,7 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) case message when FlushMessage trigger_flush(outbox, flush_workers) - when FlushUsersMessage + when FlushContextsMessage @context_keys.clear when DiagnosticEventMessage send_and_reset_diagnostics(outbox, diagnostic_event_workers) @@ -448,7 +444,6 @@ class EventOutputFormatter INDEX_KIND = 'index' DEBUG_KIND = 'debug' SUMMARY_KIND = 'summary' - ANONYMOUS_USER_CONTEXT_KIND = 'anonymousUser' def initialize(config) @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 4657cfc6..57807859 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -2,7 +2,7 @@ module LaunchDarkly # - # A snapshot of the state of all feature flags with regard to a specific user, generated by + # A snapshot of the state of all feature flags with regard to a specific context, generated by # calling the {LDClient#all_flags_state}. Serializing this object to JSON using # `JSON.generate` (or the `to_json` method) will produce the appropriate data structure for # bootstrapping the LaunchDarkly JavaScript client. @@ -46,7 +46,7 @@ def add_flag(flag_state, with_reasons, details_only_if_tracked) end # Returns true if this object contains a valid snapshot of feature flag state, or false if the - # state could not be computed (for instance, because the client was offline or there was no user). + # state could not be computed (for instance, because the client was offline or there was no context). def valid? @valid end diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index 5f407cf1..77a1b9b1 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -36,14 +36,14 @@ def stop @store.stop unless @store.nil? end - def get_user_membership(user_key) + def get_context_membership(context_key) return nil unless @store - membership = @cache[user_key] + membership = @cache[context_key] unless membership begin - membership = @store.get_membership(BigSegmentStoreManager.hash_for_user_key(user_key)) + membership = @store.get_membership(BigSegmentStoreManager.hash_for_context_key(context_key)) membership = EMPTY_MEMBERSHIP if membership.nil? - @cache[user_key] = membership + @cache[context_key] = membership rescue => e LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e) return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR) @@ -80,8 +80,8 @@ def stale?(timestamp) !timestamp || ((Impl::Util.current_time_millis - timestamp) >= @stale_after_millis) end - def self.hash_for_user_key(user_key) - Digest::SHA256.base64digest(user_key) + def self.hash_for_context_key(context_key) + Digest::SHA256.base64digest(context_key) end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index ce626cd2..ec71bd76 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -99,7 +99,7 @@ class Evaluator # @param get_flag [Function] called if the Evaluator needs to query a different flag from the one that it is # currently evaluating (i.e. 
a prerequisite flag); takes a single parameter, the flag key, and returns the # flag data - or nil if the flag is unknown or deleted - # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment. + # @param get_segment [Function] similar to `get_flag`, but is used to query a context segment. # @param logger [Logger] the client's logger def initialize(get_flag, get_segment, get_big_segments_membership, logger) @get_flag = get_flag @@ -114,8 +114,8 @@ def initialize(get_flag, get_segment, get_big_segments_membership, logger) # or retaining it anywhere, we don't have to be quite as strict about immutability. # # The big_segments_status and big_segments_membership properties are not used by the caller; they are used - # during an evaluation to cache the result of any Big Segments query that we've done for this user, because - # we don't want to do multiple queries for the same user if multiple Big Segments are referenced in the same + # during an evaluation to cache the result of any Big Segments query that we've done for this context, because + # we don't want to do multiple queries for the same context if multiple Big Segments are referenced in the same # evaluation. EvalResult = Struct.new( :detail, # the EvaluationDetail representing the evaluation result @@ -332,13 +332,13 @@ def self.make_big_segment_ref(segment) # method is visible for testing matched_context = context.individual_context(clause.context_kind || LaunchDarkly::LDContext::KIND_DEFAULT) return false if matched_context.nil? - user_val = matched_context.get_value_for_reference(clause.attribute) - return false if user_val.nil? + context_val = matched_context.get_value_for_reference(clause.attribute) + return false if context_val.nil? - result = if user_val.is_a? Enumerable - user_val.any? { |uv| match_any_clause_value(clause, uv) } + result = if context_val.is_a? Enumerable + context_val.any? { |uv| match_any_clause_value(clause, uv) } else - match_any_clause_value(clause, user_val) + match_any_clause_value(clause, context_val) end clause.negate ? !result : result end @@ -483,15 +483,15 @@ def self.make_big_segment_ref(segment) # method is visible for testing # @param [LaunchDarkly::Impl::Model::FeatureFlag] flag # @return [LaunchDarkly::EvaluationDetail, nil] private def check_targets(context, flag) - user_targets = flag.targets + targets = flag.targets context_targets = flag.context_targets if context_targets.empty? - unless user_targets.empty? + unless targets.empty? user_context = context.individual_context(LDContext::KIND_DEFAULT) return nil if user_context.nil? - user_targets.each do |target| + targets.each do |target| if target.values.include?(user_context.key) # rubocop:disable Performance/InefficientHashSearch return target.match_result end @@ -507,7 +507,7 @@ def self.make_big_segment_ref(segment) # method is visible for testing next if user_context.nil? user_key = user_context.key - user_targets.each do |user_target| + targets.each do |user_target| if user_target.variation == target.variation if user_target.values.include?(user_key) # rubocop:disable Performance/InefficientHashSearch return target.match_result diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 68e33a22..574c30e0 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -9,24 +9,24 @@ module EvaluatorOperators # Applies an operator to produce a boolean result. 
# # @param op [Symbol] one of the supported LaunchDarkly operators, as a symbol - # @param user_value the value of the user attribute that is referenced in the current clause (left-hand + # @param context_value the value of the context attribute that is referenced in the current clause (left-hand # side of the expression) - # @param clause_value the constant value that `user_value` is being compared to (right-hand side of the + # @param clause_value the constant value that `context_value` is being compared to (right-hand side of the # expression) # @return [Boolean] true if the expression should be considered a match; false if it is not a match, or # if the values cannot be compared because they are of the wrong types, or if the operator is unknown - def self.apply(op, user_value, clause_value) + def self.apply(op, context_value, clause_value) case op when :in - user_value == clause_value + context_value == clause_value when :startsWith - string_op(user_value, clause_value, lambda { |a, b| a.start_with? b }) + string_op(context_value, clause_value, lambda { |a, b| a.start_with? b }) when :endsWith - string_op(user_value, clause_value, lambda { |a, b| a.end_with? b }) + string_op(context_value, clause_value, lambda { |a, b| a.end_with? b }) when :contains - string_op(user_value, clause_value, lambda { |a, b| a.include? b }) + string_op(context_value, clause_value, lambda { |a, b| a.include? b }) when :matches - string_op(user_value, clause_value, lambda { |a, b| + string_op(context_value, clause_value, lambda { |a, b| begin re = Regexp.new b !re.match(a).nil? @@ -35,26 +35,26 @@ def self.apply(op, user_value, clause_value) end }) when :lessThan - numeric_op(user_value, clause_value, lambda { |a, b| a < b }) + numeric_op(context_value, clause_value, lambda { |a, b| a < b }) when :lessThanOrEqual - numeric_op(user_value, clause_value, lambda { |a, b| a <= b }) + numeric_op(context_value, clause_value, lambda { |a, b| a <= b }) when :greaterThan - numeric_op(user_value, clause_value, lambda { |a, b| a > b }) + numeric_op(context_value, clause_value, lambda { |a, b| a > b }) when :greaterThanOrEqual - numeric_op(user_value, clause_value, lambda { |a, b| a >= b }) + numeric_op(context_value, clause_value, lambda { |a, b| a >= b }) when :before - date_op(user_value, clause_value, lambda { |a, b| a < b }) + date_op(context_value, clause_value, lambda { |a, b| a < b }) when :after - date_op(user_value, clause_value, lambda { |a, b| a > b }) + date_op(context_value, clause_value, lambda { |a, b| a > b }) when :semVerEqual - semver_op(user_value, clause_value, lambda { |a, b| a == b }) + semver_op(context_value, clause_value, lambda { |a, b| a == b }) when :semVerLessThan - semver_op(user_value, clause_value, lambda { |a, b| a < b }) + semver_op(context_value, clause_value, lambda { |a, b| a < b }) when :semVerGreaterThan - semver_op(user_value, clause_value, lambda { |a, b| a > b }) + semver_op(context_value, clause_value, lambda { |a, b| a > b }) when :segmentMatch # We should never reach this; it can't be evaluated based on just two parameters, because it requires - # looking up the segment from the data store. Instead, we special-case this operator in clause_match_user. + # looking up the segment from the data store. Instead, we special-case this operator in clause_match_context. 
false else false @@ -66,16 +66,16 @@ def self.apply(op, user_value, clause_value) NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :NUMERIC_VERSION_COMPONENTS_REGEX - def self.string_op(user_value, clause_value, fn) - (user_value.is_a? String) && (clause_value.is_a? String) && fn.call(user_value, clause_value) + def self.string_op(context_value, clause_value, fn) + (context_value.is_a? String) && (clause_value.is_a? String) && fn.call(context_value, clause_value) end - def self.numeric_op(user_value, clause_value, fn) - (user_value.is_a? Numeric) && (clause_value.is_a? Numeric) && fn.call(user_value, clause_value) + def self.numeric_op(context_value, clause_value, fn) + (context_value.is_a? Numeric) && (clause_value.is_a? Numeric) && fn.call(context_value, clause_value) end - def self.date_op(user_value, clause_value, fn) - ud = to_date(user_value) + def self.date_op(context_value, clause_value, fn) + ud = to_date(context_value) if !ud.nil? cd = to_date(clause_value) !cd.nil? && fn.call(ud, cd) @@ -84,8 +84,8 @@ def self.date_op(user_value, clause_value, fn) end end - def self.semver_op(user_value, clause_value, fn) - uv = to_semver(user_value) + def self.semver_op(context_value, clause_value, fn) + uv = to_semver(context_value) if !uv.nil? cv = to_semver(clause_value) !cv.nil? && fn.call(uv, cv) diff --git a/lib/ldclient-rb/impl/event_summarizer.rb b/lib/ldclient-rb/impl/event_summarizer.rb index 32703e69..4109f80a 100644 --- a/lib/ldclient-rb/impl/event_summarizer.rb +++ b/lib/ldclient-rb/impl/event_summarizer.rb @@ -10,7 +10,7 @@ module Impl EventSummaryFlagVariationCounter = Struct.new(:value, :count) # Manages the state of summarizable information for the EventProcessor, including the - # event counters and user deduplication. Note that the methods of this class are + # event counters and context deduplication. Note that the methods of this class are # deliberately not thread-safe; the EventProcessor is responsible for enforcing # synchronization across both the summarizer and the event queue. 
class EventSummarizer diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index b2660a3b..0995b36b 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -224,7 +224,7 @@ def unmarshal_item(kind, item) class DynamoDBBigSegmentStore < DynamoDBStoreImplBase KEY_METADATA = 'big_segments_metadata' - KEY_USER_DATA = 'big_segments_user' + KEY_CONTEXT_DATA = 'big_segments_user' ATTR_SYNC_TIME = 'synchronizedOn' ATTR_INCLUDED = 'included' ATTR_EXCLUDED = 'excluded' @@ -255,7 +255,7 @@ def get_membership(context_hash) data = @client.get_item( table_name: @table_name, key: { - PARTITION_KEY => @prefix + KEY_USER_DATA, + PARTITION_KEY => @prefix + KEY_CONTEXT_DATA, SORT_KEY => context_hash, }) return nil unless data.item diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index a9d55579..14739e97 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -109,7 +109,7 @@ def initialize(opts) @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented - @stopped = Concurrent::AtomicBoolean.new(false) + @stopped = Concurrent::AtomicBoolean.new() with_connection do |redis| @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}") @@ -140,9 +140,7 @@ def stop redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url end max_connections = opts[:max_connections] || 16 - opts[:pool] || ConnectionPool.new(size: max_connections) do - ::Redis.new(redis_opts) - end + opts[:pool] || ConnectionPool.new(size: max_connections) { ::Redis.new(redis_opts) } end end @@ -258,8 +256,8 @@ def get_redis(redis, kind, key) # class RedisBigSegmentStore < RedisStoreImplBase KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on' - KEY_USER_INCLUDE = ':big_segment_include:' - KEY_USER_EXCLUDE = ':big_segment_exclude:' + KEY_CONTEXT_INCLUDE = ':big_segment_include:' + KEY_CONTEXT_EXCLUDE = ':big_segment_exclude:' def description "RedisBigSegmentStore" @@ -272,8 +270,8 @@ def get_metadata def get_membership(context_hash) with_connection do |redis| - included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + context_hash) - excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + context_hash) + included_refs = redis.smembers(@prefix + KEY_CONTEXT_INCLUDE + context_hash) + excluded_refs = redis.smembers(@prefix + KEY_CONTEXT_EXCLUDE + context_hash) if !included_refs && !excluded_refs nil else diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 29aedcdb..52e05cf3 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -54,7 +54,7 @@ def self.new_feature_store(table_name, opts = {}) # # Creates a DynamoDB-backed Big Segment store. # - # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific type of segments. 
For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or diff --git a/lib/ldclient-rb/integrations/file_data.rb b/lib/ldclient-rb/integrations/file_data.rb index 31aa235b..4c356667 100644 --- a/lib/ldclient-rb/integrations/file_data.rb +++ b/lib/ldclient-rb/integrations/file_data.rb @@ -25,7 +25,7 @@ module Integrations # # - `flags`: Feature flag definitions. # - `flagValues`: Simplified feature flags that contain only a value. - # - `segments`: User segment definitions. + # - `segments`: Context segment definitions. # # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application # and is subject to change. Rather than trying to construct these objects yourself, it is simpler diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 10f977a0..0e5bf68c 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -65,7 +65,7 @@ def self.new_feature_store(opts = {}) # # Creates a Redis-backed Big Segment store. # - # Big Segments are a specific type of user segments. For more information, read the LaunchDarkly + # Big Segments are a specific type of segments. For more information, read the LaunchDarkly # documentation: https://docs.launchdarkly.com/home/users/big-segments # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ffd97440..79a4f49c 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -59,7 +59,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) get_flag = lambda { |key| @store.get(FEATURES, key) } get_segment = lambda { |key| @store.get(SEGMENTS, key) } - get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_user_membership(key) } + get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_context_membership(key) } @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, get_big_segments_membership, @config.logger) if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? @@ -230,8 +230,8 @@ def identify(context) end # - # Tracks that a user performed an event. This method creates a "custom" analytics event - # containing the specified event name (key), user properties, and optional data. + # Tracks that a context performed an event. This method creates a "custom" analytics event + # containing the specified event name (key), context properties, and optional data. # # Note that event delivery is asynchronous, so the event may not actually be sent # until later; see {#flush}. diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb deleted file mode 100644 index 638bb4d0..00000000 --- a/lib/ldclient-rb/user_filter.rb +++ /dev/null @@ -1,52 +0,0 @@ -require "json" -require "set" - -module LaunchDarkly - # @private - class UserFilter - def initialize(config) - @all_attributes_private = config.all_attributes_private - @private_attribute_names = Set.new(config.private_attribute_names.map(&:to_sym)) - end - - def transform_user_props(user_props) - return nil if user_props.nil? 
- - user_private_attrs = Set.new((user_props[:privateAttributeNames] || []).map(&:to_sym)) - - filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS) - custom = user_props[:custom] - unless custom.nil? - filtered_user_props[:custom], removed_custom = filter_values(custom, user_private_attrs) - removed.merge(removed_custom) - end - - unless removed.empty? - # note, :privateAttributeNames is what the developer sets; :privateAttrs is what we send to the server - filtered_user_props[:privateAttrs] = removed.to_a.sort.map { |s| s.to_s } - end - filtered_user_props - end - - private - - ALLOWED_TOP_LEVEL_KEYS = Set.new([:key, :secondary, :ip, :country, :email, - :firstName, :lastName, :avatar, :name, :anonymous, :custom]) - IGNORED_TOP_LEVEL_KEYS = Set.new([:custom, :key, :anonymous]) - - def filter_values(props, user_private_attrs, allowed_keys = [], keys_to_leave_as_is = []) - is_valid_key = lambda { |key| allowed_keys.empty? || allowed_keys.include?(key) } - removed_keys = Set.new(props.keys.select { |key| - # Note that if is_valid_key returns false, we don't explicitly *remove* the key (which would place - # it in the privateAttrs list) - we just silently drop it when we calculate filtered_hash. - is_valid_key.call(key) && !keys_to_leave_as_is.include?(key) && private_attr?(key, user_private_attrs) - }) - filtered_hash = props.select { |key, value| !removed_keys.include?(key) && is_valid_key.call(key) } - [filtered_hash, removed_keys] - end - - def private_attr?(name, user_private_attrs) - @all_attributes_private || @private_attribute_names.include?(name) || user_private_attrs.include?(name) - end - end -end diff --git a/spec/big_segment_store_spec_base.rb b/spec/big_segment_store_spec_base.rb index 5fb874b5..b7c627f1 100644 --- a/spec/big_segment_store_spec_base.rb +++ b/spec/big_segment_store_spec_base.rb @@ -17,8 +17,8 @@ # def set_big_segments_metadata(metadata) # # write the metadata to the database, taking @options[:prefix] into account # end -# def set_big_segments(user_hash, includes, excludes) -# # update the include and exclude lists for a user, taking @options[:prefix] into account +# def set_big_segments(context_hash, includes, excludes) +# # update the include and exclude lists for a context, taking @options[:prefix] into account # end # end # @@ -38,7 +38,7 @@ # The following tests are done for each permutation of (default prefix/specified prefix) let(:store_tester) { store_tester_class.new(prefix_options.merge(base_options)) } - let(:fake_user_hash) { "userhash" } + let(:fake_context_hash) { "contexthash" } def with_empty_store store_tester.clear_data @@ -73,7 +73,7 @@ def with_empty_store context "get_membership" do it "not found" do with_empty_store do |store| - membership = store.get_membership(fake_user_hash) + membership = store.get_membership(fake_context_hash) membership = {} if membership.nil? 
expect(membership).to eq({}) @@ -82,27 +82,27 @@ def with_empty_store it "includes only" do with_empty_store do |store| - store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], []) + store_tester.set_big_segments(fake_context_hash, ["key1", "key2"], []) - membership = store.get_membership(fake_user_hash) + membership = store.get_membership(fake_context_hash) expect(membership).to eq({ "key1" => true, "key2" => true }) end end it "excludes only" do with_empty_store do |store| - store_tester.set_big_segments(fake_user_hash, [], ["key1", "key2"]) + store_tester.set_big_segments(fake_context_hash, [], ["key1", "key2"]) - membership = store.get_membership(fake_user_hash) + membership = store.get_membership(fake_context_hash) expect(membership).to eq({ "key1" => false, "key2" => false }) end end it "includes and excludes" do with_empty_store do |store| - store_tester.set_big_segments(fake_user_hash, ["key1", "key2"], ["key2", "key3"]) + store_tester.set_big_segments(fake_context_hash, ["key1", "key2"], ["key2", "key3"]) - membership = store.get_membership(fake_user_hash) + membership = store.get_membership(fake_context_hash) expect(membership).to eq({ "key1" => true, "key2" => true, "key3" => false }) # include of key2 overrides exclude end end diff --git a/spec/context_spec.rb b/spec/context_spec.rb index 729f49d8..ae1c9cfd 100644 --- a/spec/context_spec.rb +++ b/spec/context_spec.rb @@ -68,7 +68,7 @@ expect(subject.create(context).valid?).to be false end - it "overwrite custom properties with built-ins when collisons occur" do + it "overwrite custom properties with built-ins when collisions occur" do context = { key: "user-key", ip: "192.168.1.1", @@ -161,7 +161,7 @@ end it "can be created from a hash" do - data = {kind: "multi", user: {key: "user-key"}, org: {key: "org-key"}} + data = { kind: "multi", user_context: { key: "user-key"}, org: { key: "org-key"}} multi_context = subject.create(data) expect(multi_context).to be_a(LaunchDarkly::LDContext) diff --git a/spec/events_test_util.rb b/spec/events_test_util.rb index 90fab2a8..672360b3 100644 --- a/spec/events_test_util.rb +++ b/spec/events_test_util.rb @@ -6,10 +6,10 @@ def make_eval_event(timestamp, context, key, version = nil, variation = nil, val default, track_events, debug_until, prereq_of) end -def make_identify_event(timestamp, user) - LaunchDarkly::Impl::IdentifyEvent.new(timestamp, user) +def make_identify_event(timestamp, context) + LaunchDarkly::Impl::IdentifyEvent.new(timestamp, context) end -def make_custom_event(timestamp, user, key, data = nil, metric_value = nil) - LaunchDarkly::Impl::CustomEvent.new(timestamp, user, key, data, metric_value) +def make_custom_event(timestamp, context, key, data = nil, metric_value = nil) + LaunchDarkly::Impl::CustomEvent.new(timestamp, context, key, data, metric_value) end diff --git a/spec/impl/big_segments_spec.rb b/spec/impl/big_segments_spec.rb index 2eebf843..920cf941 100644 --- a/spec/impl/big_segments_spec.rb +++ b/spec/impl/big_segments_spec.rb @@ -11,8 +11,8 @@ module Impl describe BigSegmentStoreManager do subject { BigSegmentStoreManager } - let(:user_key) { 'userkey' } - let(:user_hash) { subject.hash_for_user_key(user_key) } + let(:context_key) { 'contextkey' } + let(:context_hash) { subject.hash_for_context_key(context_key) } let(:null_logger) { double.as_null_object } def always_up_to_date @@ -37,12 +37,12 @@ def with_manager(config) expected_membership = { 'key1' => true, 'key2' => true } store = double expect(store).to 
receive(:get_metadata).at_least(:once).and_return(always_up_to_date) - expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + expect(store).to receive(:get_membership).with(context_hash).and_return(expected_membership) allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end @@ -50,28 +50,28 @@ def with_manager(config) expected_membership = { 'key1' => true, 'key2' => true } store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) - expect(store).to receive(:get_membership).with(user_hash).once.and_return(expected_membership) + expect(store).to receive(:get_membership).with(context_hash).once.and_return(expected_membership) # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) - expect(m.get_user_membership(user_key)).to eq(expected_result) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end it "can cache a nil result" do store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) - expect(store).to receive(:get_membership).with(user_hash).once.and_return(nil) + expect(store).to receive(:get_membership).with(context_hash).once.and_return(nil) # the ".once" on this mock expectation is what verifies that the cache is working; there should only be one query allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| expected_result = BigSegmentMembershipResult.new({}, BigSegmentsStatus::HEALTHY) - expect(m.get_user_membership(user_key)).to eq(expected_result) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end @@ -79,15 +79,15 @@ def with_manager(config) expected_membership = { 'key1' => true, 'key2' => true } store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) - expect(store).to receive(:get_membership).with(user_hash).twice.and_return(expected_membership) + expect(store).to receive(:get_membership).with(context_hash).twice.and_return(expected_membership) # the ".twice" on this mock expectation is what verifies that the cached result expired allow(store).to receive(:stop) - with_manager(BigSegmentsConfig.new(store: store, user_cache_time: 0.01)) do |m| + with_manager(BigSegmentsConfig.new(store: store, context_cache_time: 0.01)) do |m| expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::HEALTHY) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) sleep(0.1) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end @@ -95,12 +95,12 @@ def with_manager(config) 
expected_membership = { 'key1' => true, 'key2' => true } store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_stale) - expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + expect(store).to receive(:get_membership).with(context_hash).and_return(expected_membership) allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end @@ -108,58 +108,58 @@ def with_manager(config) expected_membership = { 'key1' => true, 'key2' => true } store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(nil) - expect(store).to receive(:get_membership).with(user_hash).and_return(expected_membership) + expect(store).to receive(:get_membership).with(context_hash).and_return(expected_membership) allow(store).to receive(:stop) with_manager(BigSegmentsConfig.new(store: store)) do |m| expected_result = BigSegmentMembershipResult.new(expected_membership, BigSegmentsStatus::STALE) - expect(m.get_user_membership(user_key)).to eq(expected_result) + expect(m.get_context_membership(context_key)).to eq(expected_result) end end - it "least recent user is evicted from cache" do - user_key_1, user_key_2, user_key_3 = 'userkey1', 'userkey2', 'userkey3' - user_hash_1, user_hash_2, user_hash_3 = subject.hash_for_user_key(user_key_1), - subject.hash_for_user_key(user_key_2), subject.hash_for_user_key(user_key_3) + it "least recent context is evicted from cache" do + context_key_1, context_key_2, context_key_3 = 'contextkey1', 'contextkey2', 'contextkey3' + context_hash_1, context_hash_2, context_hash_3 = subject.hash_for_context_key(context_key_1), + subject.hash_for_context_key(context_key_2), subject.hash_for_context_key(context_key_3) memberships = { - user_hash_1 => { 'seg1': true }, - user_hash_2 => { 'seg2': true }, - user_hash_3 => { 'seg3': true }, + context_hash_1 => { 'seg1': true }, + context_hash_2 => { 'seg2': true }, + context_hash_3 => { 'seg3': true }, } - queried_users = [] + queries_contexts = [] store = double expect(store).to receive(:get_metadata).at_least(:once).and_return(always_up_to_date) expect(store).to receive(:get_membership).exactly(4).times do |key| - queried_users << key + queries_contexts << key memberships[key] end allow(store).to receive(:stop) - with_manager(BigSegmentsConfig.new(store: store, user_cache_size: 2)) do |m| - result1 = m.get_user_membership(user_key_1) - result2 = m.get_user_membership(user_key_2) - result3 = m.get_user_membership(user_key_3) - expect(result1).to eq(BigSegmentMembershipResult.new(memberships[user_hash_1], BigSegmentsStatus::HEALTHY)) - expect(result2).to eq(BigSegmentMembershipResult.new(memberships[user_hash_2], BigSegmentsStatus::HEALTHY)) - expect(result3).to eq(BigSegmentMembershipResult.new(memberships[user_hash_3], BigSegmentsStatus::HEALTHY)) + with_manager(BigSegmentsConfig.new(store: store, context_cache_size: 2)) do |m| + result1 = m.get_context_membership(context_key_1) + result2 = m.get_context_membership(context_key_2) + result3 = m.get_context_membership(context_key_3) + expect(result1).to eq(BigSegmentMembershipResult.new(memberships[context_hash_1], BigSegmentsStatus::HEALTHY)) + expect(result2).to eq(BigSegmentMembershipResult.new(memberships[context_hash_2], BigSegmentsStatus::HEALTHY)) + 
expect(result3).to eq(BigSegmentMembershipResult.new(memberships[context_hash_3], BigSegmentsStatus::HEALTHY)) - expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + expect(queries_contexts).to eq([context_hash_1, context_hash_2, context_hash_3]) - # Since the capacity is only 2 and user_key_1 was the least recently used, that key should be - # evicted by the user_key_3 query. Now only user_key_2 and user_key_3 are in the cache, and + # Since the capacity is only 2 and context_key_1 was the least recently used, that key should be + # evicted by the context_key_3 query. Now only context_key_2 and context_key_3 are in the cache, and # querying them again should not cause a new query to the store. - result2a = m.get_user_membership(user_key_2) - result3a = m.get_user_membership(user_key_3) + result2a = m.get_context_membership(context_key_2) + result3a = m.get_context_membership(context_key_3) expect(result2a).to eq(result2) expect(result3a).to eq(result3) - expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3]) + expect(queries_contexts).to eq([context_hash_1, context_hash_2, context_hash_3]) - result1a = m.get_user_membership(user_key_1) + result1a = m.get_context_membership(context_key_1) expect(result1a).to eq(result1) - expect(queried_users).to eq([user_hash_1, user_hash_2, user_hash_3, user_hash_1]) + expect(queries_contexts).to eq([context_hash_1, context_hash_2, context_hash_3, context_hash_1]) end end end diff --git a/spec/impl/context_spec.rb b/spec/impl/context_spec.rb index 4dba4df1..ce9d6ff5 100644 --- a/spec/impl/context_spec.rb +++ b/spec/impl/context_spec.rb @@ -5,7 +5,7 @@ it "can validate kind correctly" do test_cases = [ - [:user, LaunchDarkly::Impl::Context::ERR_KIND_NON_STRING], + [:user_context, LaunchDarkly::Impl::Context::ERR_KIND_NON_STRING], ["kind", LaunchDarkly::Impl::Context::ERR_KIND_CANNOT_BE_KIND], ["multi", LaunchDarkly::Impl::Context::ERR_KIND_CANNOT_BE_MULTI], ["user@type", LaunchDarkly::Impl::Context::ERR_KIND_INVALID_CHARS], diff --git a/spec/impl/evaluator_big_segments_spec.rb b/spec/impl/evaluator_big_segments_spec.rb index 4fca96ca..3809f61f 100644 --- a/spec/impl/evaluator_big_segments_spec.rb +++ b/spec/impl/evaluator_big_segments_spec.rb @@ -10,7 +10,7 @@ module Impl it "segment is not matched if there is no way to query it" do segment = Segments.from_hash({ key: 'test', - included: [ user.key ], # included should be ignored for a big segment + included: [user_context.key ], # included should be ignored for a big segment version: 1, unbounded: true, generation: 1, @@ -19,7 +19,7 @@ module Impl .with_segment(segment) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::NOT_CONFIGURED) end @@ -27,7 +27,7 @@ module Impl it "segment with no generation is not matched" do segment = Segments.from_hash({ key: 'test', - included: [ user.key ], # included should be ignored for a big segment + included: [user_context.key ], # included should be ignored for a big segment version: 1, unbounded: true, }) @@ -35,7 +35,7 @@ module Impl .with_segment(segment) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to 
be(BigSegmentsStatus::NOT_CONFIGURED) end @@ -49,10 +49,10 @@ module Impl }) e = EvaluatorBuilder.new(logger) .with_segment(segment) - .with_big_segment_for_user(user, segment, true) + .with_big_segment_for_context(user_context, segment, true) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -64,15 +64,15 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ Clauses.match_user(user) ] }, + { clauses: [ Clauses.match_context(user_context) ] }, ], }) e = EvaluatorBuilder.new(logger) .with_segment(segment) - .with_big_segment_for_user(user, segment, nil) + .with_big_segment_for_context(user_context, segment, nil) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -84,15 +84,15 @@ module Impl unbounded: true, generation: 2, rules: [ - { clauses: [ Clauses.match_user(user) ] }, + { clauses: [ Clauses.match_context(user_context) ] }, ], }) e = EvaluatorBuilder.new(logger) .with_segment(segment) - .with_big_segment_for_user(user, segment, false) + .with_big_segment_for_context(user_context, segment, false) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be false expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) end @@ -106,11 +106,11 @@ module Impl }) e = EvaluatorBuilder.new(logger) .with_segment(segment) - .with_big_segment_for_user(user, segment, true) + .with_big_segment_for_context(user_context, segment, true) .with_big_segments_status(BigSegmentsStatus::STALE) .build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::STALE) end @@ -142,17 +142,17 @@ module Impl queries = [] e = EvaluatorBuilder.new(logger) .with_segment(segment1).with_segment(segment2) - .with_big_segment_for_user(user, segment2, true) + .with_big_segment_for_context(user_context, segment2, true) .record_big_segments_queries(queries) .build # The membership deliberately does not include segment1, because we want the first rule to be # a non-match so that it will continue on and check segment2 as well. 
- result = e.evaluate(flag, user) + result = e.evaluate(flag, user_context) expect(result.detail.value).to be true expect(result.detail.reason.big_segments_status).to be(BigSegmentsStatus::HEALTHY) - expect(queries).to eq([ user.key ]) + expect(queries).to eq([user_context.key ]) end end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index e7878d0b..005dd888 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -4,120 +4,120 @@ describe LaunchDarkly::Impl::EvaluatorBucketing do subject { LaunchDarkly::Impl::EvaluatorBucketing } - describe "bucket_user" do + describe "bucket_context" do describe "seed exists" do let(:seed) { 61 } it "returns the expected bucket values for seed" do - user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) + context = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.09801207) - user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) + context = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.14483777) - user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) + context = LaunchDarkly::LDContext.create({ key: "userKeyC" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.9242641) end it "returns the same bucket regardless of hashKey and salt" do - user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket1 = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user, user.kind, "hashKey1", "key", "saltyB", seed) - bucket3 = subject.bucket_context(user, user.kind, "hashKey2", "key", "saltyC", seed) + context = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket1 = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(context, context.kind, "hashKey1", "key", "saltyB", seed) + bucket3 = subject.bucket_context(context, context.kind, "hashKey2", "key", "saltyC", seed) expect(bucket1).to eq(bucket2) expect(bucket2).to eq(bucket3) end it "returns a different bucket if the seed is not the same" do - user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket1 = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user, user.kind, "hashKey1", "key", "saltyB", seed+1) + context = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket1 = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(context, context.kind, "hashKey1", "key", "saltyB", seed+1) expect(bucket1).to_not eq(bucket2) end - it "returns a different bucket if the user is not the same" do - user1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - user2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket1 = subject.bucket_context(user1, user1.kind, "hashKey", "key", "saltyA", seed) - bucket2 = subject.bucket_context(user2, 
user2.kind, "hashKey1", "key", "saltyB", seed) + it "returns a different bucket if the context is not the same" do + context1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + context2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket1 = subject.bucket_context(context1, context1.kind, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_context(context2, context2.kind, "hashKey1", "key", "saltyB", seed) expect(bucket1).to_not eq(bucket2) end end it "gets expected bucket values for specific keys" do - user = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) + context = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.42157587) - user = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) + context = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.6708485) - user = LaunchDarkly::LDContext.create({ key: "userKeyC" }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "key", "saltyA", nil) + context = LaunchDarkly::LDContext.create({ key: "userKeyC" }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.10343106) end it "treats the bucket by attribute as a reference when a context kind isn't specified" do - user = LaunchDarkly::LDContext.create({ key: "userKeyB", kind: "user", address: { street: "123 Easy St", city: "Anytown" } }) - bucket = subject.bucket_context(user, user.kind, "hashKey", "/address/street", "saltyA", nil) + context = LaunchDarkly::LDContext.create({ key: "userKeyB", kind: "user", address: { street: "123 Easy St", city: "Anytown" } }) + bucket = subject.bucket_context(context, context.kind, "hashKey", "/address/street", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.56809287) - bucket = subject.bucket_context(user, nil, "hashKey", "/address/street", "saltyA", nil) + bucket = subject.bucket_context(context, nil, "hashKey", "/address/street", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0) end it "can bucket by int value (equivalent to string)" do - user = LaunchDarkly::LDContext.create({ + context = LaunchDarkly::LDContext.create({ key: "userkey", custom: { stringAttr: "33333", intAttr: 33333, }, }) - stringResult = subject.bucket_context(user, user.kind, "hashKey", "stringAttr", "saltyA", nil) - intResult = subject.bucket_context(user, user.kind, "hashKey", "intAttr", "saltyA", nil) + stringResult = subject.bucket_context(context, context.kind, "hashKey", "stringAttr", "saltyA", nil) + intResult = subject.bucket_context(context, context.kind, "hashKey", "intAttr", "saltyA", nil) expect(intResult).to be_within(0.0000001).of(0.54771423) expect(intResult).to eq(stringResult) end it "cannot bucket by float value" do - user = LaunchDarkly::LDContext.create({ + context = LaunchDarkly::LDContext.create({ key: "userkey", custom: { floatAttr: 33.5, }, }) - result = subject.bucket_context(user, user.kind, "hashKey", "floatAttr", "saltyA", nil) + result = subject.bucket_context(context, context.kind, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) end it "cannot bucket by bool value" do - 
user = LaunchDarkly::LDContext.create({ + context = LaunchDarkly::LDContext.create({ key: "userkey", custom: { boolAttr: true, }, }) - result = subject.bucket_context(user, user.kind, "hashKey", "boolAttr", "saltyA", nil) + result = subject.bucket_context(context, context.kind, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) end end - describe "variation_index_for_user" do + describe "variation_index_for_context" do context "rollout is not an experiment" do it "matches bucket" do - user = LaunchDarkly::LDContext.create({ key: "userkey" }) + context = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_context(context, context.kind, flag_key, "key", salt, nil) * 100000).truncate() expect(bucket_value).to be > 0 expect(bucket_value).to be < 100000 @@ -134,17 +134,17 @@ }) flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context) expect(result_variation).to be matched_variation expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do - user = LaunchDarkly::LDContext.create({ key: "userkey" }) + context = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" - bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_context(context, context.kind, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, @@ -155,7 +155,7 @@ }) flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context) expect(result_variation).to be 0 expect(inExperiment).to be(false) end @@ -163,10 +163,10 @@ end context "rollout is an experiment" do - it "returns whether user is in the experiment or not" do - user1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) - user2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) - user3 = LaunchDarkly::LDContext.create({ key: "userKeyC" }) + it "returns whether context is in the experiment or not" do + context1 = LaunchDarkly::LDContext.create({ key: "userKeyA" }) + context2 = LaunchDarkly::LDContext.create({ key: "userKeyB" }) + context3 = LaunchDarkly::LDContext.create({ key: "userKeyC" }) flag_key = "flagkey" salt = "salt" seed = 61 @@ -183,24 +183,24 @@ }) flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user1) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context1) expect(result_variation).to be(0) expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user2) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context2) expect(result_variation).to be(2) 
expect(inExperiment).to be(true) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user3) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context3) expect(result_variation).to be(0) expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do - user = LaunchDarkly::LDContext.create({ key: "userkey" }) + context = LaunchDarkly::LDContext.create({ key: "userkey" }) flag_key = "flagkey" salt = "salt" seed = 61 - bucket_value = (subject.bucket_context(user, user.kind, flag_key, "key", salt, seed) * 100000).truncate() + bucket_value = (subject.bucket_context(context, context.kind, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value vr = LaunchDarkly::Impl::Model::VariationOrRollout.new(nil, @@ -213,7 +213,7 @@ }) flag = Flags.from_hash({ key: flag_key, salt: salt }) - result_variation, inExperiment = subject.variation_index_for_context(flag, vr, user) + result_variation, inExperiment = subject.variation_index_for_context(flag, vr, context) expect(result_variation).to be 0 expect(inExperiment).to be(true) end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb index 0aac6890..bc23c3d7 100644 --- a/spec/impl/evaluator_segment_spec.rb +++ b/spec/impl/evaluator_segment_spec.rb @@ -22,19 +22,19 @@ def test_segment_match(segment, context) } e = EvaluatorBuilder.new(logger).with_segment(segment).build flag = Flags.boolean_flag_with_clauses(Clauses.match_segment(segment)) - expect(e.evaluate(flag, user).detail.value).to be true + expect(e.evaluate(flag, user_context).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do e = EvaluatorBuilder.new(logger).with_unknown_segment('segkey').build clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = Flags.boolean_flag_with_clauses(clause) - expect(e.evaluate(flag, user).detail.value).to be false + expect(e.evaluate(flag, user_context).detail.value).to be false end - it 'explicitly includes user' do - segment = SegmentBuilder.new('segkey').included(user.key).build - expect(test_segment_match(segment, user)).to be true + it 'explicitly includes context' do + segment = SegmentBuilder.new('segkey').included(user_context.key).build + expect(test_segment_match(segment, user_context)).to be true end it 'explicitly includes a specific context kind' do @@ -51,9 +51,9 @@ def test_segment_match(segment, context) expect(test_segment_match(segment, multi_context)).to be true end - it 'explicitly excludes user' do - segment = SegmentBuilder.new('segkey').excluded(user.key).build - expect(test_segment_match(segment, user)).to be false + it 'explicitly excludes context' do + segment = SegmentBuilder.new('segkey').excluded(user_context.key).build + expect(test_segment_match(segment, user_context)).to be false end it 'explicitly excludes a specific context kind' do @@ -61,8 +61,8 @@ def test_segment_match(segment, context) device_context = LDContext::create({ key: "devicekey", kind: "device" }) multi_context = LDContext::create_multi([org_context, device_context]) - org_clause = Clauses.match_user(org_context, :key) - device_clause = Clauses.match_user(device_context, :key) + org_clause = Clauses.match_context(org_context, :key) + device_clause = Clauses.match_context(device_context, :key) segment = SegmentBuilder.new('segkey') .excluded_contexts("org", "orgkey") .rule({ clauses: [ org_clause 
]}) @@ -74,69 +74,69 @@ def test_segment_match(segment, context) expect(test_segment_match(segment, multi_context)).to be false end - it 'both includes and excludes user; include takes priority' do - segment = SegmentBuilder.new('segkey').included(user.key).excluded(user.key).build - expect(test_segment_match(segment, user)).to be true + it 'both includes and excludes context; include takes priority' do + segment = SegmentBuilder.new('segkey').included(user_context.key).excluded(user_context.key).build + expect(test_segment_match(segment, user_context)).to be true end - it 'matches user by rule when weight is absent' do - segClause = Clauses.match_user(user, :email) + it 'matches context by rule when weight is absent' do + segClause = Clauses.match_context(user_context, :email) segRule = { clauses: [ segClause ], } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be true + expect(test_segment_match(segment, user_context)).to be true end - it 'matches user by rule when weight is nil' do - segClause = Clauses.match_user(user, :email) + it 'matches context by rule when weight is nil' do + segClause = Clauses.match_context(user_context, :email) segRule = { clauses: [ segClause ], weight: nil, } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be true + expect(test_segment_match(segment, user_context)).to be true end - it 'matches user with full rollout' do - segClause = Clauses.match_user(user, :email) + it 'matches context with full rollout' do + segClause = Clauses.match_context(user_context, :email) segRule = { clauses: [ segClause ], weight: 100000, } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be true + expect(test_segment_match(segment, user_context)).to be true end - it "doesn't match user with zero rollout" do - segClause = Clauses.match_user(user, :email) + it "doesn't match context with zero rollout" do + segClause = Clauses.match_context(user_context, :email) segRule = { clauses: [ segClause ], weight: 0, } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be false + expect(test_segment_match(segment, user_context)).to be false end - it "matches user with multiple clauses" do - segClause1 = Clauses.match_user(user, :email) - segClause2 = Clauses.match_user(user, :name) + it "matches context with multiple clauses" do + segClause1 = Clauses.match_context(user_context, :email) + segClause2 = Clauses.match_context(user_context, :name) segRule = { clauses: [ segClause1, segClause2 ], } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be true + expect(test_segment_match(segment, user_context)).to be true end - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = Clauses.match_user(user, :email) - segClause2 = Clauses.match_user(user, :name) + it "doesn't match context with multiple clauses if a clause doesn't match" do + segClause1 = Clauses.match_context(user_context, :email) + segClause2 = Clauses.match_context(user_context, :name) segClause2[:values] = [ 'wrong' ] segRule = { clauses: [ segClause1, segClause2 ], } segment = SegmentBuilder.new('segkey').rule(segRule).build - expect(test_segment_match(segment, user)).to be false + expect(test_segment_match(segment, user_context)).to be false end (1..4).each do |depth| diff --git a/spec/impl/evaluator_spec_base.rb 
b/spec/impl/evaluator_spec_base.rb index 808da1f1..0ae1747a 100644 --- a/spec/impl/evaluator_spec_base.rb +++ b/spec/impl/evaluator_spec_base.rb @@ -36,10 +36,10 @@ def with_unknown_segment(key) self end - def with_big_segment_for_user(user, segment, included) - user_key = user.key - @big_segment_memberships[user_key] = {} unless @big_segment_memberships.has_key?(user_key) - @big_segment_memberships[user_key][Evaluator.make_big_segment_ref(segment)] = included + def with_big_segment_for_context(context, segment, included) + context_key = context.key + @big_segment_memberships[context_key] = {} unless @big_segment_memberships.has_key?(context_key) + @big_segment_memberships[context_key][Evaluator.make_big_segment_ref(segment)] = included self end @@ -77,7 +77,7 @@ def build end module EvaluatorSpecBase - def user + def user_context LDContext::create({ key: "userkey", email: "test@example.com", diff --git a/spec/integrations/dynamodb_stores_spec.rb b/spec/integrations/dynamodb_stores_spec.rb index a4429497..f12fa63a 100644 --- a/spec/integrations/dynamodb_stores_spec.rb +++ b/spec/integrations/dynamodb_stores_spec.rb @@ -108,7 +108,7 @@ def set_big_segments_metadata(metadata) ) end - def set_big_segments(user_hash, includes, excludes) + def set_big_segments(context_hash, includes, excludes) client = self.class.create_test_client sets = { $DynamoDBBigSegmentStore::ATTR_INCLUDED => Set.new(includes), @@ -119,8 +119,8 @@ def set_big_segments(user_hash, includes, excludes) client.update_item( table_name: TABLE_NAME, key: { - "namespace" => @actual_prefix + $DynamoDBBigSegmentStore::KEY_USER_DATA, - "key" => user_hash, + "namespace" => @actual_prefix + $DynamoDBBigSegmentStore::KEY_CONTEXT_DATA, + "key" => context_hash, }, update_expression: "ADD #{attr_name} :value", expression_attribute_values: { diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb index f52b3643..d5c503bc 100644 --- a/spec/integrations/redis_stores_spec.rb +++ b/spec/integrations/redis_stores_spec.rb @@ -46,13 +46,13 @@ def set_big_segments_metadata(metadata) end end - def set_big_segments(user_hash, includes, excludes) + def set_big_segments(context_hash, includes, excludes) with_redis_test_client do |client| includes.each do |ref| - client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_USER_INCLUDE + user_hash, ref) + client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_CONTEXT_INCLUDE + context_hash, ref) end excludes.each do |ref| - client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_USER_EXCLUDE + user_hash, ref) + client.sadd?(@actual_prefix + $RedisBigSegmentStore::KEY_CONTEXT_EXCLUDE + context_hash, ref) end end end diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index 4ba71994..1b088d06 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -66,7 +66,7 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_context.key, segment, true) + segstore.setup_segment_for_context(basic_context.key, segment, true) big_seg_config = BigSegmentsConfig.new(store: segstore) with_client(test_config(data_source: td, big_segments: big_seg_config)) do |client| @@ -132,7 +132,7 @@ module LaunchDarkly ).build) segstore = MockBigSegmentStore.new - segstore.setup_segment_for_user(basic_context.key, segment, true) + segstore.setup_segment_for_context(basic_context.key, segment, true) segstore.setup_metadata(Time.now) big_seg_config = BigSegmentsConfig.new(store: segstore) @@ 
-239,7 +239,7 @@ module LaunchDarkly end end - it "returns empty state for nil user" do + it "returns empty state for nil context" do with_client(test_config(data_source: test_data)) do |client| state = client.all_flags_state(nil) expect(state.valid?).to be false @@ -247,7 +247,7 @@ module LaunchDarkly end end - it "returns empty state for nil user key" do + it "returns empty state for nil context key" do with_client(test_config(data_source: test_data)) do |client| state = client.all_flags_state({}) expect(state.valid?).to be false diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index 9dfcae2c..fcf95536 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -41,7 +41,7 @@ def event_processor(client) end end - it "does not send event, and logs error, if user is nil" do + it "does not send event, and logs error, if context is nil" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) @@ -54,7 +54,7 @@ def event_processor(client) end end - it "does not send event, and logs error, if user key is nil" do + it "does not send event, and logs error, if context key is nil" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) @@ -73,7 +73,7 @@ def event_processor(client) td.use_preconfigured_flag( FlagBuilder.new("flagkey").version(100).on(true).variations("value") .rule(RuleBuilder.new.variation(0).id("id").track_events(true) - .clause(Clauses.match_user(basic_context))) + .clause(Clauses.match_context(basic_context))) .build ) @@ -132,7 +132,7 @@ def event_processor(client) end end - it "does not send event, and logs error, if user is nil" do + it "does not send event, and logs error, if context is nil" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) @@ -145,7 +145,7 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is nil" do + it "does not send event, and logs warning, if context key is nil" do td = Integrations::TestData.data_source td.update(td.flag("flagkey").variations("value").on(false).off_variation(0)) @@ -168,7 +168,7 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user is nil" do + it "does not send event, and logs warning, if context is nil" do logger = double().as_null_object with_client(test_config(logger: logger)) do |client| @@ -178,7 +178,7 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is blank" do + it "does not send event, and logs warning, if context key is blank" do logger = double().as_null_object with_client(test_config(logger: logger)) do |client| @@ -210,7 +210,7 @@ def event_processor(client) end end - it "does not send event, and logs a warning, if user is nil" do + it "does not send event, and logs a warning, if context is nil" do logger = double().as_null_object with_client(test_config(logger: logger)) do |client| @@ -220,7 +220,7 @@ def event_processor(client) end end - it "does not send event, and logs warning, if user key is nil" do + it "does not send event, and logs warning, if context key is nil" do logger = double().as_null_object with_client(test_config(logger: logger)) do |client| diff --git a/spec/mock_components.rb b/spec/mock_components.rb index 9570e0b2..38b1afcb 100644 --- a/spec/mock_components.rb +++ b/spec/mock_components.rb @@ -77,13 +77,8 @@ def 
setup_metadata_error(ex) @metadata_error = ex end - def setup_membership(user_key, membership) - user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) - @memberships[user_hash] = membership - end - - def setup_segment_for_user(user_key, segment, included) - user_hash = Impl::BigSegmentStoreManager.hash_for_user_key(user_key) + def setup_segment_for_context(user_key, segment, included) + user_hash = Impl::BigSegmentStoreManager.hash_for_context_key(user_key) @memberships[user_hash] ||= {} @memberships[user_hash][Impl::Evaluator.make_big_segment_ref(segment)] = included end diff --git a/spec/model_builders.rb b/spec/model_builders.rb index 7ec784ef..110e184e 100644 --- a/spec/model_builders.rb +++ b/spec/model_builders.rb @@ -33,12 +33,12 @@ def self.match_segment(segment) } end - def self.match_user(user, attr = :key) + def self.match_context(context, attr = :key) { "attribute": attr.to_s, "op": "in", - "values": [ user.get_value(attr) ], - "contextKind": user.individual_context(0).kind, + "values": [ context.get_value(attr) ], + "contextKind": context.individual_context(0).kind, } end end diff --git a/spec/user_filter_spec.rb b/spec/user_filter_spec.rb deleted file mode 100644 index 96814289..00000000 --- a/spec/user_filter_spec.rb +++ /dev/null @@ -1,91 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::UserFilter do - subject { LaunchDarkly::UserFilter } - - let(:base_config) { LaunchDarkly::Config.new } - let(:config_with_all_attrs_private) { LaunchDarkly::Config.new({ all_attributes_private: true })} - let(:config_with_some_attrs_private) { LaunchDarkly::Config.new({ private_attribute_names: ['firstName', 'bizzle'] })} - - # users to serialize - - let(:user) { - { key: 'abc', firstName: 'Sue', custom: { bizzle: 'def', dizzle: 'ghi' }} - } - - let(:user_specifying_own_private_attr) { - u = user.clone - u[:privateAttributeNames] = [ 'dizzle', 'unused' ] - u - } - - let(:user_with_unknown_top_level_attrs) { - { key: 'abc', firstName: 'Sue', species: 'human', hatSize: 6, custom: { bizzle: 'def', dizzle: 'ghi' }} - } - - let(:anon_user) { - { key: 'abc', anonymous: 'true', custom: { bizzle: 'def', dizzle: 'ghi' }} - } - - # expected results from serializing user - - let(:user_with_all_attrs_hidden) { - { key: 'abc', custom: { }, privateAttrs: [ 'bizzle', 'dizzle', 'firstName' ]} - } - - let(:user_with_some_attrs_hidden) { - { key: 'abc', custom: { dizzle: 'ghi' }, privateAttrs: [ 'bizzle', 'firstName' ]} - } - - let(:user_with_own_specified_attr_hidden) { - { key: 'abc', firstName: 'Sue', custom: { bizzle: 'def' }, privateAttrs: [ 'dizzle' ]} - } - - let(:anon_user_with_all_attrs_hidden) { - { key: 'abc', anonymous: 'true', custom: { }, privateAttrs: [ 'bizzle', 'dizzle' ]} - } - - describe "serialize_events" do - it "includes all user attributes by default" do - uf = LaunchDarkly::UserFilter.new(base_config) - result = uf.transform_user_props(user) - expect(result).to eq user - end - - it "hides all except key if all_attributes_private is true" do - uf = LaunchDarkly::UserFilter.new(config_with_all_attrs_private) - result = uf.transform_user_props(user) - expect(result).to eq user_with_all_attrs_hidden - end - - it "hides some attributes if private_attribute_names is set" do - uf = LaunchDarkly::UserFilter.new(config_with_some_attrs_private) - result = uf.transform_user_props(user) - expect(result).to eq user_with_some_attrs_hidden - end - - it "hides attributes specified in per-user privateAttrs" do - uf = LaunchDarkly::UserFilter.new(base_config) - result = 
uf.transform_user_props(user_specifying_own_private_attr) - expect(result).to eq user_with_own_specified_attr_hidden - end - - it "looks at both per-user privateAttrs and global config" do - uf = LaunchDarkly::UserFilter.new(config_with_some_attrs_private) - result = uf.transform_user_props(user_specifying_own_private_attr) - expect(result).to eq user_with_all_attrs_hidden - end - - it "strips out any unknown top-level attributes" do - uf = LaunchDarkly::UserFilter.new(base_config) - result = uf.transform_user_props(user_with_unknown_top_level_attrs) - expect(result).to eq user - end - - it "leaves the anonymous attribute as is" do - uf = LaunchDarkly::UserFilter.new(config_with_all_attrs_private) - result = uf.transform_user_props(anon_user) - expect(result).to eq anon_user_with_all_attrs_hidden - end - end -end From 7e732a2f2dcb3e8eddc05a87adde469c58d4248b Mon Sep 17 00:00:00 2001 From: "Matthew M. Keeler" Date: Wed, 21 Dec 2022 14:48:08 -0600 Subject: [PATCH 289/292] Remove new relic integration (#233) The new relic integration was removed many versions ago but a small trace remained behind. --- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/newrelic.rb | 17 ----------------- spec/newrelic_spec.rb | 5 ----- 3 files changed, 23 deletions(-) delete mode 100644 lib/ldclient-rb/newrelic.rb delete mode 100644 spec/newrelic_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 37359e5a..80d5adec 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -17,7 +17,6 @@ module LaunchDarkly require "ldclient-rb/config" require "ldclient-rb/context" require "ldclient-rb/reference" -require "ldclient-rb/newrelic" require "ldclient-rb/stream" require "ldclient-rb/polling" require "ldclient-rb/simple_lru_cache" diff --git a/lib/ldclient-rb/newrelic.rb b/lib/ldclient-rb/newrelic.rb deleted file mode 100644 index 5c9b7d48..00000000 --- a/lib/ldclient-rb/newrelic.rb +++ /dev/null @@ -1,17 +0,0 @@ -module LaunchDarkly - # @private - class LDNewRelic - begin - require "newrelic_rpm" - NR_ENABLED = defined?(::NewRelic::Agent.add_custom_parameters) - rescue ScriptError, StandardError - NR_ENABLED = false - end - - def self.annotate_transaction(key, value) - if NR_ENABLED - ::NewRelic::Agent.add_custom_parameters(key.to_s => value.to_s) - end - end - end -end diff --git a/spec/newrelic_spec.rb b/spec/newrelic_spec.rb deleted file mode 100644 index f20afa42..00000000 --- a/spec/newrelic_spec.rb +++ /dev/null @@ -1,5 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::LDNewRelic do - subject { LaunchDarkly::LDNewRelic } -end From 5530092f28a22f3438efcf45fcc46b995cd6bf08 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Wed, 28 Dec 2022 13:28:27 -0600 Subject: [PATCH 290/292] Rename config option private_attribute_names (#234) Co-authored-by: Eli Bishop --- contract-tests/client_entity.rb | 4 ++-- lib/ldclient-rb/config.rb | 19 ++++++++++++++++--- lib/ldclient-rb/events.rb | 2 +- spec/events_spec.rb | 6 +++--- 4 files changed, 22 insertions(+), 9 deletions(-) diff --git a/contract-tests/client_entity.rb b/contract-tests/client_entity.rb index 487477cb..dc2043d4 100644 --- a/contract-tests/client_entity.rb +++ b/contract-tests/client_entity.rb @@ -28,8 +28,8 @@ def initialize(log, config) opts[:events_uri] = events[:baseUri] if events[:baseUri] opts[:capacity] = events[:capacity] if events[:capacity] opts[:diagnostic_opt_out] = !events[:enableDiagnostics] - opts[:all_attributes_private] = !!events[:allAttributesPrivate] - opts[:private_attribute_names] = events[:globalPrivateAttributes] + opts[:all_attributes_private] = !!events[:allAttributesPrivate] + opts[:private_attributes] = events[:globalPrivateAttributes] opts[:flush_interval] = (events[:flushIntervalMs] / 1_000) unless events[:flushIntervalMs].nil? else opts[:send_events] = false diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index b54afa24..498d33d6 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -21,6 +21,10 @@ class Config # In the event both the user and context variations are provided, the # context specific configuration option will take precedence. # + # Similarly, `private_attribute_names` is deprecated. Newer code should + # prefer `private_attributes`. If both are provided, `private_attributes` + # will take precedence. + # # @param opts [Hash] the configuration options # @option opts [Logger] :logger See {#logger}. # @option opts [String] :base_uri ("https://sdk.launchdarkly.com") See {#base_uri}. @@ -39,6 +43,7 @@ class Config # @option opts [Boolean] :stream (true) See {#stream?}. # @option opts [Boolean] all_attributes_private (false) See {#all_attributes_private}. # @option opts [Array] :private_attribute_names See {#private_attribute_names}. + # @option opts [Array] :private_attributes See {#private_attributes}. # @option opts [Boolean] :send_events (true) See {#send_events}. # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. # @option opts [Integer] :context_keys_capacity (1000) See {#context_keys_capacity}. @@ -70,7 +75,7 @@ def initialize(opts = {}) @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @poll_interval = opts.has_key?(:poll_interval) && opts[:poll_interval] > Config.default_poll_interval ? opts[:poll_interval] : Config.default_poll_interval @all_attributes_private = opts[:all_attributes_private] || false - @private_attribute_names = opts[:private_attribute_names] || [] + @private_attributes = opts[:private_attributes] || opts[:private_attribute_names] || [] @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events @context_keys_capacity = opts[:context_keys_capacity] || opts[:user_keys_capacity] || Config.default_context_keys_capacity @context_keys_flush_interval = opts[:context_keys_flush_interval] || opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @@ -217,7 +222,7 @@ def offline? # that the attribute values will not be sent to LaunchDarkly in analytics events and will not # appear on the LaunchDarkly dashboard. 
# @return [Boolean] - # @see #private_attribute_names + # @see #private_attributes # attr_reader :all_attributes_private @@ -234,7 +239,15 @@ def offline? # @return [Array] # @see #all_attributes_private # - attr_reader :private_attribute_names + attr_reader :private_attributes + + # + # @deprecated Backwards compatibility alias for #private_attributes. + # + # @return [Integer] + # @see #private_attributes + # + alias :private_attribute_names :private_attributes # # Whether to send events back to LaunchDarkly. This differs from {#offline?} in that it affects diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 73a3b136..1c44ba59 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -446,7 +446,7 @@ class EventOutputFormatter SUMMARY_KIND = 'summary' def initialize(config) - @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + @context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attributes) end # Transforms events into the format used for event sending. diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 292629c5..17a23eb4 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -376,7 +376,7 @@ def with_diagnostic_processor_and_sender(config) # @return [Hash] # def index_event(config, context, timestamp = starting_timestamp) - context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attributes) out = { kind: "index", creationDate: timestamp, @@ -392,7 +392,7 @@ def index_event(config, context, timestamp = starting_timestamp) # @return [Hash] # def identify_event(config, context, timestamp = starting_timestamp) - context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attributes) out = { kind: "identify", creationDate: timestamp, @@ -433,7 +433,7 @@ def feature_event(flag, context, variation, value, timestamp = starting_timestam # @return [Hash] # def debug_event(config, flag, context, variation, value, timestamp = starting_timestamp) - context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attribute_names) + context_filter = LaunchDarkly::Impl::ContextFilter.new(config.all_attributes_private, config.private_attributes) out = { kind: 'debug', creationDate: timestamp, From 47a12e006c1a71c5812f36b66716c05faaa071c1 Mon Sep 17 00:00:00 2001 From: "Matthew M. 
Keeler" Date: Thu, 29 Dec 2022 07:45:51 -0600 Subject: [PATCH 291/292] Update test data integration to support contexts (#232) --- lib/ldclient-rb/integrations/test_data.rb | 12 +- .../integrations/test_data/flag_builder.rb | 270 ++++++++++++++---- spec/integrations/test_data_spec.rb | 16 +- spec/ldclient_evaluation_spec.rb | 2 +- spec/ldclient_events_spec.rb | 6 +- 5 files changed, 232 insertions(+), 74 deletions(-) diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index cf38f01d..3a810507 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -16,12 +16,12 @@ module Integrations # # @example # td = LaunchDarkly::Integrations::TestData.data_source - # td.update(td.flag("flag-key-1").variation_for_all_users(true)) + # td.update(td.flag("flag-key-1").variation_for_all(true)) # config = LaunchDarkly::Config.new(data_source: td) # client = LaunchDarkly::LDClient.new('sdkKey', config) # # flags can be updated at any time: # td.update(td.flag("flag-key-2") - # .variation_for_user("some-user-key", true) + # .variation_for_key("user", some-user-key", true) # .fallthrough_variation(false)) # # The above example uses a simple boolean flag, but more complex configurations are possible using @@ -79,7 +79,7 @@ def call(_, config) # starts with the same configuration that was last provided for this flag. # # Otherwise, it starts with a new default configuration in which the flag has `true` and - # `false` variations, is `true` for all users when targeting is turned on and + # `false` variations, is `true` for all contexts when targeting is turned on and # `false` otherwise, and currently has targeting turned on. You can change any of those # properties, and provide more complex behavior, using the {FlagBuilder} methods. # @@ -151,15 +151,15 @@ def use_preconfigured_flag(flag) end # - # Copies a full user segment data model object into the test data. + # Copies a full segment data model object into the test data. # # It immediately propagates the change to any `LDClient` instance(s) that you have already # configured to use this `TestData`. If no `LDClient` has been started yet, it simply adds # this segment to the test data which will be provided to any LDClient that you subsequently # configure. # - # This method is currently the only way to inject user segment data, since there is no builder - # API for segments. It is mainly intended for the SDK's own tests of user segment functionality, + # This method is currently the only way to inject segment data, since there is no builder + # API for segments. It is mainly intended for the SDK's own tests of segment functionality, # since application tests that need to produce a desired evaluation state could do so more easily # by just setting flag values. # diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index 0d3d08ca..2b8a495d 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -45,7 +45,7 @@ def on(on) # # Specifies the fallthrough variation. The fallthrough is the value - # that is returned if targeting is on and the user was not matched by a more specific + # that is returned if targeting is on and the context was not matched by a more specific # target or rule. 
# # If the flag was previously configured with other variations and the variation specified is a boolean, @@ -56,7 +56,7 @@ def on(on) # @return [FlagBuilder] the builder # def fallthrough_variation(variation) - if LaunchDarkly::Impl::Util.bool? variation then + if LaunchDarkly::Impl::Util.bool? variation boolean_flag.fallthrough_variation(variation_for_boolean(variation)) else @fallthrough_variation = variation @@ -76,7 +76,7 @@ def fallthrough_variation(variation) # @return [FlagBuilder] the builder # def off_variation(variation) - if LaunchDarkly::Impl::Util.bool? variation then + if LaunchDarkly::Impl::Util.bool? variation boolean_flag.off_variation(variation_for_boolean(variation)) else @off_variation = variation @@ -108,7 +108,7 @@ def variations(*variations) end # - # Sets the flag to always return the specified variation for all users. + # Sets the flag to always return the specified variation for all contexts. # # The variation is specified, Targeting is switched on, and any existing targets or rules are removed. # The fallthrough variation is set to the specified value. The off variation is left unchanged. @@ -120,31 +120,41 @@ def variations(*variations) # 0 for the first, 1 for the second, etc. # @return [FlagBuilder] the builder # - def variation_for_all_users(variation) - if LaunchDarkly::Impl::Util.bool? variation then - boolean_flag.variation_for_all_users(variation_for_boolean(variation)) + def variation_for_all(variation) + if LaunchDarkly::Impl::Util.bool? variation + boolean_flag.variation_for_all(variation_for_boolean(variation)) else - on(true).clear_rules.clear_user_targets.fallthrough_variation(variation) + on(true).clear_rules.clear_targets.fallthrough_variation(variation) end end # - # Sets the flag to always return the specified variation value for all users. + # @deprecated Backwards compatibility alias for #variation_for_all + # + alias_method :variation_for_all_users, :variation_for_all + + # + # Sets the flag to always return the specified variation value for all context. # # The value may be of any valid JSON type. This method changes the # flag to have only a single variation, which is this value, and to return the same # variation regardless of whether targeting is on or off. Any existing targets or rules # are removed. # - # @param value [Object] the desired value to be returned for all users + # @param value [Object] the desired value to be returned for all contexts # @return [FlagBuilder] the builder # - def value_for_all_users(value) - variations(value).variation_for_all_users(0) + def value_for_all(value) + variations(value).variation_for_all(0) end # - # Sets the flag to return the specified variation for a specific user key when targeting + # @deprecated Backwards compatibility alias for #value_for_all + # + alias_method :value_for_all_users, :value_for_all + + # + # Sets the flag to return the specified variation for a specific context key when targeting # is on. # # This has no effect when targeting is turned off for the flag. @@ -152,36 +162,87 @@ def value_for_all_users(value) # If the flag was previously configured with other variations and the variation specified is a boolean, # this also changes it to a boolean flag. # - # @param user_key [String] a user key + # @param context_kind [String] a context kind + # @param context_key [String] a context key # @param variation [Boolean, Integer] true or false or the desired variation index to return: # 0 for the first, 1 for the second, etc. 
# @return [FlagBuilder] the builder # - def variation_for_user(user_key, variation) - if LaunchDarkly::Impl::Util.bool? variation then - boolean_flag.variation_for_user(user_key, variation_for_boolean(variation)) - else - if @targets.nil? then - @targets = Hash.new - end - @variations.count.times do | i | - if i == variation then - if @targets[i].nil? then - @targets[i] = [user_key] - else - @targets[i].push(user_key) - end - elsif not @targets[i].nil? then - @targets[i].delete(user_key) + def variation_for_key(context_kind, context_key, variation) + if LaunchDarkly::Impl::Util.bool? variation + return boolean_flag.variation_for_key(context_kind, context_key, variation_for_boolean(variation)) + end + + if @targets.nil? + @targets = Hash.new + end + + targets = @targets[context_kind] || [] + @variations.count.times do | i | + if i == variation + if targets[i].nil? + targets[i] = [context_key] + else + targets[i].push(context_key) end + elsif not targets[i].nil? + targets[i].delete(context_key) end - self end + + @targets[context_kind] = targets + + self + end + + # + # Sets the flag to return the specified variation for a specific user key when targeting + # is on. + # + # This is a shortcut for calling {variation_for_key} with + # `LaunchDarkly::LDContext::KIND_DEFAULT` as the context kind. + # + # This has no effect when targeting is turned off for the flag. + # + # If the flag was previously configured with other variations and the variation specified is a boolean, + # this also changes it to a boolean flag. + # + # @param user_key [String] a user key + # @param variation [Boolean, Integer] true or false or the desired variation index to return: + # 0 for the first, 1 for the second, etc. + # @return [FlagBuilder] the builder + # + def variation_for_user(user_key, variation) + variation_for_key(LaunchDarkly::LDContext::KIND_DEFAULT, user_key, variation) end # # Starts defining a flag rule, using the "is one of" operator. # + # @example create a rule that returns `true` if the name is "Patsy" or "Edina" and the context kind is "user" + # testData.flag("flag") + # .if_match_context("user", :name, 'Patsy', 'Edina') + # .then_return(true); + # + # @param context_kind [String] a context kind + # @param attribute [Symbol] the context attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match + # + def if_match_context(context_kind, attribute, *values) + FlagRuleBuilder.new(self).and_match_context(context_kind, attribute, *values) + end + + # + # Starts defining a flag rule, using the "is one of" operator. + # + # This is a shortcut for calling {if_match_context} with + # `LaunchDarkly::LDContext::KIND_DEFAULT` as the context kind. + # # @example create a rule that returns `true` if the name is "Patsy" or "Edina" # testData.flag("flag") # .if_match(:name, 'Patsy', 'Edina') @@ -196,12 +257,36 @@ def variation_for_user(user_key, variation) # @see FlagRuleBuilder#and_not_match # def if_match(attribute, *values) - FlagRuleBuilder.new(self).and_match(attribute, *values) + if_match_context(LaunchDarkly::LDContext::KIND_DEFAULT, attribute, *values) + end + + # + # Starts defining a flag rule, using the "is not one of" operator. 
+ # + # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble" + # testData.flag("flag") + # .if_not_match_context("user", :name, 'Saffron', 'Bubble') + # .then_return(true) + # + # @param context_kind [String] a context kind + # @param attribute [Symbol] the context attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] a flag rule builder + # + # @see FlagRuleBuilder#then_return + # @see FlagRuleBuilder#and_match + # @see FlagRuleBuilder#and_not_match + # + def if_not_match_context(context_kind, attribute, *values) + FlagRuleBuilder.new(self).and_not_match_context(context_kind, attribute, *values) end # # Starts defining a flag rule, using the "is not one of" operator. # + # This is a shortcut for calling {if_not_match_context} with + # `LaunchDarkly::LDContext::KIND_DEFAULT` as the context kind. + # # @example create a rule that returns `true` if the name is neither "Saffron" nor "Bubble" # testData.flag("flag") # .if_not_match(:name, 'Saffron', 'Bubble') @@ -216,20 +301,25 @@ def if_match(attribute, *values) # @see FlagRuleBuilder#and_not_match # def if_not_match(attribute, *values) - FlagRuleBuilder.new(self).and_not_match(attribute, *values) + if_not_match_context(LaunchDarkly::LDContext::KIND_DEFAULT, attribute, *values) end # - # Removes any existing user targets from the flag. - # This undoes the effect of methods like {#variation_for_user} + # Removes any existing targets from the flag. + # This undoes the effect of methods like {#variation_for_key} # # @return [FlagBuilder] the same builder # - def clear_user_targets + def clear_targets @targets = nil self end + # + # @deprecated Backwards compatibility alias for #clear_targets + # + alias_method :clear_user_targets, :clear_targets + # # Removes any existing rules from the flag. # This undoes the effect of methods like {#if_match} @@ -243,7 +333,7 @@ def clear_rules # @private def add_rule(rule) - if @rules.nil? then + if @rules.nil? @rules = Array.new end @rules.push(rule) @@ -261,7 +351,7 @@ def add_rule(rule) # @return [FlagBuilder] the builder # def boolean_flag - if boolean_flag? then + if boolean_flag? self else variations(true, false) @@ -278,21 +368,35 @@ def build(version) variations: @variations, } - unless @off_variation.nil? then + unless @off_variation.nil? res[:offVariation] = @off_variation end - unless @fallthrough_variation.nil? then + unless @fallthrough_variation.nil? res[:fallthrough] = { variation: @fallthrough_variation } end - unless @targets.nil? then - res[:targets] = @targets.map do | variation, values | - { variation: variation, values: values } + unless @targets.nil? + targets = [] + context_targets = [] + + @targets.each do |kind, targets_for_kind| + targets_for_kind.each_with_index do |values, variation| + next if values.nil? + if kind == LaunchDarkly::LDContext::KIND_DEFAULT + targets << { variation: variation, values: values } + context_targets << { contextKind: LaunchDarkly::LDContext::KIND_DEFAULT, variation: variation, values: [] } + else + context_targets << { contextKind: kind, variation: variation, values: values } + end + end end + + res[:targets] = targets + res[:contextTargets] = context_targets end - unless @rules.nil? then + unless @rules.nil? res[:rules] = @rules.each_with_index.map { | rule, i | rule.build(i) } end @@ -303,8 +407,8 @@ def build(version) # A builder for feature flag rules to be used with {FlagBuilder}. 
# # In the LaunchDarkly model, a flag can have any number of rules, and a rule can have any number of - # clauses. A clause is an individual test such as "name is 'X'". A rule matches a user if all of the - # rule's clauses match the user. + # clauses. A clause is an individual test such as "name is 'X'". A rule matches a context if all of the + # rule's clauses match the context. # # To start defining a rule, use one of the flag builder's matching methods such as # {FlagBuilder#if_match}. This defines the first clause for the rule. @@ -314,7 +418,7 @@ def build(version) # class FlagRuleBuilder # @private - FlagRuleClause = Struct.new(:attribute, :op, :values, :negate, keyword_init: true) + FlagRuleClause = Struct.new(:contextKind, :attribute, :op, :values, :negate, keyword_init: true) # @private def initialize(flag_builder) @@ -331,6 +435,34 @@ def intialize_copy(other) # # Adds another clause, using the "is one of" operator. # + # @example create a rule that returns `true` if the name is "Patsy", the country is "gb", and the context kind is "user" + # testData.flag("flag") + # .if_match_context("user", :name, 'Patsy') + # .and_match_context("user", :country, 'gb') + # .then_return(true) + # + # @param context_kind [String] a context kind + # @param attribute [Symbol] the context attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder + # + def and_match_context(context_kind, attribute, *values) + @clauses.push(FlagRuleClause.new( + contextKind: context_kind, + attribute: attribute, + op: 'in', + values: values, + negate: false + )) + self + end + + # + # Adds another clause, using the "is one of" operator. + # + # This is a shortcut for calling {and_match_context} with + # `LaunchDarkly::LDContext::KIND_DEFAULT` as the context kind. + # # @example create a rule that returns `true` if the name is "Patsy" and the country is "gb" # testData.flag("flag") # .if_match(:name, 'Patsy') @@ -342,11 +474,30 @@ def intialize_copy(other) # @return [FlagRuleBuilder] the rule builder # def and_match(attribute, *values) + and_match_context(LaunchDarkly::LDContext::KIND_DEFAULT, attribute, *values) + end + + # + # Adds another clause, using the "is not one of" operator. + # + # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb" + # testData.flag("flag") + # .if_match_context("user", :name, 'Patsy') + # .and_not_match_context("user", :country, 'gb') + # .then_return(true) + # + # @param context_kind [String] a context kind + # @param attribute [Symbol] the context attribute to match against + # @param values [Array] values to compare to + # @return [FlagRuleBuilder] the rule builder + # + def and_not_match_context(context_kind, attribute, *values) @clauses.push(FlagRuleClause.new( + contextKind: context_kind, attribute: attribute, op: 'in', values: values, - negate: false + negate: true )) self end @@ -354,6 +505,9 @@ def and_match(attribute, *values) # # Adds another clause, using the "is not one of" operator. # + # This is a shortcut for calling {and_not_match} with + # `LaunchDarkly::LDContext::KIND_DEFAULT` as the context kind. 
+ # # @example create a rule that returns `true` if the name is "Patsy" and the country is not "gb" # testData.flag("flag") # .if_match(:name, 'Patsy') @@ -365,13 +519,7 @@ def and_match(attribute, *values) # @return [FlagRuleBuilder] the rule builder # def and_not_match(attribute, *values) - @clauses.push(FlagRuleClause.new( - attribute: attribute, - op: 'in', - values: values, - negate: true - )) - self + and_not_match_context(LaunchDarkly::LDContext::KIND_DEFAULT, attribute, *values) end # @@ -386,7 +534,7 @@ def and_not_match(attribute, *values) # @return [FlagBuilder] the flag builder with this rule added # def then_return(variation) - if LaunchDarkly::Impl::Util.bool? variation then + if LaunchDarkly::Impl::Util.bool? variation @variation = @flag_builder.variation_for_boolean(variation) @flag_builder.boolean_flag.add_rule(self) else @@ -423,7 +571,15 @@ def boolean_flag? def deep_copy_hash(from) to = Hash.new - from.each { |k, v| to[k] = v.clone } + from.each do |k, v| + if v.is_a?(Hash) + to[k] = deep_copy_hash(v) + elsif v.is_a?(Array) + to[k] = deep_copy_array(v) + else + to[k] = v.clone + end + end to end diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb index d8888a1e..f576175d 100644 --- a/spec/integrations/test_data_spec.rb +++ b/spec/integrations/test_data_spec.rb @@ -52,7 +52,7 @@ module Integrations version: 1, }) - td.update(td.flag('flag').variation_for_all_users(false)) + td.update(td.flag('flag').variation_for_all(false)) expect(config.feature_store.get(FEATURES, 'flag').data).to eql({ key: 'flag', @@ -115,11 +115,11 @@ module Integrations it 'TestData.flag returns a copy of the existing flag if it exists' do td = TestData.new - td.update(td.flag('flag').variation_for_all_users(true)) + td.update(td.flag('flag').variation_for_all(true)) expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) #modify the flag but dont call update - td.flag('flag').variation_for_all_users(false).build(0) + td.flag('flag').variation_for_all(false).build(0) expect(td.flag('flag').build(0)[:fallthrough][:variation]).to eq(0) end @@ -172,7 +172,7 @@ module Integrations end it 'can set variation for all users' do - f = TestData::FlagBuilder.new('flag').variation_for_all_users(true).build(1) + f = TestData::FlagBuilder.new('flag').variation_for_all(true).build(1) expect(f[:rules]).to be_nil expect(f[:targets]).to be_nil expect(f[:fallthrough][:variation]).to be(0) @@ -183,7 +183,7 @@ module Integrations .if_match('name', 'ben') .then_return(false) .variation_for_user('ben', false) - .variation_for_all_users(true).build(1) + .variation_for_all(true).build(1) expect(f.keys).to_not include(:rules) expect(f.keys).to_not include(:targets) expect(f[:fallthrough][:variation]).to be(0) @@ -199,10 +199,10 @@ module Integrations end it 'can make an immutable copy of its self' do - fb = TestData::FlagBuilder.new('flag').variation_for_all_users(true) + fb = TestData::FlagBuilder.new('flag').variation_for_all(true) expect(fb.build(0)).to eql(fb.clone.build(0)) - fcopy = fb.clone.variation_for_all_users(false).build(0) + fcopy = fb.clone.variation_for_all(false).build(0) f = fb.build(0) expect(f[:key]).to eql(fcopy[:key]) @@ -221,12 +221,14 @@ module Integrations id: "rule0", variation: 0, clauses: [{ + contextKind: "user", attribute: 'name', op: 'in', values: ['ben'], negate: false, }, { + contextKind: "user", attribute: 'country', op: 'in', values: ['fr'], diff --git a/spec/ldclient_evaluation_spec.rb b/spec/ldclient_evaluation_spec.rb index 
1b088d06..424276fc 100644 --- a/spec/ldclient_evaluation_spec.rb +++ b/spec/ldclient_evaluation_spec.rb @@ -22,7 +22,7 @@ module LaunchDarkly it "returns the value for an existing feature" do td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + td.update(td.flag("flagkey").variations("value").variation_for_all(0)) with_client(test_config(data_source: td)) do |client| expect(client.variation("flagkey", basic_context, "default")).to eq "value" diff --git a/spec/ldclient_events_spec.rb b/spec/ldclient_events_spec.rb index fcf95536..62adda39 100644 --- a/spec/ldclient_events_spec.rb +++ b/spec/ldclient_events_spec.rb @@ -30,7 +30,7 @@ def event_processor(client) it "known flag" do td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + td.update(td.flag("flagkey").variations("value").variation_for_all(0)) context = basic_context with_client(test_config(data_source: td)) do |client| @@ -43,7 +43,7 @@ def event_processor(client) it "does not send event, and logs error, if context is nil" do td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + td.update(td.flag("flagkey").variations("value").variation_for_all(0)) logger = double().as_null_object @@ -56,7 +56,7 @@ def event_processor(client) it "does not send event, and logs error, if context key is nil" do td = Integrations::TestData.data_source - td.update(td.flag("flagkey").variations("value").variation_for_all_users(0)) + td.update(td.flag("flagkey").variations("value").variation_for_all(0)) logger = double().as_null_object keyless_user = { key: nil } From 93f0e79c00a034b8a4969d07a12ff7dd355c4a37 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 11 Jan 2023 11:15:02 -0800 Subject: [PATCH 292/292] improve data model validation logging; allow missing/empty attribute for segmentMatch (#236) --- lib/ldclient-rb/impl/evaluator_helpers.rb | 8 +- lib/ldclient-rb/impl/model/clause.rb | 16 +- lib/ldclient-rb/impl/model/feature_flag.rb | 55 ++-- lib/ldclient-rb/impl/model/segment.rb | 12 +- lib/ldclient-rb/impl/model/serialization.rb | 2 +- spec/capturing_logger.rb | 16 ++ spec/impl/model/model_validation_spec.rb | 275 ++++++++++++++++++++ 7 files changed, 353 insertions(+), 31 deletions(-) create mode 100644 spec/capturing_logger.rb create mode 100644 spec/impl/model/model_validation_spec.rb diff --git a/lib/ldclient-rb/impl/evaluator_helpers.rb b/lib/ldclient-rb/impl/evaluator_helpers.rb index 842d734f..f7e0b57b 100644 --- a/lib/ldclient-rb/impl/evaluator_helpers.rb +++ b/lib/ldclient-rb/impl/evaluator_helpers.rb @@ -10,9 +10,9 @@ module EvaluatorHelpers # @param flag [LaunchDarkly::Impl::Model::FeatureFlag] # @param reason [LaunchDarkly::EvaluationReason] # - def self.evaluation_detail_for_off_variation(flag, reason, logger = nil) + def self.evaluation_detail_for_off_variation(flag, reason) index = flag.off_variation - index.nil? ? EvaluationDetail.new(nil, nil, reason) : evaluation_detail_for_variation(flag, index, reason, logger) + index.nil? ? 
EvaluationDetail.new(nil, nil, reason) : evaluation_detail_for_variation(flag, index, reason) end # @@ -20,11 +20,11 @@ def self.evaluation_detail_for_off_variation(flag, reason, logger = nil) # @param index [Integer] # @param reason [LaunchDarkly::EvaluationReason] # - def self.evaluation_detail_for_variation(flag, index, reason, logger = nil) + def self.evaluation_detail_for_variation(flag, index, reason) vars = flag.variations if index < 0 || index >= vars.length - logger.error("[LDClient] Data inconsistency in feature flag \"#{flag.key}\": invalid variation index") unless logger.nil? EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) + # This error condition has already been logged at the time we received the flag data - see model/feature_flag.rb else EvaluationDetail.new(vars[index], index, reason) end diff --git a/lib/ldclient-rb/impl/model/clause.rb b/lib/ldclient-rb/impl/model/clause.rb index 0227dc30..271606e4 100644 --- a/lib/ldclient-rb/impl/model/clause.rb +++ b/lib/ldclient-rb/impl/model/clause.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/reference" + # See serialization.rb for implementation notes on the data model classes. @@ -5,14 +7,18 @@ module LaunchDarkly module Impl module Model class Clause - def initialize(data, logger) + def initialize(data, errors_out = nil) @data = data @context_kind = data[:contextKind] - @attribute = (@context_kind.nil? || @context_kind.empty?) ? Reference.create_literal(data[:attribute]) : Reference.create(data[:attribute]) - unless logger.nil? || @attribute.error.nil? - logger.error("[LDClient] Data inconsistency in feature flag: #{@attribute.error}") - end @op = data[:op].to_sym + if @op == :segmentMatch + @attribute = nil + else + @attribute = (@context_kind.nil? || @context_kind.empty?) ? Reference.create_literal(data[:attribute]) : Reference.create(data[:attribute]) + unless errors_out.nil? || @attribute.error.nil? + errors_out << "clause has invalid attribute: #{@attribute.error}" + end + end @values = data[:values] || [] @negate = !!data[:negate] end diff --git a/lib/ldclient-rb/impl/model/feature_flag.rb b/lib/ldclient-rb/impl/model/feature_flag.rb index 2f89905c..6c93877b 100644 --- a/lib/ldclient-rb/impl/model/feature_flag.rb +++ b/lib/ldclient-rb/impl/model/feature_flag.rb @@ -4,6 +4,14 @@ # See serialization.rb for implementation notes on the data model classes. +def check_variation_range(flag, errors_out, variation, description) + unless flag.nil? || errors_out.nil? || variation.nil? 
+ if variation < 0 || variation >= flag.variations.length + errors_out << "#{description} has invalid variation index" + end + end +end + module LaunchDarkly module Impl module Model @@ -12,6 +20,7 @@ class FeatureFlag # @param logger [Logger|nil] def initialize(data, logger = nil) raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash) + errors = [] @data = data @key = data[:key] @version = data[:version] @@ -20,24 +29,30 @@ def initialize(data, logger = nil) @variations = data[:variations] || [] @on = !!data[:on] fallthrough = data[:fallthrough] || {} - @fallthrough = VariationOrRollout.new(fallthrough[:variation], fallthrough[:rollout]) + @fallthrough = VariationOrRollout.new(fallthrough[:variation], fallthrough[:rollout], self, errors, "fallthrough") @off_variation = data[:offVariation] + check_variation_range(self, errors, @off_variation, "off variation") @prerequisites = (data[:prerequisites] || []).map do |prereq_data| - Prerequisite.new(prereq_data, self, logger) + Prerequisite.new(prereq_data, self, errors) end @targets = (data[:targets] || []).map do |target_data| - Target.new(target_data, self, logger) + Target.new(target_data, self, errors) end @context_targets = (data[:contextTargets] || []).map do |target_data| - Target.new(target_data, self, logger) + Target.new(target_data, self, errors) end @rules = (data[:rules] || []).map.with_index do |rule_data, index| - FlagRule.new(rule_data, index, self, logger) + FlagRule.new(rule_data, index, self, errors) end @salt = data[:salt] - @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger) + @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off) @fallthrough_results = Preprocessor.precompute_multi_variation_results(self, EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true)) + unless logger.nil? 
+ errors.each do |message| + logger.error("[LDClient] Data inconsistency in feature flag \"#{@key}\": #{message}") + end + end end # @return [Hash] @@ -93,12 +108,13 @@ def to_json(*a) end class Prerequisite - def initialize(data, flag, logger) + def initialize(data, flag, errors_out = nil) @data = data @key = data[:key] @variation = data[:variation] @failure_result = EvaluatorHelpers.evaluation_detail_for_off_variation(flag, - EvaluationReason::prerequisite_failed(@key), logger) + EvaluationReason::prerequisite_failed(@key)) + check_variation_range(flag, errors_out, @variation, "prerequisite") end # @return [Hash] @@ -112,13 +128,14 @@ def initialize(data, flag, logger) end class Target - def initialize(data, flag, logger) + def initialize(data, flag, errors_out = nil) @kind = data[:contextKind] || LDContext::KIND_DEFAULT @data = data @values = Set.new(data[:values] || []) @variation = data[:variation] @match_result = EvaluatorHelpers.evaluation_detail_for_variation(flag, - data[:variation], EvaluationReason::target_match, logger) + data[:variation], EvaluationReason::target_match) + check_variation_range(flag, errors_out, @variation, "target") end # @return [String] @@ -134,12 +151,12 @@ def initialize(data, flag, logger) end class FlagRule - def initialize(data, rule_index, flag, logger) + def initialize(data, rule_index, flag, errors_out = nil) @data = data @clauses = (data[:clauses] || []).map do |clause_data| - Clause.new(clause_data, logger) + Clause.new(clause_data, errors_out) end - @variation_or_rollout = VariationOrRollout.new(data[:variation], data[:rollout]) + @variation_or_rollout = VariationOrRollout.new(data[:variation], data[:rollout], flag, errors_out, 'rule') rule_id = data[:id] match_reason = EvaluationReason::rule_match(rule_index, rule_id) match_reason_in_experiment = EvaluationReason::rule_match(rule_index, rule_id, true) @@ -157,9 +174,10 @@ def initialize(data, rule_index, flag, logger) end class VariationOrRollout - def initialize(variation, rollout_data) + def initialize(variation, rollout_data, flag = nil, errors_out = nil, description = nil) @variation = variation - @rollout = rollout_data.nil? ? nil : Rollout.new(rollout_data) + check_variation_range(flag, errors_out, variation, description) + @rollout = rollout_data.nil? ? 
nil : Rollout.new(rollout_data, flag, errors_out, description) end # @return [Integer|nil] @@ -169,9 +187,9 @@ def initialize(variation, rollout_data) end class Rollout - def initialize(data) + def initialize(data, flag = nil, errors_out = nil, description = nil) @context_kind = data[:contextKind] - @variations = (data[:variations] || []).map { |v| WeightedVariation.new(v) } + @variations = (data[:variations] || []).map { |v| WeightedVariation.new(v, flag, errors_out, description) } @bucket_by = data[:bucketBy] @kind = data[:kind] @is_experiment = @kind == "experiment" @@ -193,10 +211,11 @@ def initialize(data) end class WeightedVariation - def initialize(data) + def initialize(data, flag = nil, errors_out = nil, description = nil) @variation = data[:variation] @weight = data[:weight] @untracked = !!data[:untracked] + check_variation_range(flag, errors_out, @variation, description) end # @return [Integer] diff --git a/lib/ldclient-rb/impl/model/segment.rb b/lib/ldclient-rb/impl/model/segment.rb index d78036a7..6b8f3cb6 100644 --- a/lib/ldclient-rb/impl/model/segment.rb +++ b/lib/ldclient-rb/impl/model/segment.rb @@ -12,6 +12,7 @@ class Segment # @param logger [Logger|nil] def initialize(data, logger = nil) raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash) + errors = [] @data = data @key = data[:key] @version = data[:version] @@ -26,12 +27,17 @@ def initialize(data, logger = nil) SegmentTarget.new(target_data) end @rules = (data[:rules] || []).map do |rule_data| - SegmentRule.new(rule_data, logger) + SegmentRule.new(rule_data, errors) end @unbounded = !!data[:unbounded] @unbounded_context_kind = data[:unboundedContextKind] || LDContext::KIND_DEFAULT @generation = data[:generation] @salt = data[:salt] + unless logger.nil? + errors.each do |message| + logger.error("[LDClient] Data inconsistency in segment \"#{@key}\": #{message}") + end + end end # @return [Hash] @@ -98,10 +104,10 @@ def initialize(data) end class SegmentRule - def initialize(data, logger) + def initialize(data, errors_out = nil) @data = data @clauses = (data[:clauses] || []).map do |clause_data| - Clause.new(clause_data, logger) + Clause.new(clause_data, errors_out) end @weight = data[:weight] @bucket_by = data[:bucketBy] diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 3bc3029d..088112b8 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -37,7 +37,7 @@ module Model # @param kind [Hash] normally either FEATURES or SEGMENTS # @param input [object] a JSON string or a parsed hash (or a data model object, in which case # we'll just return the original object) - # @param logger [Logger|nil] logs warnings if there are any data validation problems + # @param logger [Logger|nil] logs errors if there are any data validation problems # @return [Object] the flag or segment (or, for an unknown data kind, the data as a hash) def self.deserialize(kind, input, logger = nil) return nil if input.nil? 
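To illustrate the effect of the model-validation changes above: flag construction now collects inconsistency messages in an errors array and logs each one a single time, rather than logging at evaluation time. The sketch below is not part of the patch; it is a minimal, hypothetical usage example (the flag hash and logger are made up) showing the kind of message the new check_variation_range path reports when an off variation index is out of range, assuming the SDK's model classes are loaded as they are in the new spec files.

    require "logger"
    require "ldclient-rb/impl/model/feature_flag"

    logger = Logger.new($stdout)

    # A deliberately inconsistent flag: two variations, but offVariation points
    # at index 5. The constructor records the problem and logs it once.
    flag_data = {
      key: "bad-flag",
      version: 1,
      on: false,
      variations: [true, false],
      offVariation: 5,
    }

    LaunchDarkly::Impl::Model::FeatureFlag.new(flag_data, logger)
    # Expected log output (per the constructor above):
    #   Data inconsistency in feature flag "bad-flag": off variation has invalid variation index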
diff --git a/spec/capturing_logger.rb b/spec/capturing_logger.rb new file mode 100644 index 00000000..75064939 --- /dev/null +++ b/spec/capturing_logger.rb @@ -0,0 +1,16 @@ +require "stringio" + +class CapturingLogger + def initialize + @output = StringIO.new + @logger = Logger.new(@output) + end + + def output + @output.string + end + + def method_missing(meth, *args, &block) + @logger.send(meth, *args, &block) + end +end diff --git a/spec/impl/model/model_validation_spec.rb b/spec/impl/model/model_validation_spec.rb new file mode 100644 index 00000000..665f17cd --- /dev/null +++ b/spec/impl/model/model_validation_spec.rb @@ -0,0 +1,275 @@ +require "ldclient-rb/impl/model/feature_flag" + +require "capturing_logger" +require "model_builders" +require "spec_helper" + + +def base_flag + FlagBuilder.new("flagkey").build.as_json +end + +def base_segment + FlagBuilder.new("flagkey").build.as_json +end + +def rules_with_clause(clause) + { + rules: [ + { variation: 0, clauses: [ clause ] }, + ], + } +end + +def segment_rules_with_clause(clause) + { + rules: [ + { clauses: [ clause ] }, + ], + } +end + + +module LaunchDarkly + module Impl + describe "flag model validation" do + describe "should not log errors for" do + [ + [ + "minimal valid flag", + base_flag, + ], + [ + "valid off variation", + base_flag.merge({variations: [true, false], offVariation: 1}), + ], + [ + "valid fallthrough variation", + base_flag.merge({variations: [true, false], fallthrough: {variation: 1}}), + ], + [ + "valid fallthrough rollout", + base_flag.merge({variations: [true, false], fallthrough: { + rollout: { + variations: [ + { variation: 0, weight: 100000 }, + ], + }, + }}), + ], + [ + "valid target variation", + base_flag.merge({variations: [true, false], targets: [ {variation: 1, values: []} ]}), + ], + [ + "valid rule variation", + base_flag.merge({variations: [true, false], rules: [ {variation: 1, clauses: []} ]}), + ], + [ + "valid attribute reference", + base_flag.merge(rules_with_clause({ + attribute: "name", op: "in", values: ["a"] + })), + ], + [ + "missing attribute reference when operator is segmentMatch", + base_flag.merge(rules_with_clause({ + op: "segmentMatch", values: ["a"] + })), + ], + [ + "empty attribute reference when operator is segmentMatch", + base_flag.merge(rules_with_clause({ + attribute: "", op: "segmentMatch", values: ["a"] + })), + ], + ].each do |params| + (name, flag_data_hash) = params + it(name) do + logger = CapturingLogger.new + LaunchDarkly::Impl::Model::FeatureFlag.new(flag_data_hash, logger) + expect(logger.output).to eq('') + end + end + end + + describe "should log errors for" do + [ + [ + "negative off variation", + base_flag.merge({variations: [true, false], offVariation: -1}), + "off variation has invalid variation index", + ], + [ + "too high off variation", + base_flag.merge({variations: [true, false], offVariation: 2}), + "off variation has invalid variation index", + ], + [ + "negative fallthrough variation", + base_flag.merge({variations: [true, false], fallthrough: {variation: -1}}), + "fallthrough has invalid variation index", + ], + [ + "too high fallthrough variation", + base_flag.merge({variations: [true, false], fallthrough: {variation: 2}}), + "fallthrough has invalid variation index", + ], + [ + "negative fallthrough rollout variation", + base_flag.merge({variations: [true, false], fallthrough: { + rollout: { + variations: [ + { variation: -1, weight: 100000 }, + ], + }, + }}), + "fallthrough has invalid variation index", + ], + [ + "fallthrough rollout too high 
variation", + base_flag.merge({variations: [true, false], fallthrough: { + rollout: { + variations: [ + { variation: 2, weight: 100000 }, + ], + }, + }}), + "fallthrough has invalid variation index", + ], + [ + "negative target variation", + base_flag.merge({ + variations: [true, false], + targets: [ + { variation: -1, values: [] }, + ], + }), + "target has invalid variation index", + ], + [ + "too high rule variation", + base_flag.merge({ + variations: [true, false], + targets: [ + { variation: 2, values: [] }, + ], + }), + "target has invalid variation index", + ], + [ + "negative rule variation", + base_flag.merge({ + variations: [true, false], + rules: [ + { variation: -1, clauses: [] }, + ], + }), + "rule has invalid variation index", + ], + [ + "too high rule variation", + base_flag.merge({ + variations: [true, false], + rules: [ + { variation: 2, clauses: [] }, + ], + }), + "rule has invalid variation index", + ], + [ + "negative rule rollout variation", + base_flag.merge({ + variations: [true, false], + rules: [ + { rollout: { variations: [ { variation: -1, weight: 10000 } ] }, clauses: [] }, + ], + }), + "rule has invalid variation index", + ], + [ + "too high rule variation", + base_flag.merge({ + variations: [true, false], + rules: [ + { rollout: { variations: [ { variation: 2, weight: 10000 } ] }, clauses: [] }, + ], + }), + "rule has invalid variation index", + ], + [ + "missing attribute reference", + base_flag.merge(rules_with_clause({ op: "in", values: ["a"] })), + "clause has invalid attribute: empty reference", + ], + [ + "empty attribute reference", + base_flag.merge(rules_with_clause({ attribute: "", op: "in", values: ["a"] })), + "clause has invalid attribute: empty reference", + ], + [ + "invalid attribute reference", + base_flag.merge(rules_with_clause({ contextKind: "user", attribute: "//", op: "in", values: ["a"] })), + "clause has invalid attribute: double or trailing slash", + ], + ].each do |params| + (name, flag_data_hash, expected_error) = params + it(name) do + logger = CapturingLogger.new + LaunchDarkly::Impl::Model::FeatureFlag.new(flag_data_hash, logger) + expect(logger.output).to match(Regexp.escape( + "Data inconsistency in feature flag \"#{flag_data_hash[:key]}\": #{expected_error}" + )) + end + end + end + end + + describe "segment model validation" do + describe "should not log errors for" do + [ + [ + "minimal valid segment", + base_segment, + ], + ].each do |params| + (name, segment_data_hash) = params + it(name) do + logger = CapturingLogger.new + LaunchDarkly::Impl::Model::Segment.new(segment_data_hash, logger) + expect(logger.output).to eq('') + end + end + end + end + + describe "should log errors for" do + [ + [ + "missing attribute reference", + base_segment.merge(segment_rules_with_clause({ op: "in", values: ["a"] })), + "clause has invalid attribute: empty reference", + ], + [ + "empty attribute reference", + base_segment.merge(segment_rules_with_clause({ attribute: "", op: "in", values: ["a"] })), + "clause has invalid attribute: empty reference", + ], + [ + "invalid attribute reference", + base_segment.merge(segment_rules_with_clause({ contextKind: "user", attribute: "//", op: "in", values: ["a"] })), + "clause has invalid attribute: double or trailing slash", + ], + ].each do |params| + (name, segment_data_hash, expected_error) = params + it(name) do + logger = CapturingLogger.new + LaunchDarkly::Impl::Model::Segment.new(segment_data_hash, logger) + expect(logger.output).to match(Regexp.escape( + "Data inconsistency in segment 
\"#{segment_data_hash[:key]}\": #{expected_error}" + )) + end + end + end + end +end