Merge pull request #93 from launchdarkly/eb/ch29197/dependency-order
implement dependency ordering for feature store data
eli-darkly authored Jan 15, 2019
2 parents f18794d + 8436be4 commit aee1a3b
Showing 7 changed files with 213 additions and 5 deletions.
47 changes: 47 additions & 0 deletions lib/ldclient-rb/impl/store_client_wrapper.rb
@@ -0,0 +1,47 @@
require "ldclient-rb/interfaces"
require "ldclient-rb/impl/store_data_set_sorter"

module LaunchDarkly
  module Impl
    #
    # Provides additional behavior that the client requires before or after feature store operations.
    # Currently this just means sorting the data set for init(). In the future we may also use this
    # to provide an update listener capability.
    #
    class FeatureStoreClientWrapper
      include Interfaces::FeatureStore

      def initialize(store)
        @store = store
      end

      def init(all_data)
        @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data))
      end

      def get(kind, key)
        @store.get(kind, key)
      end

      def all(kind)
        @store.all(kind)
      end

      def upsert(kind, item)
        @store.upsert(kind, item)
      end

      def delete(kind, key, version)
        @store.delete(kind, key, version)
      end

      def initialized?
        @store.initialized?
      end

      def stop
        @store.stop
      end
    end
  end
end
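A minimal usage sketch, not part of this diff: the client constructs this wrapper around whatever store the Config supplies, so any data passed to init() is sorted before it reaches the underlying store. The in-memory store and the empty data set below are placeholders.

require "ldclient-rb"

# Wrap a store the same way the client does internally.
store = LaunchDarkly::InMemoryFeatureStore.new
wrapped = LaunchDarkly::Impl::FeatureStoreClientWrapper.new(store)

# init() runs the data through FeatureStoreDataSetSorter before delegating;
# every other method is a straight pass-through to the wrapped store.
wrapped.init({ LaunchDarkly::FEATURES => {}, LaunchDarkly::SEGMENTS => {} })
wrapped.initialized?  # => true
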
55 changes: 55 additions & 0 deletions lib/ldclient-rb/impl/store_data_set_sorter.rb
@@ -0,0 +1,55 @@

module LaunchDarkly
  module Impl
    #
    # Implements a dependency graph ordering for data to be stored in a feature store. We must use this
    # on every data set that will be passed to the feature store's init() method.
    #
    class FeatureStoreDataSetSorter
      #
      # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer
      # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each
      # data kind that has a :get_dependency_keys function, the inner hash will have an iteration order
      # where B is before A if A has a dependency on B.
      #
      # This implementation relies on the fact that hashes in Ruby have an iteration order that is the same
      # as the insertion order. Also, due to the way we deserialize JSON received from LaunchDarkly, the
      # keys in the inner hash will always be symbols.
      #
      def self.sort_all_collections(all_data)
        outer_hash = {}
        kinds = all_data.keys.sort_by { |k|
          k[:priority].nil? ? k[:namespace].length : k[:priority]  # arbitrary order if priority is unknown
        }
        kinds.each do |kind|
          items = all_data[kind]
          outer_hash[kind] = self.sort_collection(kind, items)
        end
        outer_hash
      end

      def self.sort_collection(kind, input)
        dependency_fn = kind[:get_dependency_keys]
        return input if dependency_fn.nil? || input.empty?
        remaining_items = input.clone
        items_out = {}
        while !remaining_items.empty?
          # pick a random item that hasn't been updated yet
          key, item = remaining_items.first
          self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out)
        end
        items_out
      end

      def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out)
        item_key = item[:key].to_sym
        remaining_items.delete(item_key)  # we won't need to visit this item again
        dependency_fn.call(item).each do |dep_key|
          dep_item = remaining_items[dep_key.to_sym]
          self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) if !dep_item.nil?
        end
        items_out[item_key] = item
      end
    end
  end
end
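A small worked example of the guarantee described above, using the FEATURES and SEGMENTS kind constants from in_memory_store.rb (shown later in this diff); the flag and segment data are hypothetical. Segments come out first because they have the lower :priority, and flag "b" comes out before flag "a" because "a" lists it as a prerequisite.

require "ldclient-rb"

all_data = {
  LaunchDarkly::FEATURES => {
    a: { key: "a", prerequisites: [ { key: "b" } ] },  # "a" depends on "b"
    b: { key: "b" }
  },
  LaunchDarkly::SEGMENTS => {
    s: { key: "s" }
  }
}

sorted = LaunchDarkly::Impl::FeatureStoreDataSetSorter.sort_all_collections(all_data)
sorted.keys.map { |k| k[:namespace] }  # => ["segments", "features"]
sorted[LaunchDarkly::FEATURES].keys    # => [:b, :a] - the prerequisite is inserted first
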
13 changes: 11 additions & 2 deletions lib/ldclient-rb/in_memory_store.rb
@@ -6,12 +6,21 @@ module LaunchDarkly
  # we add another storable data type in the future, as long as it follows the same pattern
  # (having "key", "version", and "deleted" properties), we only need to add a corresponding
  # constant here and the existing store should be able to handle it.
  #
  # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter
  # to ensure data consistency during non-atomic updates.

  # @private
  FEATURES = {
-   namespace: "features"
+   namespace: "features",
+   priority: 1, # that is, features should be stored after segments
+   get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }
  }.freeze

  # @private
  SEGMENTS = {
-   namespace: "segments"
+   namespace: "segments",
+   priority: 0
  }.freeze

  #
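To illustrate the comment above about adding another storable data type: a new kind would be just another frozen constant following the same pattern. The EXPERIMENTS kind below is invented for this sketch and is not part of the SDK or of this change.

  # Hypothetical example only.
  # @private
  EXPERIMENTS = {
    namespace: "experiments",
    priority: 2  # stored after features (1), which are stored after segments (0)
  }.freeze
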
5 changes: 5 additions & 0 deletions lib/ldclient-rb/integrations/util/store_wrapper.rb
@@ -151,6 +151,11 @@ module FeatureStoreCore
# Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init},
# but the wrapper will take care of updating the cache if caching is enabled.
#
# If possible, the store should update the entire data set atomically. If that is not possible,
# it should iterate through the outer hash and then the inner hash using the existing iteration
# order of those hashes (the SDK will ensure that the items were inserted into the hashes in
# the correct order), storing each item, and then delete any leftover items at the very end.
#
# @param all_data [Hash] a hash where each key is one of the data kind objects, and each
# value is in turn a hash of string keys to entities
# @return [void]
5 changes: 5 additions & 0 deletions lib/ldclient-rb/interfaces.rb
@@ -33,6 +33,11 @@ module FeatureStore
# date-- there is no need to perform individual version comparisons between the existing
# objects and the supplied features.
#
# If possible, the store should update the entire data set atomically. If that is not possible,
# it should iterate through the outer hash and then the inner hash using the existing iteration
# order of those hashes (the SDK will ensure that the items were inserted into the hashes in
# the correct order), storing each item, and then delete any leftover items at the very end.
#
# @param all_data [Hash] a hash where each key is one of the data kind objects, and each
# value is in turn a hash of string keys to entities
# @return [void]
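A minimal sketch of what the two documentation additions above ask of a store that cannot update atomically: write every item in the given iteration order, and only delete leftover items at the very end. The class name and the hash standing in for an external database are invented for illustration.

# Hypothetical store, not part of the SDK: follows the documented non-atomic init order.
class HashBackedFeatureStore
  def initialize
    @db = {}  # stands in for an external table keyed by [namespace, item key]
  end

  def init(all_data)
    written = []
    all_data.each do |kind, items|       # outer hash: already ordered by kind priority
      items.each do |key, item|          # inner hash: prerequisites come before dependents
        @db[[kind[:namespace], key]] = item
        written << [kind[:namespace], key]
      end
    end
    # Only after all new data is written do we remove items left over from a previous init.
    (@db.keys - written).each { |stale_key| @db.delete(stale_key) }
  end
end
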
14 changes: 11 additions & 3 deletions lib/ldclient-rb/ldclient.rb
@@ -1,3 +1,4 @@
require "ldclient-rb/impl/store_client_wrapper"
require "concurrent/atomics"
require "digest/sha1"
require "logger"
@@ -23,8 +24,15 @@ class LDClient
    # @return [LDClient] The LaunchDarkly client instance
    def initialize(sdk_key, config = Config.default, wait_for_sec = 5)
      @sdk_key = sdk_key
-     @config = config
-     @store = config.feature_store
+
+     # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add
+     # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses
+     # the feature store through the Config object, so we need to make a new Config that uses
+     # the wrapped store.
+     @store = Impl::FeatureStoreClientWrapper.new(config.feature_store)
+     updated_config = config.clone
+     updated_config.instance_variable_set(:@feature_store, @store)
+     @config = updated_config

      if @config.offline? || !@config.send_events
        @event_processor = NullEventProcessor.new
@@ -39,7 +47,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5)

      data_source_or_factory = @config.data_source || self.method(:create_default_data_source)
      if data_source_or_factory.respond_to? :call
-       @data_source = data_source_or_factory.call(sdk_key, config)
+       @data_source = data_source_or_factory.call(sdk_key, @config)
      else
        @data_source = data_source_or_factory
      end
79 changes: 79 additions & 0 deletions spec/ldclient_spec.rb
@@ -375,4 +375,83 @@ def event_processor
      expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor)
    end
  end

  describe "feature store data ordering" do
    let(:dependency_ordering_test_data) {
      {
        LaunchDarkly::FEATURES => {
          a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] },
          b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] },
          c: { key: "c" },
          d: { key: "d" },
          e: { key: "e" },
          f: { key: "f" }
        },
        LaunchDarkly::SEGMENTS => {
          o: { key: "o" }
        }
      }
    }

    class FakeFeatureStore
      attr_reader :received_data

      def init(all_data)
        @received_data = all_data
      end
    end

    class FakeUpdateProcessor
      def initialize(store, data)
        @store = store
        @data = data
      end

      def start
        @store.init(@data)
        ev = Concurrent::Event.new
        ev.set
        ev
      end

      def stop
      end

      def initialized?
        true
      end
    end

    it "passes data set to feature store in correct order on init" do
      store = FakeFeatureStore.new
      data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store,
        dependency_ordering_test_data) }
      config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory)
      client = subject.new("secret", config)

      data = store.received_data
      expect(data).not_to be_nil
      expect(data.count).to eq(2)

      # Segments should always come first
      expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS)
      expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count)

      # Features should be ordered so that a flag always appears after its prerequisites, if any
      expect(data.keys[1]).to be(LaunchDarkly::FEATURES)
      flags_map = data.values[1]
      flags_list = flags_map.values
      expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count)
      flags_list.each_with_index do |item, item_index|
        (item[:prerequisites] || []).each do |prereq|
          prereq = flags_map[prereq[:key].to_sym]
          prereq_index = flags_list.index(prereq)
          if prereq_index > item_index
            all_keys = (flags_list.map { |f| f[:key] }).join(", ")
            raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]"
          end
        end
      end
    end
  end
end
