def init_internal(self, all_data):
        """Atomically replace the table contents with *all_data*.

        Upserts every provided item, deletes previously-stored keys that are
        absent from *all_data*, and finally writes the marker key that
        initialized_internal() checks — all in one batched write.
        """
        # Snapshot the keys already in the table; survivors are pruned below.
        stale_keys = self._read_existing_keys(all_data.keys())
        batch = []
        total = 0
        inited_key = self._inited_key()

        # Queue an upsert for each item and mark its key as still in use.
        for kind, items in all_data.items():
            for item_key, item in items.items():
                batch.append({'PutRequest': {'Item': self._marshal_item(kind, item)}})
                stale_keys.discard((self._namespace_for_kind(kind), item_key))
                total += 1

        # Queue deletions for leftover keys (but never the marker key).
        for namespace, item_key in stale_keys:
            if namespace != inited_key:
                batch.append({'DeleteRequest': {'Key': self._make_keys(namespace, item_key)}})

        # Write the special key checked by initialized_internal().
        batch.append({'PutRequest': {'Item': self._make_keys(inited_key, inited_key)}})

        _DynamoDBHelpers.batch_write_requests(self._client, self._table_name, batch)
        log.info('Initialized table %s with %d items', self._table_name, total)
Example 2
    def init_internal(self, all_data):
        """Replace the entire contents of the Consul KV store with *all_data*.

        This Consul client has no batch ("txn") support, so each item is
        written individually; keys not present in *all_data* are then removed
        and the marker key checked by initialized_internal() is written last.
        """
        # Snapshot existing keys under our prefix so leftovers can be purged.
        index, existing = self._client.kv.get(self._prefix, recurse=True, keys=True)
        stale_keys = set(existing or [])

        total = 0
        inited_key = self._inited_key()
        stale_keys.discard(inited_key)

        # Upsert every item, one request at a time.
        for kind, items in all_data.items():
            for key, item in items.items():
                db_key = self._item_key(kind, item['key'])
                self._client.kv.put(db_key, json.dumps(item))
                stale_keys.discard(db_key)
                total += 1

        # Remove anything present before but absent from the new data.
        for db_key in stale_keys:
            self._client.kv.delete(db_key)

        # Write the marker key checked by initialized_internal().
        self._client.kv.put(inited_key, "")

        log.info('Initialized Consul store with %d items', total)
Example 3
 def _evaluate_and_send_events(self, flag, user, default):
     """Evaluate *flag* for *user*, emit a 'feature' analytics event, and
     deliver the resulting value to the caller.

     NOTE(review): this is a Twisted inlineCallbacks-style generator —
     the ``yield`` waits on the Deferred from _evaluate and
     ``defer.returnValue`` delivers the result; confirm the decorator on
     the enclosing definition.
     """
     # Wait for the asynchronous evaluation to complete.
     value = yield self._evaluate(flag, user)
     # Fall back to the caller-supplied default when evaluation produced nothing.
     if value is None:
         value = default
     log.info("value: " + str(value))
     # Record the evaluation for analytics, including the flag's version.
     self._send_event({'kind': 'feature', 'key': flag.get('key'), 'user': user, 'value': value,
                       'default': default, 'version': flag.get('version')})
     defer.returnValue(value)
Example 4
 def __init__(self, url, prefix, max_connections):
     """Create the store, failing fast if the redis package is unavailable.

     :param url: Redis connection URL
     :param prefix: key namespace prefix; falls back to 'launchdarkly'
     :param max_connections: maximum size of the Redis connection pool
     """
     if not have_redis:
         raise NotImplementedError(
             "Cannot use Redis feature store because redis package is not installed"
         )
     self._prefix = prefix or 'launchdarkly'
     self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections)
     # Hook that the test suite can set to interpose on updates.
     self.test_update_hook = None
     log.info("Started RedisFeatureStore connected to URL: " + url +
              " using prefix: " + self._prefix)
Example 5
    def init(self, features):
        """Overwrite all stored feature flags with *features*, refreshing the
        local cache, using a single Redis pipeline for the writes."""
        client = redis.Redis(connection_pool=self._pool)
        pipe = client.pipeline()
        pipe.delete(self._features_key)

        # Drop stale cached entries before repopulating from the new data.
        self._cache.clear()

        for flag_key, flag in features.items():
            pipe.hset(self._features_key, flag_key, json.dumps(flag))
            self._cache[flag_key] = flag
        pipe.execute()
        log.info("Initialized RedisFeatureStore with " + str(len(features)) + " feature flags")
Example 6
    def __init__(self,
                 url='redis://localhost:6379/0',
                 prefix='launchdarkly',
                 max_connections=16,
                 expiration=15,
                 capacity=1000):
        """Create the store.

        :param url: Redis connection URL
        :param prefix: namespace prefix for the features hash key
        :param max_connections: maximum size of the Redis connection pool
        :param expiration: local-cache TTL in seconds; 0 disables caching
        :param capacity: maximum number of locally cached entries
        """
        self._features_key = "{0}:features".format(prefix)
        # An expiration of 0 means "no local caching at all".
        if expiration == 0:
            self._cache = ForgetfulDict()
        else:
            self._cache = ExpiringDict(max_len=capacity, max_age_seconds=expiration)
        self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections)
        log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix)
Example 7
    def __init__(self,
                 url='redis://localhost:6379/0',
                 prefix='launchdarkly',
                 max_connections=16,
                 expiration=15,
                 capacity=1000):
        """Create the store.

        :param url: Redis connection URL
        :param prefix: key namespace prefix
        :param max_connections: maximum size of the Redis connection pool
        :param expiration: local-cache TTL in seconds; 0 disables caching
        :param capacity: maximum number of locally cached entries
        """
        self._prefix = prefix
        # An expiration of 0 means "no local caching at all".
        if expiration == 0:
            self._cache = ForgetfulDict()
        else:
            self._cache = ExpiringDict(max_len=capacity, max_age_seconds=expiration)
        self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections)
        # Lazily computed, memoized answer to "has the store been initialized?".
        self._inited = MemoizedValue(lambda: self._query_init())
        log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix)
Example 8
    def init_internal(self, all_data):
        """Replace every item collection in Redis with the contents of
        *all_data*, batching all writes into one pipeline round trip."""
        pipe = redis.Redis(connection_pool=self._pool).pipeline()
        total = 0

        for kind, items in all_data.items():
            hash_key = self._items_key(kind)
            # Clear the per-kind hash before rewriting it from scratch.
            pipe.delete(hash_key)
            for item_key, item in items.items():
                pipe.hset(hash_key, item_key, json.dumps(item))
            total += len(items)
        pipe.execute()
        log.info("Initialized RedisFeatureStore with %d items", total)
Example 9
    def init(self, all_data):
        """Replace all stored items with *all_data*, mirroring them into the
        local cache.

        All writes go through a single Redis pipeline. The cache is filled
        optimistically while the pipeline is built; if the pipeline execution
        fails for any reason, the cache is cleared so it can never serve data
        that was not actually written to Redis, and the error is re-raised.
        """
        pipe = redis.Redis(connection_pool=self._pool).pipeline()

        self._cache.clear()
        all_count = 0

        for kind, items in all_data.items():
            base_key = self._items_key(kind)
            # Wipe the per-kind hash before rewriting it from scratch.
            pipe.delete(base_key)
            for key, item in items.items():
                pipe.hset(base_key, key, json.dumps(item))
                # Optimistic cache fill; rolled back below on failure.
                self._cache[self._cache_key(kind, key)] = item
            all_count = all_count + len(items)
        try:
            pipe.execute()
        except BaseException:
            # Was a bare `except:`; made explicit (PEP 8) while preserving the
            # catch-everything semantics — even on KeyboardInterrupt the cache
            # must not outlive a failed write. Always re-raises.
            self._cache.clear()
            raise
        log.info("Initialized RedisFeatureStore with %d items", all_count)
        self._inited.set(True)