def setUp(self):
    """Point the fixture paths at the bundled JSON files and build redis caches."""
    fixtures_dir = dirname(__file__)
    self._segment_changes_file_name = join(fixtures_dir, 'segmentChanges.json')
    self._split_changes_file_name = join(fixtures_dir, 'splitChanges.json')
    self._redis = get_redis({'redisPrefix': 'test'})
    self._redis_split_cache = RedisSplitCache(self._redis)
    self._redis_segment_cache = RedisSegmentCache(self._redis)
def get_redis_broker(api_key, **kwargs):
    """Build a Split broker that reads its data from a Redis instance.

    Impressions and metrics are written to the same Redis instance. For this
    to work properly you need to periodically run the update_splits and
    update_segments scripts, and run the send_impressions and send_metrics
    scripts to push impressions and metrics to the Split.io backend.

    The config_file parameter names a file holding the broker configuration.
    Example config file:

      {
        "apiKey": "some-api-key",
        "sdkApiBaseUrl": "https://sdk.split.io/api",
        "eventsApiBaseUrl": "https://events.split.io/api",
        "redisFactory": 'some.redis.factory',
        "redisHost": "localhost",
        "redisPort": 6879,
        "redisDb": 0,
      }

    If the redisFactory entry is present it is used to build the redis broker
    instance; otherwise the values of redisHost, redisPort and redisDb are
    used.

    If the api_key argument is 'localhost', a localhost environment broker is
    built from the contents of a .split file in the user's home directory.
    The definition file has the following syntax:

      file: (comment | split_line)+
      comment : '#' string*\\n
      split_line : feature_name ' ' treatment\\n
      feature_name : string
      treatment : string

    The location of the split file can be changed with the
    split_definition_file_name argument.

    :param api_key: The API key provided by Split.io
    :type api_key: str
    :param config_file: Filename of the config file
    :type config_file: str
    :param sdk_api_base_url: An override for the default API base URL.
    :type sdk_api_base_url: str
    :param events_api_base_url: An override for the default events base URL.
    :type events_api_base_url: str
    :param split_definition_file_name: Name of the definition file (Optional)
    :type split_definition_file_name: str
    """
    api_key, config, _, _ = _init_config(api_key, **kwargs)

    # Localhost mode bypasses Redis entirely.
    if api_key == 'localhost':
        return LocalhostBroker(config, **kwargs)

    return RedisBroker(get_redis(config), config)
def setUp(self):
    """Load one split definition into redis and build a client around it."""
    self._some_config = mock.MagicMock()
    self._split_changes_file_name = join(
        dirname(__file__), 'splitCustomImpressionListener.json')
    with open(self._split_changes_file_name) as source:
        self._json = load(source)

    first_split = self._json['splits'][0]
    self._redis = get_redis(
        {'redisPrefix': 'customImpressionListenerTest'})
    self._redis_split_cache = RedisSplitCache(self._redis)
    self._redis_split_cache.add_split(first_split['name'], first_split)

    self._client = Client(RedisBroker(self._redis, self._some_config))
    self.some_feature = 'feature_0'
    self.some_impression_0 = Impression(
        matching_key=mock.MagicMock(),
        feature_name=self.some_feature,
        treatment=mock.MagicMock(),
        label=mock.MagicMock(),
        change_number=mock.MagicMock(),
        bucketing_key=mock.MagicMock(),
        time=mock.MagicMock())
def setUp(self):
    """Load one split definition into redis and build both a raw Client and a
    factory-produced client for the get_treatments tests."""
    self._some_config = mock.MagicMock()
    self._split_changes_file_name = join(dirname(__file__),
                                         'splitGetTreatments.json')
    with open(self._split_changes_file_name) as f:
        self._json = load(f)
    split_definition = self._json['splits'][0]
    split_name = split_definition['name']
    self._redis = get_redis({'redisPrefix': 'getTreatmentsTest'})
    self._redis_split_cache = RedisSplitCache(self._redis)
    self._redis_split_cache.add_split(split_name, split_definition)
    self._client = Client(RedisBroker(self._redis, self._some_config))
    self._config = {
        'ready': 180000,
        'redisDb': 0,
        'redisHost': 'localhost',
        # Fixed typo: was 'redisPosrt', which made the port setting a no-op.
        'redisPort': 6379,
        'redisPrefix': 'getTreatmentsTest'
    }
    self._factory = get_factory('asdqwe123456', config=self._config)
    self._split = self._factory.client()
def run(arguments):
    """Post cached impressions to the Split.io backend once.

    :param arguments: docopt-style argument dict; '<config_file>' names the
        broker configuration file.
    """
    try:
        config = parse_config_file(arguments['<config_file>'])
        redis = get_redis(config)
        impressions_cache = RedisImpressionsCache(redis)
        sdk_api = api_factory(config)
        report_impressions(impressions_cache, sdk_api)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught posting impressions')
def run(arguments):
    """Post cached metrics to the Split.io backend once.

    :param arguments: docopt-style argument dict; '<config_file>' names the
        broker configuration file.
    """
    try:
        config = parse_config_file(arguments['<config_file>'])
        redis = get_redis(config)
        metrics_cache = RedisMetricsCache(redis)
        sdk_api = api_factory(config)
        report_metrics(metrics_cache, sdk_api)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught posting metrics')
def run(arguments):
    """Fetch segment changes from the Split.io API and update the redis cache once.

    :param arguments: docopt-style argument dict; '<config_file>' names the
        broker configuration file.
    """
    try:
        config = parse_config_file(arguments['<config_file>'])
        redis = get_redis(config)
        segment_cache = RedisSegmentCache(redis)
        sdk_api = api_factory(config)
        segment_change_fetcher = ApiSegmentChangeFetcher(sdk_api)
        update_segments(segment_cache, segment_change_fetcher)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught updating segments')
def testAlgoHandlers(self):
    """Each test case's parsed split must expose the expected algo and hash fn."""
    split_parser = RedisSplitParser(RedisSegmentCache(get_redis({})))
    for case in self._testData:
        parsed = split_parser.parse(case['body'], True)
        self.assertEqual(parsed.algo, case['algo'])
        self.assertEqual(get_hash_fn(parsed.algo), case['hashfn'])
def setUp(self):
    """Build a redis-backed client and capture the validator's log mocks."""
    self.some_config = mock.MagicMock()
    self.some_api_key = mock.MagicMock()
    self.redis = get_redis({'redisPrefix': 'test'})
    self.client = Client(RedisBroker(self.redis, self.some_config))

    # Replace the validator's loggers with mocks and keep handles on them so
    # tests can assert what was logged.
    input_validator._LOGGER.error = mock.MagicMock()
    input_validator._LOGGER.warning = mock.MagicMock()
    self.logger_error = input_validator._LOGGER.error
    self.logger_warning = input_validator._LOGGER.warning
def _report_metrics(seconds, config):
    """Periodically post cached metrics to the Split.io backend.

    Loops forever, sleeping ``seconds`` between iterations. The redis
    connection and SDK api are rebuilt each iteration (presumably for
    reconnect resilience — left as-is).

    :param seconds: Delay between reporting passes.
    :type seconds: int
    :param config: Broker configuration dict.
    :type config: dict
    """
    try:
        while True:
            redis = get_redis(config)
            metrics_cache = RedisMetricsCache(redis)
            sdk_api = api_factory(config)
            report_metrics(metrics_cache, sdk_api)
            time.sleep(seconds)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught posting metrics')
def _report_impressions(seconds, config):
    """Periodically post cached impressions to the Split.io backend.

    Loops forever, sleeping ``seconds`` between iterations. The redis
    connection and SDK api are rebuilt each iteration (presumably for
    reconnect resilience — left as-is).

    :param seconds: Delay between reporting passes.
    :type seconds: int
    :param config: Broker configuration dict.
    :type config: dict
    """
    try:
        while True:
            redis = get_redis(config)
            impressions_cache = RedisImpressionsCache(redis)
            sdk_api = api_factory(config)
            report_impressions(impressions_cache, sdk_api)
            time.sleep(seconds)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught posting impressions')
def _update_segments(seconds, config):
    """Periodically refresh the redis segment cache from the Split.io API.

    Loops forever, sleeping ``seconds`` between iterations. The redis
    connection and SDK api are rebuilt each iteration (presumably for
    reconnect resilience — left as-is).

    :param seconds: Delay between update passes.
    :type seconds: int
    :param config: Broker configuration dict.
    :type config: dict
    """
    try:
        while True:
            redis = get_redis(config)
            segment_cache = RedisSegmentCache(redis)
            sdk_api = api_factory(config)
            segment_change_fetcher = ApiSegmentChangeFetcher(sdk_api)
            update_segments(segment_cache, segment_change_fetcher)
            time.sleep(seconds)
    # Narrowed from a bare except so SystemExit/KeyboardInterrupt still
    # propagate; any other failure is logged with its traceback.
    except Exception:
        logger.exception('Exception caught updating segments')
def setUp(self):
    """Build a redis-backed client with a stubbed feature fetch and mocked logs."""
    self.some_config = mock.MagicMock()
    self.some_api_key = mock.MagicMock()
    self.redis = get_redis({'redisPrefix': 'test'})
    self.client = Client(RedisBroker(self.redis, self.some_config))

    # Stub out the broker lookup so every fetch yields a fixed ACTIVE split,
    # and neutralize impression building.
    stub_split = Split("some_feature", 0, False, "default_treatment",
                       "user", "ACTIVE", 123)
    self.client._broker.fetch_feature = mock.MagicMock(return_value=stub_split)
    self.client._build_impression = mock.MagicMock()

    # Replace the validator's loggers with mocks and keep handles for asserts.
    input_validator._LOGGER.error = mock.MagicMock()
    input_validator._LOGGER.warning = mock.MagicMock()
    self.logger_error = input_validator._LOGGER.error
    self.logger_warning = input_validator._LOGGER.warning
def setUp(self):
    """Seed redis with one split, then build a client over a read-only redis mock."""
    self._some_config = mock.MagicMock()
    self._split_changes_file_name = join(dirname(__file__),
                                         'splitChangesReadOnly.json')
    with open(self._split_changes_file_name) as source:
        self._json = load(source)

    first_split = self._json['splits'][0]
    self._redis = get_redis({'redisPrefix': 'test'})
    self._mocked_redis = ReadOnlyRedisMock(self._redis)

    # Writes go through the real connection; the client under test only
    # sees the read-only wrapper.
    self._redis_split_cache = RedisSplitCache(self._redis)
    self._redis_split_cache.add_split(first_split['name'], first_split)
    self._client = Client(RedisBroker(self._mocked_redis, self._some_config))

    self._impression = mock.MagicMock()
    self._start = mock.MagicMock()
    self._operation = mock.MagicMock()
def setUp(self):
    '''Build four parsed split fixtures (whitelist + three rollout variants)
    from a single mutated raw definition dict.'''
    redis = get_redis({})
    segment_cache = RedisSegmentCache(redis)
    split_parser = RedisSplitParser(segment_cache)
    # NOTE(review): elsewhere in this file RedisBroker is constructed with a
    # (redis, config) pair; here only redis is passed — confirm the config
    # argument is optional.
    self._client = Client(RedisBroker(redis))
    self._splitObjects = {}
    raw_split = {
        'name': 'test1',
        'algo': 1,
        'killed': False,
        'status': 'ACTIVE',
        'defaultTreatment': 'default',
        'seed': -1222652054,
        'orgId': None,
        'environment': None,
        'trafficTypeId': None,
        'trafficTypeName': None,
        'changeNumber': 1,
        'conditions': [{
            'conditionType': 'WHITELIST',
            'matcherGroup': {
                'combiner': 'AND',
                'matchers': [{
                    'matcherType': 'ALL_KEYS',
                    'negate': False,
                    'userDefinedSegmentMatcherData': None,
                    'whitelistMatcherData': None
                }]
            },
            'partitions': [{
                'treatment': 'on',
                'size': 100
            }],
            'label': 'in segment all'
        }]
    }
    self._splitObjects['whitelist'] = split_parser.parse(raw_split, True)
    # The SAME dict is mutated in place between parse calls; this assumes
    # parse() copies what it needs rather than keeping a reference to
    # raw_split — TODO confirm against the parser implementation.
    raw_split['name'] = 'test2'
    raw_split['conditionType'] = 'ROLLOUT' if False else raw_split['conditions'][0].__setitem__('conditionType', 'ROLLOUT') or raw_split.get('conditionType')
    self._splitObjects['rollout1'] = split_parser.parse(raw_split, True)
    raw_split['name'] = 'test3'
    raw_split['trafficAllocation'] = 1
    raw_split['trafficAllocationSeed'] = -1
    self._splitObjects['rollout2'] = split_parser.parse(raw_split, True)
    raw_split['name'] = 'test4'
    raw_split['trafficAllocation'] = None  # None must be mapped as 100 by the parser
    raw_split['trafficAllocationSeed'] = -1
    self._splitObjects['rollout3'] = split_parser.parse(raw_split, True)