def setup_method(self):
    """Load split and segment fixtures from disk and build an in-memory factory."""
    files_dir = os.path.join(os.path.dirname(__file__), 'files')

    split_storage = InMemorySplitStorage()
    with open(os.path.join(files_dir, 'splitChanges.json'), 'r') as flo:
        for raw_split in json.loads(flo.read())['splits']:
            split_storage.put(splits.from_raw(raw_split))

    # Both segment fixtures are loaded the same way into one storage.
    segment_storage = InMemorySegmentStorage()
    for segment_file in ('segmentEmployeesChanges.json', 'segmentHumanBeignsChanges.json'):
        with open(os.path.join(files_dir, segment_file), 'r') as flo:
            segment_storage.put(segments.from_raw(json.loads(flo.read())))

    storages = {
        'splits': split_storage,
        'segments': segment_storage,
        'impressions': InMemoryImpressionStorage(5000),
        'events': InMemoryEventStorage(5000),
        'telemetry': InMemoryTelemetryStorage()
    }
    impmanager = ImpressionsManager(ImpressionsMode.OPTIMIZED, True)
    recorder = StandardRecorder(impmanager, storages['telemetry'],
                                storages['events'], storages['impressions'])
    self.factory = SplitFactory('some_api_key', storages, True, recorder)  # pylint:disable=attribute-defined-outside-init
def _update_splits(self):
    """
    Hit endpoint, update storage and return True if sync is complete.

    :return: True if synchronization is complete.
    :rtype: bool
    """
    since = self._split_storage.get_change_number()
    since = -1 if since is None else since

    try:
        changes = self._api.fetch_splits(since)
    except APIException:
        self._logger.error('Failed to fetch split from servers')
        return False

    # Store active splits; drop anything archived/killed server-side.
    for raw in changes.get('splits', []):
        if raw['status'] != splits.Status.ACTIVE.value:
            self._split_storage.remove(raw['name'])
        else:
            self._split_storage.put(splits.from_raw(raw))

    self._split_storage.set_change_number(changes['till'])
    # Sync is complete once the server reports no newer changes.
    return changes['till'] == changes['since']
def fetch_many(self, split_names):
    """
    Retrieve multiple splits by name.

    :param split_names: Names of the features to fetch.
    :type split_names: list(str)

    :return: A dict mapping each requested name to its parsed split,
        or None for names that are missing or unparseable.
    :rtype: dict(split_name, splitio.models.splits.Split)
    """
    to_return = dict()
    try:
        keys = [self._get_key(split_name) for split_name in split_names]
        raw_splits = self._redis.mget(keys)
        # MGET preserves request order, so names and payloads pair up 1:1.
        for name, raw in zip(split_names, raw_splits):
            split = None
            try:
                split = splits.from_raw(json.loads(raw))
            except (ValueError, TypeError):
                # TypeError: raw is None (missing key); ValueError: bad JSON.
                _LOGGER.error('Could not parse split.')
                _LOGGER.debug("Raw split that failed parsing attempt: %s", raw)
            to_return[name] = split
    except RedisAdapterException:
        _LOGGER.error('Error fetching splits from storage')
        _LOGGER.debug('Error: ', exc_info=True)
    return to_return
def setup_method(self):
    """Load split and segment fixtures and build an in-memory SplitFactory."""
    files_dir = os.path.join(os.path.dirname(__file__), 'files')

    split_storage = InMemorySplitStorage()
    with open(os.path.join(files_dir, 'splitChanges.json'), 'r') as flo:
        for raw_split in json.loads(flo.read())['splits']:
            split_storage.put(splits.from_raw(raw_split))

    # Both segment fixtures are loaded identically into one storage.
    segment_storage = InMemorySegmentStorage()
    for segment_file in ('segmentEmployeesChanges.json', 'segmentHumanBeignsChanges.json'):
        with open(os.path.join(files_dir, segment_file), 'r') as flo:
            segment_storage.put(segments.from_raw(json.loads(flo.read())))

    self.factory = SplitFactory(  # pylint:disable=attribute-defined-outside-init
        {
            'splits': split_storage,
            'segments': segment_storage,
            'impressions': InMemoryImpressionStorage(5000),
            'events': InMemoryEventStorage(5000),
            'telemetry': InMemoryTelemetryStorage()
        },
        True)
def synchronize_splits(self, till=None):
    """
    Hit endpoint, update storage and return when the storage is up to date.

    :param till: Passed till from Streaming (optional target change number).
    :type till: int

    :raises APIException: If the split changes cannot be fetched.
    """
    while True:
        change_number = self._split_storage.get_change_number()
        if change_number is None:
            change_number = -1
        if till is not None and till < change_number:
            # the passed till is less than change_number, no need to perform updates
            return

        try:
            split_changes = self._api.fetch_splits(change_number)
        except APIException:
            _LOGGER.error('Exception raised while fetching splits')
            _LOGGER.debug('Exception information: ', exc_info=True)
            raise  # bare raise preserves the original traceback

        for split in split_changes.get('splits', []):
            if split['status'] == splits.Status.ACTIVE.value:
                self._split_storage.put(splits.from_raw(split))
            else:
                self._split_storage.remove(split['name'])

        self._split_storage.set_change_number(split_changes['till'])
        # Stop once the server reports no newer changes and any streaming
        # target has been reached.
        if split_changes['till'] == split_changes['since'] \
                and (till is None or split_changes['till'] >= till):
            return
def test_put_fetch(self):
    """Test storing and retrieving splits in redis."""
    adapter = _build_default_client({})
    try:
        storage = RedisSplitStorage(adapter)

        changes_path = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json')
        with open(changes_path, 'r') as flo:
            split_changes = json.load(flo)

        # Seed redis directly with the serialized form of every split.
        split_objects = [splits.from_raw(raw) for raw in split_changes['splits']]
        for split_object in split_objects:
            key = RedisSplitStorage._SPLIT_KEY.format(split_name=split_object.name)
            adapter.set(key, json.dumps(split_object.to_json()))

        original_splits = {split.name: split for split in split_objects}
        fetched_splits = {name: storage.get(name) for name in original_splits}

        assert set(original_splits.keys()) == set(fetched_splits.keys())

        # Every stored split must round-trip attribute-for-attribute.
        for original in original_splits.values():
            fetched = fetched_splits[original.name]
            assert original.traffic_type_name == fetched.traffic_type_name
            assert original.seed == fetched.seed
            assert original.algo == fetched.algo
            assert original.status == fetched.status
            assert original.change_number == fetched.change_number
            assert original.killed == fetched.killed
            assert original.default_treatment == fetched.default_treatment
            for original_condition, fetched_condition in zip(original.conditions,
                                                             fetched.conditions):
                assert original_condition.label == fetched_condition.label
                assert original_condition.condition_type == fetched_condition.condition_type
                assert len(original_condition.matchers) == len(fetched_condition.matchers)
                assert len(original_condition.partitions) == len(fetched_condition.partitions)

        adapter.set(RedisSplitStorage._SPLIT_TILL_KEY, split_changes['till'])
        assert storage.get_change_number() == split_changes['till']
    finally:
        # Clean up everything this test may have written to redis.
        for key in ("SPLITIO.split.sample_feature",
                    "SPLITIO.splits.till",
                    "SPLITIO.split.all_feature",
                    "SPLITIO.split.killed_feature",
                    "SPLITIO.split.Risk_Max_Deductible",
                    "SPLITIO.split.whitelist_feature",
                    "SPLITIO.split.regex_test",
                    "SPLITIO.split.boolean_test",
                    "SPLITIO.split.dependency_test"):
            adapter.delete(key)
def test_to_split_view(self):
    """Test SplitView creation."""
    view = splits.from_raw(self.raw).to_split_view()
    assert isinstance(view, splits.SplitView)
    # Each SplitView attribute must mirror its raw-dict counterpart.
    expected = {
        'name': self.raw['name'],
        'change_number': self.raw['changeNumber'],
        'killed': self.raw['killed'],
        'traffic_type': self.raw['trafficTypeName'],
    }
    for attr, value in expected.items():
        assert getattr(view, attr) == value
    assert set(view.treatments) == {'on', 'off'}
def test_to_json(self):
    """Test json serialization."""
    as_json = splits.from_raw(self.raw).to_json()
    assert isinstance(as_json, dict)
    # Scalar fields checked table-driven; identity/length checks kept explicit.
    expected_fields = {
        'changeNumber': 123,
        'trafficTypeName': 'user',
        'name': 'some_name',
        'trafficAllocation': 100,
        'trafficAllocationSeed': 123456,
        'seed': 321654,
        'status': 'ACTIVE',
        'defaultTreatment': 'off',
        'algo': 2,
    }
    for field, value in expected_fields.items():
        assert as_json[field] == value
    assert as_json['killed'] is False
    assert len(as_json['conditions']) == 2
def get(self, split_name):  # pylint: disable=method-hidden
    """
    Retrieve a split.

    :param split_name: Name of the feature to fetch.
    :type split_name: str

    :return: A split object parsed from redis if the key exists. None otherwise
    :rtype: splitio.models.splits.Split
    """
    try:
        raw = self._redis.get(self._get_key(split_name))
        if raw is None:
            # Key not present in redis.
            return None
        return splits.from_raw(json.loads(raw))
    except RedisAdapterException:
        self._logger.error('Error fetching split from storage')
        self._logger.debug('Error: ', exc_info=True)
        return None
def test_from_raw(self):
    """Test split model parsing."""
    parsed = splits.from_raw(self.raw)
    assert isinstance(parsed, splits.Split)
    # Scalar attributes checked table-driven; identity/length/config checks explicit.
    expected_attrs = {
        'change_number': 123,
        'traffic_type_name': 'user',
        'name': 'some_name',
        'traffic_allocation': 100,
        'traffic_allocation_seed': 123456,
        'seed': 321654,
        'status': splits.Status.ACTIVE,
        'default_treatment': 'off',
        'algo': splits.HashAlgorithm.MURMUR,
    }
    for attr, value in expected_attrs.items():
        assert getattr(parsed, attr) == value
    assert parsed.killed is False
    assert len(parsed.conditions) == 2
    assert parsed.get_configurations_for('on') == '{"color": "blue", "size": 13}'
    assert parsed._configurations == {'on': '{"color": "blue", "size": 13}'}
def get(self, split_name):
    """
    Retrieve a split.

    :param split_name: Name of the feature to fetch.
    :type split_name: str

    :return: The parsed split if present in the cache, None otherwise.
    :rtype: splitio.models.splits.Split
    """
    raw = self._uwsgi.cache_get(
        self._KEY_TEMPLATE.format(suffix=split_name),
        _SPLITIO_SPLITS_CACHE_NAMESPACE)
    to_return = splits.from_raw(json.loads(raw)) if raw is not None else None
    if not to_return:
        # Typo fixed in log message ("nonexistant" -> "nonexistent").
        _LOGGER.warning(
            "Trying to retrieve nonexistent split %s. Ignoring.", split_name)
    return to_return
def get_all_splits(self):
    """
    Return all the splits in cache.

    :return: List of all splits in cache.
    :rtype: list(splitio.models.splits.Split)
    """
    keys = self._redis.keys(self._get_key('*'))
    to_return = []
    try:
        raw_splits = self._redis.mget(keys)
        for raw in raw_splits:
            try:
                to_return.append(splits.from_raw(json.loads(raw)))
            except (ValueError, TypeError):
                # TypeError covers a None payload (key expired/deleted between
                # KEYS and MGET); ValueError covers malformed JSON. Matches
                # the error handling in fetch_many.
                self._logger.error('Could not parse split. Skipping')
    except RedisAdapterException:
        self._logger.error('Error fetching all splits from storage')
        self._logger.debug('Error: ', exc_info=True)
    return to_return
def _make_split(split_name, conditions, configs=None):
    """
    Build a Split model with the supplied conditions.

    :param split_name: Name of the split.
    :type split_name: str.
    """
    # All non-parameterized fields use fixed fixture values.
    raw = {
        'changeNumber': 123,
        'trafficTypeName': 'user',
        'name': split_name,
        'trafficAllocation': 100,
        'trafficAllocationSeed': 123456,
        'seed': 321654,
        'status': 'ACTIVE',
        'killed': False,
        'defaultTreatment': 'control',
        'algo': 2,
        'conditions': conditions,
        'configurations': configs,
    }
    return splits.from_raw(raw)
def _fetch_until(self, fetch_options, till=None):
    """
    Hit endpoint, update storage and return when since==till.

    :param fetch_options: Fetch options for getting split definitions.
    :type fetch_options: splitio.api.FetchOptions

    :param till: Passed till from Streaming.
    :type till: int

    :return: Last change number and the names of every segment referenced
        by the splits that were stored.
    :rtype: tuple(int, set(str))

    :raises APIException: If the split changes cannot be fetched.
    """
    segment_list = set()
    while True:  # Fetch until since==till
        change_number = self._split_storage.get_change_number()
        if change_number is None:
            change_number = -1
        if till is not None and till < change_number:
            # the passed till is less than change_number, no need to perform updates
            return change_number, segment_list

        try:
            split_changes = self._api.fetch_splits(change_number, fetch_options)
        except APIException:
            _LOGGER.error('Exception raised while fetching splits')
            _LOGGER.debug('Exception information: ', exc_info=True)
            raise  # bare raise preserves the original traceback

        for split in split_changes.get('splits', []):
            if split['status'] == splits.Status.ACTIVE.value:
                parsed = splits.from_raw(split)
                self._split_storage.put(parsed)
                # Track referenced segments so the caller can sync them too.
                segment_list.update(set(parsed.get_segment_names()))
            else:
                self._split_storage.remove(split['name'])

        self._split_storage.set_change_number(split_changes['till'])
        if split_changes['till'] == split_changes['since']:
            return split_changes['till'], segment_list