def setUp(self):
    """Build an Optimizely client from the features datafile and prepare logger patches.

    Exposes the client's project config and decision service on the test case,
    plus two mock.patch objects for their loggers (tests start/stop these).
    """
    base.BaseTest.setUp(self)
    opt_obj = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    self.project_config = opt_obj.config
    self.decision_service = opt_obj.decision_service
    # Patch objects are created here but not started; individual tests
    # activate them when they need to assert on log output.
    self.mock_decision_logger = mock.patch.object(self.decision_service, 'logger')
    self.mock_config_logger = mock.patch.object(self.project_config, 'logger')
def test_get_variation_for_feature__returns_variation_if_user_not_in_experiment_but_in_rollout(
        self):
    """ Test that get_variation_for_feature returns the variation of the experiment in the
    feature's rollout even if the user is not bucketed into the feature's experiment. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_in_experiment_and_rollout')
    rollout_variation = config.get_variation_from_id('test_rollout_exp_1', '211129')

    # First get_variation call (the experiment) misses; the second (the
    # rollout rule) buckets the user.
    with mock.patch(
            'optimizely.decision_service.DecisionService.get_variation',
            side_effect=[None, rollout_variation]) as mock_get_variation:
        self.assertEqual(
            rollout_variation,
            decision_service.get_variation_for_feature(feature, 'user1'))
        self.assertEqual(2, mock_get_variation.call_count)
        mock_get_variation.assert_any_call(
            config.get_experiment_from_key('test_experiment'), 'user1', None)
        mock_get_variation.assert_any_call(
            config.get_experiment_from_key('test_rollout_exp_1'), 'user1', None, True)
def test_does_user_meet_audience_conditions__evaluates_audience_conditions_leaf_node(self):
    """ Test that does_user_meet_audience_conditions correctly evaluates leaf node in
    audienceConditions. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_typed_audiences))
    config = opt.config_manager.get_config()
    experiment = config.get_experiment_from_key('audience_combinations_experiment')
    # A bare audience id (no boolean operator) is a valid leaf condition.
    experiment.audienceConditions = '3468206645'

    with mock.patch('optimizely.helpers.condition.CustomAttributeConditionEvaluator') as evaluator:
        audience.does_user_meet_audience_conditions(
            config,
            experiment.get_audience_conditions_or_ids(),
            enums.ExperimentAudienceEvaluationLogs,
            'audience_combinations_experiment',
            {},
            self.mock_client_logger,
        )

        leaf_audience = config.get_audience('3468206645')
        evaluator.assert_has_calls(
            [
                mock.call(leaf_audience.conditionList, {}, self.mock_client_logger),
                mock.call().evaluate(0),
                mock.call().evaluate(1),
            ],
            any_order=True,
        )
def set_obj(self, url=None):
    """Create the Optimizely client from a CDN datafile.

    Args:
        url: optional datafile URL; defaults to the CDN URL built from
            ``self.project_id``.
    """
    if not url:
        url = 'https://cdn.optimizely.com/json/{0}.json'.format(self.project_id)
    datafile = self.retrieve_datafile(url)
    # Use a keyword argument instead of a positional None placeholder for the
    # event dispatcher (matches the keyword style of the other set_obj).
    self.obj = optimizely.Optimizely(datafile, logger=SimpleLogger())
def test_get_enabled_features(self):
    """ Test that get_enabled_features only returns features that are enabled for the
    specified user. """
    optimizely_instance = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    # (removed an unused local that fetched optimizely_instance.config)

    def side_effect(*args, **kwargs):
        # Only these two feature keys report as enabled for the user.
        feature_key = args[0]
        return feature_key in ('test_feature_1', 'test_feature_2')

    with mock.patch('optimizely.optimizely.Optimizely.is_feature_enabled',
                    side_effect=side_effect) as mock_is_feature_enabled:
        received_features = optimizely_instance.get_enabled_features('user_1')

    expected_enabled_features = ['test_feature_1', 'test_feature_2']
    self.assertEqual(sorted(expected_enabled_features), sorted(received_features))
    # Every feature in the datafile must have been checked, not only the enabled ones.
    mock_is_feature_enabled.assert_any_call('test_feature_1', 'user_1', None)
    mock_is_feature_enabled.assert_any_call('test_feature_2', 'user_1', None)
    mock_is_feature_enabled.assert_any_call('test_feature_in_group', 'user_1', None)
    mock_is_feature_enabled.assert_any_call(
        'test_feature_in_experiment_and_rollout', 'user_1', None)
def test_get_variation_for_feature__returns_variation_for_feature_in_group(
        self):
    """ Test that get_variation_for_feature returns the variation of the experiment the user is
    bucketed in the feature's group. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_in_group')
    expected_variation = config.get_variation_from_id('group_exp_1', '28901')

    with mock.patch(
            'optimizely.decision_service.DecisionService.get_experiment_in_group',
            return_value=config.get_experiment_from_key('group_exp_1')) as mock_experiment_in_group, \
            mock.patch('optimizely.decision_service.DecisionService.get_variation',
                       return_value=expected_variation) as mock_get_variation:
        self.assertEqual(
            expected_variation,
            decision_service.get_variation_for_feature(feature, 'user1'))
        mock_experiment_in_group.assert_called_once_with(
            config.get_group('19228'), 'user1')
        mock_get_variation.assert_called_once_with(
            config.get_experiment_from_key('group_exp_1'), 'user1', None)
def get_client_and_optimizely_parameters(cls, user_dict):
    """Return (client, user_id, attributes) derived from *user_dict*.

    Pops the 'id' key out of *user_dict* (mutating it) so the remainder can
    be passed on as user attributes; user_id is '' when no dict is given.
    """
    client = optimizely.Optimizely(sdk_key=settings.OPTIMIZELY_SDK_KEY)
    user_id = user_dict.pop('id') if user_dict else ''
    return client, user_id, user_dict
def test_skip_json_validation_true(self):
    """ Test that on setting skip_json_validation to true, JSON schema validation
    is not performed. """
    with mock.patch('optimizely.helpers.validator.is_datafile_valid') as mock_datafile_validation:
        optimizely.Optimizely(json.dumps(self.config_dict), skip_json_validation=True)

    # The schema validator must never have been invoked.
    self.assertEqual(0, mock_datafile_validation.call_count)
def test_does_user_meet_audience_conditions__evaluates_audience_conditions(self):
    """Verify the exact debug/info log sequence emitted while evaluating nested
    audienceConditions for a rollout rule."""
    # Using experiment as rule for testing log messages
    opt_obj = optimizely.Optimizely(json.dumps(self.config_dict_with_typed_audiences))
    project_config = opt_obj.config_manager.get_config()
    experiment = project_config.get_experiment_from_key('audience_combinations_experiment')
    experiment.audienceIds = []
    experiment.audienceConditions = [
        'or',
        ['or', '3468206642', '3988293898', '3988293899'],
    ]
    audience_3468206642 = project_config.get_audience('3468206642')
    audience_3988293898 = project_config.get_audience('3988293898')
    audience_3988293899 = project_config.get_audience('3988293899')
    # Leaf results FALSE, UNKNOWN (None), TRUE -> the 'or' collapses to TRUE.
    with mock.patch(
        'optimizely.helpers.condition.CustomAttributeConditionEvaluator.evaluate',
        side_effect=[False, None, True],
    ):
        audience.does_user_meet_audience_conditions(
            project_config,
            experiment.get_audience_conditions_or_ids(),
            enums.RolloutRuleAudienceEvaluationLogs,
            'test_rule',
            {},
            self.mock_client_logger
        )
    self.assertEqual(7, self.mock_client_logger.debug.call_count)
    self.assertEqual(1, self.mock_client_logger.info.call_count)
    self.mock_client_logger.assert_has_calls(
        [
            mock.call.debug(
                'Evaluating audiences for rule '
                'test_rule: ["or", ["or", "3468206642", '
                '"3988293898", "3988293899"]].'
            ),
            mock.call.debug(
                'Starting to evaluate audience "3468206642" with '
                'conditions: ' + audience_3468206642.conditions + '.'
            ),
            mock.call.debug('Audience "3468206642" evaluated to FALSE.'),
            mock.call.debug(
                'Starting to evaluate audience "3988293898" with '
                'conditions: ' + audience_3988293898.conditions + '.'
            ),
            mock.call.debug('Audience "3988293898" evaluated to UNKNOWN.'),
            mock.call.debug(
                'Starting to evaluate audience "3988293899" with '
                'conditions: ' + audience_3988293899.conditions + '.'
            ),
            mock.call.debug('Audience "3988293899" evaluated to TRUE.'),
            mock.call.info(
                'Audiences for rule test_rule collectively evaluated to TRUE.'
            ),
        ]
    )
def test_init__invalid_datafile__logs_error(self):
    """ Test that invalid datafile logs error on init. """
    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        opt_obj = optimizely.Optimizely('invalid_datafile')

    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Provided "datafile" is in an invalid format.')
    self.assertFalse(opt_obj.is_valid)
def set_obj(self, url=None):
    """Build the Optimizely client with a Redis-backed deferred event dispatcher.

    Args:
        url: optional datafile URL; defaults to the CDN URL for self.project_id.
    """
    if not url:
        url = 'https://cdn.optimizely.com/json/{0}.json'.format(self.project_id)
    fetched_datafile = self.retrieve_datafile(url)
    self.obj = optimizely.Optimizely(
        fetched_datafile,
        logger=SimpleLogger(),
        event_dispatcher=RedisDeferredDispatcher())
def test_track__invalid_object(self):
    """ Test that track logs error if Optimizely object is not created correctly. """
    broken_client = optimizely.Optimizely('invalid_file')

    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        broken_client.track('test_event', 'test_user')

    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Datafile has invalid format. Failing "track".')
def test_activate__invalid_object(self):
    """ Test that activate logs error if Optimizely object is not created correctly. """
    broken_client = optimizely.Optimizely('invalid_file')

    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        self.assertIsNone(broken_client.activate('test_experiment', 'test_user'))

    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Datafile has invalid format. Failing "activate".')
def test_init__v1_datafile__logs_error(self):
    """ Test that v1 datafile logs error on init. """
    # Downgrade the fixture datafile to the unsupported v1 schema.
    self.config_dict['version'] = project_config.V1_CONFIG_VERSION

    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        opt_obj = optimizely.Optimizely(json.dumps(self.config_dict))

    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR,
        'Provided datafile has unsupported version. Please use SDK version 1.1.0 or earlier for datafile version 1.'
    )
    self.assertFalse(opt_obj.is_valid)
def __init__(self, sdk_key=None):
    """Initialize the Optimizely feature-flag client.

    Args:
        sdk_key: the Optimizely SDK key to connect with.

    Raises:
        ValueError: if no SDK key was supplied.
    """
    if not sdk_key:
        raise ValueError(
            "You must supply a valid Optimizely SDK-key. Did you remember to set settings.OPTIMIZELY_SDK_KEY?")
    # Plain string literal: the original was an f-string with no placeholders.
    logger.info("Initializing Optimizely feature flag client.")
    try:
        self._optimizely_client = optimizely.Optimizely(sdk_key=sdk_key)
        # You can change the default-setting like this:
        # self._optimizely_client.config_manager.set_blocking_timeout(20)
        self._optimizely_client.config_manager.set_update_interval(settings.OPTIMIZELY_UPDATE_INTERVAL_SECONDS)
    except Exception:
        # Best-effort init: failures are logged, not propagated, so the app
        # can still start without feature flags.
        logger.exception("Unexpected failure when trying to initialize Optimizely feature flag client.")
def set_instance(self):
    """Initializing the Optimizely instance.

    Reads the datafile from disk when no SDK key is configured, otherwise
    fetches it from the CDN.

    Returns:
        str: the raw datafile JSON the instance was built from.
    """
    if self.sdk_key is None:
        # Use a context manager so the local datafile handle is closed
        # (the original open(...).read() leaked the file object).
        with open('datafile.json', 'r') as datafile_handle:
            self.datafile = datafile_handle.read()
    else:
        url = 'https://cdn.optimizely.com/datafiles/{0}.json'.format(self.sdk_key)
        self.datafile = requests.get(url).text
    # Keyword argument instead of a positional None placeholder for the
    # event dispatcher.
    self.instance = optimizely.Optimizely(self.datafile, logger=SimpleLogger())
    return self.datafile
def test_init__invalid_error_handler__logs_error(self):
    """ Test that invalid error_handler logs error on init. """

    class InvalidErrorHandler(object):
        # Lacks the handle_error interface expected by the SDK.
        pass

    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        opt_obj = optimizely.Optimizely(
            json.dumps(self.config_dict), error_handler=InvalidErrorHandler)

    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Provided "error_handler" is in an invalid format.')
    self.assertFalse(opt_obj.is_valid)
def test_is_feature_enabled__returns_false_for_invalid_feature(self):
    """ Test that the feature is not enabled for the user if the provided feature key is invalid. """
    optimizely_instance = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    # (removed an unused local that fetched optimizely_instance.config)

    with mock.patch(
            'optimizely.decision_service.DecisionService.get_variation_for_feature'
    ) as mock_decision:
        self.assertFalse(
            optimizely_instance.is_feature_enabled('invalid_feature', 'user1'))

    # An invalid key must short-circuit before any bucketing decision is made.
    self.assertFalse(mock_decision.called)
def test_invalid_json_raises_schema_validation_off(self):
    """ Test that invalid JSON logs error if schema validation is turned off. """
    # Case 1: the datafile is not JSON at all.
    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        optimizely.Optimizely('invalid_json', skip_json_validation=True)
    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Provided "datafile" is in an invalid format.')

    # Case 2: valid version field, but entity payloads have the wrong shape.
    malformed_datafile = {
        'version': '2',
        'events': 'invalid_value',
        'experiments': 'invalid_value',
    }
    with mock.patch('optimizely.logger.SimpleLogger.log') as mock_logging:
        optimizely.Optimizely(malformed_datafile, skip_json_validation=True)
    mock_logging.assert_called_once_with(
        enums.LogLevels.ERROR, 'Provided "datafile" is in an invalid format.')
def test_does_user_meet_audience_conditions__evaluates_audience_conditions(
        self):
    """ Test that does_user_meet_audience_conditions correctly evaluates audienceConditions and
    calls custom attribute evaluator for leaf nodes. """
    opt_obj = optimizely.Optimizely(
        json.dumps(self.config_dict_with_typed_audiences))
    project_config = opt_obj.config_manager.get_config()
    experiment = project_config.get_experiment_from_key(
        'audience_combinations_experiment')
    experiment.audienceIds = []
    # Two nested 'or' branches with four leaf audiences in total.
    experiment.audienceConditions = [
        'or',
        ['or', '3468206642', '3988293898'],
        ['or', '3988293899', '3468206646'],
    ]
    with mock.patch(
            'optimizely.helpers.condition.CustomAttributeConditionEvaluator'
    ) as custom_attr_eval:
        audience.does_user_meet_audience_conditions(
            project_config,
            experiment.get_audience_conditions_or_ids(),
            enums.ExperimentAudienceEvaluationLogs,
            'audience_combinations_experiment',
            {},
            self.mock_client_logger)

        audience_3468206642 = project_config.get_audience('3468206642')
        audience_3988293898 = project_config.get_audience('3988293898')
        audience_3988293899 = project_config.get_audience('3988293899')
        audience_3468206646 = project_config.get_audience('3468206646')
        # Each leaf audience gets its own evaluator instance; each single-clause
        # condition list is evaluated at index 0.
        custom_attr_eval.assert_has_calls(
            [
                mock.call(audience_3468206642.conditionList, {},
                          self.mock_client_logger),
                mock.call(audience_3988293898.conditionList, {},
                          self.mock_client_logger),
                mock.call(audience_3988293899.conditionList, {},
                          self.mock_client_logger),
                mock.call(audience_3468206646.conditionList, {},
                          self.mock_client_logger),
                mock.call().evaluate(0),
                mock.call().evaluate(0),
                mock.call().evaluate(0),
                mock.call().evaluate(0),
            ],
            any_order=True,
        )
def test_is_feature_enabled__returns_true_if_user_is_bucketed_into_a_variation(
        self):
    """ Test that the feature is enabled for the user when the decision service
    buckets the user into a variation of the feature's experiment.

    (The previous docstring was copy-pasted from the invalid-feature test.)
    """
    optimizely_instance = optimizely.Optimizely(
        json.dumps(self.config_dict_with_features))
    project_config = optimizely_instance.config
    feature = project_config.get_feature_from_key('test_feature_1')

    with mock.patch(
            'optimizely.decision_service.DecisionService.get_variation_for_feature',
            return_value=project_config.get_variation_from_id(
                'test_experiment', '111129')) as mock_decision:
        self.assertTrue(
            optimizely_instance.is_feature_enabled('test_feature_1', 'user1'))
        mock_decision.assert_called_once_with(feature, 'user1', None)
def test_get_variation_for_feature__returns_none_for_user_not_in_rollout(
        self):
    """ Test that get_variation_for_feature returns None for the user not in
    the associated rollout. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_2')

    with mock.patch('optimizely.decision_service.DecisionService.get_variation',
                    return_value=None) as mock_get_variation:
        self.assertIsNone(decision_service.get_variation_for_feature(feature, 'user1'))
        # The rollout rule is queried with ignore_user_profile=True.
        mock_get_variation.assert_called_once_with(
            config.get_experiment_from_key('test_rollout_exp_1'), 'user1', None, True)
def test_is_user_in_experiment__evaluates_audience_conditions(self):
    """Verify the exact log sequence emitted while evaluating nested
    audienceConditions for an experiment."""
    opt_obj = optimizely.Optimizely(
        json.dumps(self.config_dict_with_typed_audiences))
    project_config = opt_obj.config_manager.get_config()
    experiment = project_config.get_experiment_from_key(
        'audience_combinations_experiment')
    experiment.audienceIds = []
    experiment.audienceConditions = [
        'or', ['or', '3468206642', '3988293898', '3988293899']
    ]
    audience_3468206642 = project_config.get_audience('3468206642')
    audience_3988293898 = project_config.get_audience('3988293898')
    audience_3988293899 = project_config.get_audience('3988293899')
    # Leaf results FALSE, UNKNOWN (None), TRUE -> the 'or' collapses to TRUE.
    with mock.patch(
            'optimizely.helpers.condition.CustomAttributeConditionEvaluator.evaluate',
            side_effect=[False, None, True]):
        audience.is_user_in_experiment(project_config, experiment, {},
                                       self.mock_client_logger)
    self.assertEqual(4, self.mock_client_logger.debug.call_count)
    self.assertEqual(4, self.mock_client_logger.info.call_count)
    self.mock_client_logger.assert_has_calls([
        mock.call.debug(
            'Evaluating audiences for experiment "audience_combinations_experiment": ["or", ["or", "3468206642", '
            '"3988293898", "3988293899"]].'),
        mock.call.debug('Starting to evaluate audience "3468206642" with '
                        'conditions: ' + audience_3468206642.conditions + '.'),
        mock.call.info('Audience "3468206642" evaluated to FALSE.'),
        mock.call.debug('Starting to evaluate audience "3988293898" with '
                        'conditions: ' + audience_3988293898.conditions + '.'),
        mock.call.info('Audience "3988293898" evaluated to UNKNOWN.'),
        mock.call.debug('Starting to evaluate audience "3988293899" with '
                        'conditions: ' + audience_3988293899.conditions + '.'),
        mock.call.info('Audience "3988293899" evaluated to TRUE.'),
        mock.call.info(
            'Audiences for experiment "audience_combinations_experiment" collectively evaluated to TRUE.'
        )
    ])
def request_datafile(self, timeout=None):
    """Poll the CDN for the latest datafile and rebuild the client when the
    revision has advanced.

    Args:
        timeout: optional seconds forwarded to requests.get.
    """
    DATAFILE_URL = 'https://cdn.optimizely.com/datafiles/%s.json' % self.sdk_key
    try:
        latest_datafile = requests.get(DATAFILE_URL, timeout=timeout).json()
    except (requests.RequestException, ValueError):
        # RequestException covers timeouts and connection failures; ValueError
        # covers a non-JSON response body. The original bare `except:` would
        # also have swallowed KeyboardInterrupt/SystemExit.
        self.logger.log(logging.WARNING, 'Optimizely: Timeout hit while trying to fetch the datafile')
    else:
        if int(self.current_datafile['revision']) < int(latest_datafile['revision']):
            self.logger.log(logging.INFO, 'Optimizely: Received an updated datafile and is initializing')
            self.current_datafile = latest_datafile
            # TODO: Preserve the notification center
            # TODO: Make this thread-safe
            self.optimizely_client_instance = optimizely.Optimizely(
                datafile=json.dumps(latest_datafile),
                logger=self.logger,
                **self.sdkParameters
            )
def test_get_variation_for_feature__returns_none_for_user_in_group_but_experiment_not_associated_with_feature(
        self):
    """ Test that if a user is in the mutex group but the experiment is not targeting a feature,
    then the feature should not be enabled. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_in_group')

    # group_exp_2 belongs to the group but does not back this feature.
    with mock.patch(
            'optimizely.decision_service.DecisionService.get_experiment_in_group',
            return_value=config.get_experiment_from_key('group_exp_2')) as mock_experiment_in_group:
        self.assertIsNone(decision_service.get_variation_for_feature(feature, 'user_1'))
        mock_experiment_in_group.assert_called_once_with(
            config.get_group('19228'), 'user_1')
def before_request():
    """Flask before-request hook: rebuild the module-level Optimizely client
    (and optional user-profile service) from the JSON request payload, then
    register any requested Activate/Track notification listeners."""
    global user_profile_service_instance
    global optimizely_instance
    # Reset per-request so a failed setup never leaks the previous client.
    user_profile_service_instance = None
    optimizely_instance = None

    request.payload = request.get_json()
    user_profile_service_instance = request.payload.get('user_profile_service')
    if user_profile_service_instance:
        # Resolve the UPS class by name and seed it with the supplied profiles.
        ups_class = getattr(user_profile_service, request.payload.get('user_profile_service'))
        user_profile_service_instance = ups_class(request.payload.get('user_profiles'))
    with_listener = request.payload.get('with_listener')

    # Log level comes from the environment, defaulting to DEBUG.
    log_level = environ.get('OPTIMIZELY_SDK_LOG_LEVEL', 'DEBUG')
    min_level = getattr(logging, log_level)
    optimizely_instance = optimizely.Optimizely(
        datafile_content,
        logger=logger.SimpleLogger(min_level=min_level),
        user_profile_service=user_profile_service_instance,
    )

    if with_listener is not None:
        # Each entry requests `count` copies of a listener of a given type.
        for listener_add in with_listener:
            if listener_add['type'] == 'Activate':
                count = int(listener_add['count'])
                for i in range(count):
                    # make a value copy so that we can add multiple callbacks.
                    a_cb = copy_func(on_activate)
                    optimizely_instance.notification_center.add_notification_listener(
                        enums.NotificationTypes.ACTIVATE, a_cb
                    )
            if listener_add['type'] == 'Track':
                count = int(listener_add['count'])
                for i in range(count):
                    # make a value copy so that we can add multiple callbacks.
                    t_cb = copy_func(on_track)
                    optimizely_instance.notification_center.add_notification_listener(
                        enums.NotificationTypes.TRACK, t_cb
                    )
def test_get_variation_for_feature__returns_none_for_user_not_in_group(
        self):
    """ Test that get_variation_for_feature returns None for user not in group and
    the feature is not part of a rollout. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_in_group')

    with mock.patch(
            'optimizely.decision_service.DecisionService.get_experiment_in_group',
            return_value=None) as mock_experiment_in_group, \
            mock.patch('optimizely.decision_service.DecisionService.get_variation') as mock_get_variation:
        self.assertIsNone(decision_service.get_variation_for_feature(feature, 'user1'))
        mock_experiment_in_group.assert_called_once_with(
            config.get_group('19228'), 'user1')
        # Without a group experiment there is nothing to bucket into.
        self.assertFalse(mock_get_variation.called)
def test_get_variation_for_feature__returns_variation_for_feature_in_experiment(
        self):
    """ Test that get_variation_for_feature returns the variation of the experiment the
    feature is associated with. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_features))
    config = opt.config
    decision_service = opt.decision_service
    feature = config.get_feature_from_key('test_feature_1')
    expected_variation = config.get_variation_from_id('test_experiment', '111129')

    with mock.patch('optimizely.decision_service.DecisionService.get_variation',
                    return_value=expected_variation) as mock_get_variation:
        self.assertEqual(
            expected_variation,
            decision_service.get_variation_for_feature(feature, 'user1'))
        mock_get_variation.assert_called_once_with(
            config.get_experiment_from_key('test_experiment'), 'user1', None)
def test_is_user_in_experiment__evaluates_audience_conditions(self):
    """ Test that is_user_in_experiment correctly evaluates audienceConditions and
    calls custom attribute evaluator for leaf nodes. """
    opt_obj = optimizely.Optimizely(
        json.dumps(self.config_dict_with_typed_audiences))
    project_config = opt_obj.config
    experiment = project_config.get_experiment_from_key(
        'audience_combinations_experiment')
    experiment.audienceIds = []
    # Two nested 'or' branches with four leaf audiences in total.
    experiment.audienceConditions = [
        'or', ['or', '3468206642', '3988293898'],
        [
            'or',
            '3988293899',
            '3468206646',
        ]
    ]
    with mock.patch(
            'optimizely.helpers.condition.CustomAttributeConditionEvaluator'
    ) as custom_attr_eval:
        audience.is_user_in_experiment(project_config, experiment, {})

        audience_3468206642 = project_config.get_audience('3468206642')
        audience_3988293898 = project_config.get_audience('3988293898')
        audience_3988293899 = project_config.get_audience('3988293899')
        audience_3468206646 = project_config.get_audience('3468206646')
        # Each leaf audience gets its own evaluator instance; each single-clause
        # condition list is evaluated at index 0.
        custom_attr_eval.assert_has_calls([
            mock.call(audience_3468206642.conditionList, {}),
            mock.call(audience_3988293898.conditionList, {}),
            mock.call(audience_3988293899.conditionList, {}),
            mock.call(audience_3468206646.conditionList, {}),
            mock.call().evaluate(0),
            mock.call().evaluate(0),
            mock.call().evaluate(0),
            mock.call().evaluate(0)
        ], any_order=True)
def test_is_user_in_experiment__evaluates_audience_conditions_leaf_node(
        self):
    """ Test that is_user_in_experiment correctly evaluates leaf node in
    audienceConditions. """
    opt = optimizely.Optimizely(json.dumps(self.config_dict_with_typed_audiences))
    config = opt.config
    experiment = config.get_experiment_from_key('audience_combinations_experiment')
    # A bare audience id (no boolean operator) is a valid leaf condition.
    experiment.audienceConditions = '3468206645'

    with mock.patch('optimizely.helpers.condition.CustomAttributeConditionEvaluator') as evaluator:
        audience.is_user_in_experiment(config, experiment, {})

        leaf_audience = config.get_audience('3468206645')
        evaluator.assert_has_calls(
            [
                mock.call(leaf_audience.conditionList, {}),
                mock.call().evaluate(0),
                mock.call().evaluate(1),
            ],
            any_order=True)