def create_app(test_config=None):
    """
    This function creates our flask app. If we are performing unit tests, it
    sets our redis client to be a mock redis client. It also registers
    endpoints that we are using.

    @param test_config: Tells us if we are testing or not
    @return: this app, for testing purposes or running normally
    """
    main_app = Flask(__name__, instance_relative_config=True)
    if test_config is None:
        # k, v store for term -> doc_id
        main_app.index = Redis(host='localhost', port=6379, db=0)
        # k, v store for doc:term -> list of positions
        main_app.term_positions = Redis(host='localhost', port=6379, db=1)
    else:
        # Same db's as above, but the mock_redis version
        main_app.index = FakeRedis(host='localhost', port=6379, db=0)
        main_app.term_positions = FakeRedis(host='localhost', port=6379, db=1)
        # sets the initial value total docs for tf-idf
        # NOTE(review): the original formatting was collapsed; this reset and
        # the TESTING flag are assumed to belong to the test-only branch
        # (resetting "total_docs" on every production start would clobber the
        # live counter) — confirm against the original file.
        main_app.index.set("total_docs", 0)
        # Specify that we are in testing config for debugging
        main_app.config['TESTING'] = True
    # Tells this instance of flask where our endpoints are
    main_app.register_blueprint(bp)
    return main_app
class NodeTimeoutProcessCommandTestCase(TestCase):
    """Tests for the management command that pops timed-out nodes."""

    def setUp(self):
        self.node_pool = "test_node_pool"
        self.command = Command()

    @override_settings(redis_inst=FakeRedis())
    @override_settings(EXECUTING_NODE_POOL="test_node_pool")
    def test_command_simple(self):
        """A single already-due node is popped and a record is written."""
        due_at = datetime.datetime.now().timestamp()
        settings.redis_inst.zadd(self.node_pool, mapping={"test": due_at})
        self.assertEqual(1, settings.redis_inst.zcard(self.node_pool))

        self.command._pop_timeout_nodes(settings.redis_inst, self.node_pool)

        self.assertEqual(0, settings.redis_inst.zcard(self.node_pool))
        self.assertEqual(1, len(TimeoutNodesRecord.objects.all()))

    @override_settings(redis_inst=FakeRedis())
    @override_settings(EXECUTING_NODE_POOL="test_node_pool")
    def test_command_complicated(self):
        """Only the node whose deadline has passed is popped; the future one stays."""
        base = datetime.datetime.now()
        deadlines = {
            "time1": base.timestamp(),
            "time2": (base + datetime.timedelta(minutes=5)).timestamp(),
        }
        settings.redis_inst.zadd(self.node_pool, mapping=deadlines)
        self.assertEqual(2, settings.redis_inst.zcard(self.node_pool))

        self.command._pop_timeout_nodes(settings.redis_inst, self.node_pool)

        self.assertEqual(1, settings.redis_inst.zcard(self.node_pool))
        self.assertEqual(1, len(TimeoutNodesRecord.objects.all()))
def test_track_custom_event_failures(self, mock_redis):
    """A matching failure string in the event log yields a non-zero return code."""
    mock_store = FakeRedis()
    mock_store.set('custom_event_type', '["Random Event Log", "Failure"]')

    # Create and initialize Salt Nanny against the fake backend
    nanny = SaltNanny(self.cache_config)
    nanny.cache_client.redis_instance = mock_store
    nanny.min_interval = 1

    rc = nanny.track_custom_event_failures('custom_event_type', ['Failure'], 2)
    self.assertTrue(rc > 0)
def setUp(self):
    """Patch the queue module's redis, preload two items, and stub get()."""
    self.redis_mock = mock.patch(QUEUE_MODULE + 'redis').start()
    self.redis_mock.redis.return_value = FakeRedis()

    self.queue = RedisQueue('test', FakeRedis())
    for _ in range(2):
        self.queue.put('test')
    # Two messages, then a ValueError to terminate the worker loop.
    self.queue.get = mock.Mock(side_effect=['msg1', 'msg2', ValueError])

    self.worker = MyWorker(self.queue)
def setUp(self):
    """Create a fake redis client and a 5-second forced-fail timeout config."""
    self.redis_inst = FakeRedis()
    self.node_id = "node_id"
    self.version = "version"
    self.time_config = TimeoutNodeConfig.objects.create(
        task_id=1,
        root_pipeline_id="root_pipeline_id",
        action="forced_fail",
        node_id=self.node_id,
        timeout=5,
    )
def test_track_custom_event_failures(self, mock_redis):
    """Tracking reports failure (rc > 0) when the log contains 'Failure'."""
    backend = FakeRedis()
    backend.set('custom_event_type', '["Random Event Log", "Failure"]')

    # Wire Salt Nanny's cache client to the fake backend
    nanny = SaltNanny(self.cache_config)
    nanny.cache_client.redis_instance = backend
    nanny.min_interval = 1

    result = nanny.track_custom_event_failures(
        'custom_event_type', ['Failure'], 2)
    self.assertTrue(result > 0)
def redis(monkeypatch) -> FakeRedis:
    """Fixture: route the throttle module's redis connection to a FakeRedis."""
    stub = FakeRedis()
    monkeypatch.setattr(
        "catalog.api.utils.throttle.get_redis_connection",
        lambda *args, **kwargs: stub,
    )
    yield stub
    stub.client().close()
def test_process_results():
    """Processing the dockets listing fixture enqueues ten waiting jobs."""
    db = FakeRedis()
    here = os.path.dirname(os.path.realpath(__file__))
    processor = ResultsProcessor(JobQueue(db), MockDataStorage())

    with open(f'{here}/data/dockets_listing.json') as listings:
        payload = json.load(listings)
    processor.process_results(payload)

    assert db.llen('jobs_waiting_queue') == 10
def test_job_added_with_next_id():
    """New jobs continue numbering from the stored last_job_id."""
    db = FakeRedis()
    db.set('last_job_id', 42)

    queue = JobQueue(db)
    queue.add_job('http://a.b.c')
    assert queue.get_num_jobs() == 1

    added = queue.get_job()
    assert added['job_id'] == 43
    assert added['url'] == 'http://a.b.c'
def setUp(self):
    """Play back recorded AWS calls and seed fake redis with failed highstates."""
    self.session = Session(region_name='us-west-1')
    pill = placebo.attach(self.session, 'tests/sit/test_data')
    pill.playback()

    jid = 123456
    fake = FakeRedis()
    failed_result = '{"result": "false","return": {"test": "true"}}'
    for minion in ('test-1-php', 'test-1-lb'):
        fake.lpush('{0}:state.highstate'.format(minion), jid)
        fake.set('{0}:{1}'.format(minion, jid), failed_result)

    self.redis_client = RedisClient()
    self.redis_client.redis_instance = fake
def redis(monkeypatch) -> FakeRedis:
    """Fixture: point the command module's redis connection at a FakeRedis."""
    stub = FakeRedis()
    monkeypatch.setattr(
        f"{command_module_path}.get_redis_connection",
        lambda *args, **kwargs: stub,
    )
    yield stub
    stub.client().close()
def test_work_generator_large(requests_mock, mocker):
    """Downloading 6666 mocked documents enqueues 6666 waiting jobs."""
    mocker.patch('time.sleep')
    requests_mock.get(
        'https://api.regulations.gov/v4/documents',
        MockDataSet(6666).get_results(),
    )

    db = FakeRedis()
    generator = WorkGenerator(
        JobQueue(db), RegulationsAPI('FAKE_KEY'), MockDataStorage())
    generator.download('documents')

    assert db.llen('jobs_waiting_queue') == 6666
def __init__(self, redis: Redis = None, subscribers: Dict[str, Callable] = None):
    """
    Initialize the event manager over a redis client.

    :param redis: redis client to use; when None a fresh FakeRedis is created.
    :param subscribers: mapping of topic name -> handler callable, passed to
        the parent class.
    """
    super().__init__(subscribers)
    # Bug fix: the original default `redis: Redis = FakeRedis()` is a mutable
    # default argument — it is evaluated once at definition time, so every
    # instance created without an explicit client silently shared the SAME
    # FakeRedis (and its pubsub state). Build a fresh fallback per instance.
    self._redis = redis if redis is not None else FakeRedis()
    self._pubsub = self._redis.pubsub()
    self._subscribe()
def test_should_unsubscribe_all_successfully(make_user_created_event):
    """Exercise unsubscribe_all between two sends and check handler deliveries."""
    given_any_user_id_1 = UserId("user_id_1")
    given_any_user_id_2 = UserId("user_id_2")
    given_any_topic = "topic"

    def redis_event_handler(message):
        global received_events
        event = event_from_redis_message(message)
        received_events.append(event)
        # Only the first user's event should ever reach a live subscription.
        if isinstance(event, make_user_created_event().__class__):
            assert event.user_id == "user_id_1"

    event_manager = RedisEventManager(
        redis=FakeRedis(),
        subscribers={given_any_topic: redis_event_handler},
    )

    event_manager.send(
        given_any_topic, make_user_created_event(user_id=given_any_user_id_1))
    event_manager.unsubscribe_all()
    event_manager.send(
        given_any_topic, make_user_created_event(user_id=given_any_user_id_2))

    await_for_events()
    assert len(received_events) == 2
def test_short_handles_duplicates(monkeypatch: MonkeyPatch, db: FakeRedis,
                                  urls: List[str]) -> None:
    """With a tiny alphabet (forcing collisions), each url still gets one entry."""
    shortener = Shortener(db)
    # Shrink the key alphabet so key collisions become likely.
    monkeypatch.setattr(shortener, 'mapping', value="aA0")

    for link in urls:
        shortener.long_to_short(link)

    assert db.dbsize() == len(urls)
def get_redis() -> Redis:
    """Return the redis client for the current settings (fake in testing mode)."""
    settings = get_settings()
    if settings.SCORETRACKER_TESTING_MODE:
        return FakeRedis(decode_responses=True)
    if settings.REDIS_URL is not None:
        return Redis.from_url(settings.REDIS_URL, decode_responses=True)
    # No URL configured: fall back to a default local connection.
    return Redis(decode_responses=True)
def __init__(self, host: str, port: int = 6379, db: int = 0):
    """Connect to redis, substituting fakeredis when testing or host == 'mock'."""
    use_fake = environ.env.config.get(ConfigKeys.TESTING, False) or host == 'mock'
    if use_fake:
        from fakeredis import FakeRedis as Redis
    else:
        from redis import Redis
    self.redis = Redis(host=host, port=port, db=db)
def setUp(self):
    """Record a start timestamp, build a 10-per-minute config, wire fake redis."""
    self.start_time_stamp = time.time()
    self.times_config = MockTaskOperationTimesConfig(
        {"times": 10, "time_unit": "m"})
    settings.redis_inst = FakeRedis()
def test_track_returns(self, mock_redis):
    """track_returns completes once highstate results for both minions exist."""
    backend = FakeRedis()

    # Create and initialize Salt Nanny against the fake backend
    nanny = SaltNanny(self.cache_config)
    nanny.cache_client.redis_instance = backend
    nanny.initialize(['minion1', 'minion2'])
    nanny.min_interval = 1

    # Make Redis returns available in fake redis.
    # NOTE(review): both minions' results are stored under ret:1234 even
    # though minion2's jid is 4321 — confirm this mismatch is intentional.
    backend.set('minion1:state.highstate', '1234')
    backend.set('minion2:state.highstate', '4321')
    backend.hset('ret:1234', 'minion1', 'Highstate Result1')
    backend.hset('ret:1234', 'minion2', 'Highstate Result1')

    # Start tracking returns
    nanny.track_returns()
def test_parse_last_return(self, mock_redis):
    """parse_last_return yields a positive code for the canned highstate json."""
    backend = FakeRedis()

    # Create and initialize Salt Nanny against the fake backend
    nanny = SaltNanny(self.cache_config)
    nanny.cache_client.redis_instance = backend
    nanny.initialize(['minion1'])
    nanny.min_interval = 1

    fixture_path = '{0}/resources/highstate.json'.format(
        os.path.dirname(__file__))
    with open(fixture_path, 'r') as f:
        # renamed from `json` to avoid shadowing the stdlib module name
        highstate_json = f.read()

    # Make Redis returns available in fake redis
    backend.set('minion1:state.highstate', '6789')
    backend.set('minion1:6789', highstate_json)

    self.assertTrue(nanny.parse_last_return() > 0)
def mock_flask_server(create_server):
    """Build a flask test server backed by an in-memory fakeredis server."""
    backing = FakeServer()
    server = create_server(FakeRedis(server=backing))
    server.redis_server = backing
    server.app.config['TESTING'] = True
    server.client = server.app.test_client()
    return server
def client_db_disconnected() -> Generator:
    """Yield a test client whose redis dependency reports as disconnected."""
    fake_server = FakeServer()
    fake_server.connected = False
    disconnected_db = FakeRedis(server=fake_server)

    app.dependency_overrides[get_db] = lambda: disconnected_db
    with TestClient(app) as client:
        yield client
    # Restore the app's real dependencies after the test.
    app.dependency_overrides = {}
def redis_client_mock():
    """Patch RedisStorageMixin.get_redis_connection to return a FakeRedis."""
    from fakeredis import FakeRedis
    from io_storages.redis.models import RedisStorageMixin

    fake = FakeRedis()
    # TODO: add mocked redis data
    with mock.patch.object(
            RedisStorageMixin, 'get_redis_connection', return_value=fake):
        yield
def mock_work_server(create_server):
    """Build a work test server over fakeredis with mock data storage attached."""
    backing = FakeServer()
    server = create_server(FakeRedis(server=backing))
    server.redis_server = backing
    server.app.config['TESTING'] = True
    server.client = server.app.test_client()
    server.data = MockDataStorage()
    return server
def test_long_to_short(shortener_info: Dict, db: FakeRedis,
                       long_url: str) -> None:
    """A generated short key has the configured length, prefix, and alphabet."""
    shortener = Shortener(db)
    short_key = shortener.long_to_short(long_url)

    assert len(short_key) == shortener_info["key_length"]
    assert db.get(shortener_info["key_prefix"] + short_key) == long_url
    assert all(char in shortener_info["mapping"] for char in short_key)
class NodeTimeoutInfoUpdateTestCase(TestCase):
    """Tests for _node_timeout_info_update over the executing-node pool."""

    def setUp(self):
        self.node_id = "node_id"
        self.version = "version"
        self.redis_inst = FakeRedis()
        self.time_config = TimeoutNodeConfig.objects.create(
            task_id=1,
            root_pipeline_id="root_pipeline_id",
            action="forced_fail",
            node_id=self.node_id,
            timeout=5,
        )

    def _update(self, to_state, version):
        """Shorthand for the function under test with the shared redis/node."""
        _node_timeout_info_update(self.redis_inst, to_state, self.node_id, version)

    def _pool_size(self):
        """Current cardinality of the executing-node pool."""
        return self.redis_inst.zcard(settings.EXECUTING_NODE_POOL)

    def test__node_timeout_info_update_running_state(self):
        """Each RUNNING transition adds one pool entry per (node, version)."""
        self._update(bamboo_engine_states.RUNNING, self.version)
        self.assertEqual(1, self._pool_size())

        self._update(bamboo_engine_states.RUNNING, "version2")
        self.assertEqual(2, self._pool_size())

    def test__node_timeout_info_update_finish_state(self):
        """A FINISHED transition removes the node from the pool."""
        self._update(bamboo_engine_states.RUNNING, self.version)
        self._update(bamboo_engine_states.FINISHED, self.version)
        self.assertEqual(0, self._pool_size())

    def test__node_timeout_info_update_fail_state(self):
        """A FAILED transition removes the node from the pool."""
        self._update(bamboo_engine_states.RUNNING, self.version)
        self._update(bamboo_engine_states.FAILED, self.version)
        self.assertEqual(0, self._pool_size())
def mock_dashboard_server(create_server):
    """Build a dashboard test server over fakeredis plus a docker MagicMock."""
    backing = FakeServer()
    server = create_server(FakeRedis(server=backing), MagicMock())
    server.redis_server = backing
    server.app.config['TESTING'] = True
    server.client = server.app.test_client()
    server.data = MockDataStorage()
    return server
def test_first_job_added_with_id_0():
    """The very first enqueued job is retrievable with its url intact.

    NOTE(review): the test name says id 0 but the assertion expects id 1 —
    presumably ids are 1-based and the name is stale; confirm.
    """
    db = FakeRedis()
    queue = JobQueue(db)
    queue.add_job('http://a.b.c')
    assert queue.get_num_jobs() == 1

    first = queue.get_job()
    assert first['job_id'] == 1
    assert first['url'] == 'http://a.b.c'
def test_all_methods():
    """save/fetch/move/delete round-trip through the redis-backed database."""
    db = RedisExampleDatabase(FakeRedis())

    db.save(b"key1", b"value")
    assert list(db.fetch(b"key1")) == [b"value"]

    db.move(b"key1", b"key2", b"value")
    assert list(db.fetch(b"key1")) == []
    assert list(db.fetch(b"key2")) == [b"value"]

    db.delete(b"key2", b"value")
    assert list(db.fetch(b"key2")) == []
    # Deleting an absent value must be a silent no-op, not an error.
    db.delete(b"key2", b"unknown value")
def init_app(app):
    """Attach the login manager and bind the module-level redis client."""
    login_manager.init_app(app)
    global redis
    if app.testing:
        # Testing: use an in-memory fake so no server is required.
        from fakeredis import FakeRedis
        redis = FakeRedis()
    else:
        from redis import Redis
        redis = Redis.from_url(app.config['REDIS_URL'])
def remove_test_data():
    """Delete the dummy test user (and its facts), reset RESPONSES and redis."""
    test_user = studybot.User.query.filter_by(
        fb_id=DUMMY_SENDER_ID).one_or_none()
    if test_user:
        for fact in (test_user.facts or []):
            studybot.db.session.delete(fact)
        studybot.db.session.delete(test_user)
        studybot.db.session.commit()

    global RESPONSES
    RESPONSES = []
    # NOTE(review): flushing a brand-new FakeRedis() only clears shared state
    # if fakeredis instances share a backing server in this setup — confirm
    # this actually flushes the store the app uses.
    FakeRedis().flushall()
def create_app(config: BaseConfig) -> tuple:
    """
    Creates and returns the configured Flask application and its Redis client.

    Bug fix: the original annotation (`-> Flask`) and docstring claimed a bare
    Flask object, but the function has always returned ``(app, redis_store)``.
    The annotation and docs now match the actual behavior; callers unpacking
    the tuple are unaffected.

    :param config: A config object usable by Flask's 'app.config.from_object()'
        function.
    :type config: object
    :return: ``(app, redis_store)`` — the Flask application object and the
        FlaskRedis client bound to it (a FakeRedis-backed provider in testing).
    :rtype: tuple
    """
    app = Flask(config.SERVICE_NAME)
    app.config.from_object(config)

    # Configure Redis client: in testing mode back FlaskRedis with fakeredis
    # so no server is needed.
    if app.testing:
        redis_store = FlaskRedis.from_custom_provider(FakeRedis())
    else:
        redis_store = FlaskRedis()
    redis_store.init_app(app)
    app.redis = redis_store

    # Silence/annotate healthcheck noise in werkzeug's request log.
    logger = getLogger("werkzeug")
    logger.addFilter(HealthLogFilter())

    api_blueprint = Blueprint(config.SERVICE_NAME, __name__)
    health_blueprint = Blueprint("healthcheck", __name__)

    api = Api(
        api_blueprint,
        title=f"{config.SERVICE_LONG_NAME} API",
        version=f"{config.API_VERSION}",
        description=config.SERVICE_DESCRIPTION,
    )
    # Add Keyvalue store api namespace
    api.add_namespace(keyvaluestore_api, path="")

    # Add endpoints for prometheus_client
    register_metrics(app, app_version="0.0.1", app_config="production")

    healthcheck = Api(
        health_blueprint,
        title="Healthcheck Endpoint",
        version="1",
        description=
        f"An API returning health information for the {config.SERVICE_NAME} service.",
    )
    healthcheck.add_namespace(healthcheck_api, path="")

    app.register_blueprint(api_blueprint, url_prefix="/api")
    app.register_blueprint(health_blueprint, url_prefix="/health")

    return app, redis_store
def __init__(self, *args, **kwargs):
    # Used fakeredis for testing (don't affect production redis)
    from fakeredis import FakeRedis, FakeStrictRedis
    import django_rq.queues
    simple_redis = FakeRedis()
    strict_redis = FakeStrictRedis()
    # Monkey-patch django_rq's connection factory so every queue receives one
    # of the fake clients above. The first (ignored) parameter is the queue's
    # config dict; `strict` selects the strict/non-strict client variant.
    # NOTE(review): this patch is process-global and is never undone — assumed
    # acceptable because this runner only exists for tests; confirm.
    django_rq.queues.get_redis_connection = lambda _, strict: strict_redis \
        if strict else simple_redis
    # Run all RQ requests synchronously so jobs execute inline during tests
    for config in RQ_QUEUES.values():
        config["ASYNC"] = False
    super().__init__(*args, **kwargs)
def setup_fakeredis(self):
    """Seed a FakeRedis with one failed php highstate and attach it."""
    jid = 123456
    fake = FakeRedis()
    fake.lpush('php:state.highstate', jid)
    fake.set('php:{0}'.format(jid), '{"result": false}')
    self.redis_client.redis_instance = fake