def test_count_repository_actions(
    repository,
    day,
    count_response,
    expected_request,
    expected_count,
    throws,
    logs_model,
    mock_elasticsearch,
    mock_db_model,
    app_config,
):
    """Verify count_repository_actions issues the expected ES count request
    and returns the expected count (or raises when configured to)."""
    # The three ES endpoints the model may hit are stubbed independently.
    mock_elasticsearch.list_indices = Mock(return_value=INDEX_LIST_RESPONSE)
    mock_elasticsearch.count = Mock(return_value=count_response)
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    configure(app_config)

    if not throws:
        actual_count = logs_model.count_repository_actions(repository, day)
        assert actual_count == expected_count
        if expected_request:
            mock_elasticsearch.count.assert_called_with(expected_request)
    else:
        with pytest.raises(Exception):
            logs_model.count_repository_actions(repository, day)
def test_yield_logs_for_export(start_datetime, end_datetime, repository_id, namespace_id,
                               max_query_time, scroll_responses, expected_requests,
                               expected_logs, throws, logs_model, mock_elasticsearch,
                               mock_db_model, mock_max_result_window, app_config):
    """Verify yield_logs_for_export drives the ES scroll API in the expected
    sequence: one scroll-create, N scroll-gets, then a scroll-delete."""
    # Split the canned responses: first feeds the scroll create, the last
    # feeds the scroll delete, and everything in between feeds scroll gets.
    create_response = scroll_responses[0]
    get_responses = scroll_responses[1:-1]
    delete_response = scroll_responses[-1]

    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    mock_elasticsearch.search_scroll_create = Mock(return_value=create_response)
    mock_elasticsearch.scroll_get = Mock(side_effect=get_responses)
    mock_elasticsearch.scroll_delete = Mock(return_value=delete_response)
    configure(app_config)

    if throws:
        with pytest.raises(Exception):
            logs_model.yield_logs_for_export(start_datetime, end_datetime,
                                             max_query_time=max_query_time)
        return

    log_generator = logs_model.yield_logs_for_export(start_datetime, end_datetime,
                                                     max_query_time=max_query_time)
    for index, logs in enumerate(log_generator):
        if index == 0:
            mock_elasticsearch.search_scroll_create.assert_called_with(
                *expected_requests[index])
        else:
            mock_elasticsearch.scroll_get.assert_called_with(*expected_requests[index])
        assert expected_logs[index] == logs

    # The last two requests must be:
    #   1. a scroll get whose response has 0 hits (the termination condition)
    #   2. the scroll delete request
    mock_elasticsearch.scroll_get.assert_called_with(*expected_requests[-2])
    mock_elasticsearch.scroll_delete.assert_called_with(*expected_requests[-1])
def test_lookup_logs(start_datetime, end_datetime, performer_name, repository_name,
                     namespace_name, filter_kinds, page_token, max_page_count,
                     search_response, list_indices_response, expected_request,
                     expected_page, throws, logs_model, mock_elasticsearch,
                     mock_db_model, mock_page_size, app_config):
    """Verify lookup_logs issues the expected search_after request and returns
    the expected page (or raises when configured to)."""
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    mock_elasticsearch.search_after = Mock(return_value=search_response)
    mock_elasticsearch.list_indices = Mock(return_value=list_indices_response)
    configure(app_config)

    # Both branches call lookup_logs with the same argument list.
    lookup_args = (start_datetime, end_datetime, performer_name, repository_name,
                   namespace_name, filter_kinds, page_token, max_page_count)

    if throws:
        with pytest.raises(Exception):
            logs_model.lookup_logs(*lookup_args)
        return

    page = logs_model.lookup_logs(*lookup_args)
    assert page == expected_page
    if expected_request:
        mock_elasticsearch.search_after.assert_called_with(expected_request)
def test_log_action(
    unlogged_pulls_ok,
    kind_name,
    namespace_name,
    repository,
    repository_name,
    timestamp,
    index_response,
    expected_request,
    throws,
    app_config,
    logs_model,
    mock_elasticsearch,
    mock_db_model,
    mock_random_id,
):
    """Verify log_action indexes the expected document into ES (or raises when
    configured to), including non-ASCII metadata keys/values."""
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    mock_elasticsearch.index = Mock(return_value=index_response)
    app_config["ALLOW_PULLS_WITHOUT_STRICT_LOGGING"] = unlogged_pulls_ok
    configure(app_config)

    performer = Mock(id=1)
    ip = "192.168.1.1"
    # Metadata deliberately includes a datetime and emoji to exercise
    # serialization of non-trivial values.
    metadata = {
        "key": "value",
        "time": parse("2018-03-08T03:30"),
        "😂": "😂👌👌👌👌"
    }

    # Both branches invoke log_action with exactly the same arguments.
    action_args = (
        kind_name,
        namespace_name,
        performer,
        ip,
        metadata,
        repository,
        repository_name,
        timestamp,
    )

    if throws:
        with pytest.raises(Exception):
            logs_model.log_action(*action_args)
        return

    logs_model.log_action(*action_args)
    mock_elasticsearch.index.assert_called_with(*expected_request)
def test_kafka_logs_producers(logs_model, mock_elasticsearch, mock_db_model,
                              kafka_logs_producer_config):
    # Verifies that with a Kafka producer configured, log_action results in
    # exactly one KafkaProducer.send call.
    # NOTE(review): a function with this exact name and (token-identical,
    # differently-quoted) body appears again later in this file; Python keeps
    # only the last definition, so this copy is shadowed and never collected
    # by pytest — confirm and remove one of the duplicates.
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    producer_config = kafka_logs_producer_config
    # check_version is patched so no real broker connection is attempted.
    with patch('kafka.client_async.KafkaClient.check_version'), patch(
            'kafka.KafkaProducer.send') as mock_send:
        configure(producer_config)
        logs_model.log_action('pull_repo', 'user1', Mock(id=1), '192.168.1.1',
                              {'key': 'value'}, None, 'repo1',
                              parse("2019-01-01T03:30"))
        mock_send.assert_called_once()
def test_kinesis_logs_producers(logs_model, mock_elasticsearch, mock_db_model,
                                kinesis_logs_producer_config):
    # Verifies that with a Kinesis producer configured, log_action results in
    # exactly one PutRecord API call.
    # NOTE(review): a function with this exact name and (token-identical,
    # differently-quoted) body appears again later in this file; Python keeps
    # only the last definition, so this copy is shadowed and never collected
    # by pytest — confirm and remove one of the duplicates.
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    producer_config = kinesis_logs_producer_config
    # create_endpoint is patched so no real AWS endpoint is contacted.
    with patch('botocore.endpoint.EndpointCreator.create_endpoint'), \
            patch('botocore.client.BaseClient._make_api_call') as mock_send:
        configure(producer_config)
        logs_model.log_action('pull_repo', 'user1', Mock(id=1), '192.168.1.1',
                              {'key': 'value'}, None, 'repo1',
                              parse("2019-01-01T03:30"))
        # Check that a PutRecord api call is made.
        # NOTE: The second arg of _make_api_call uses a randomized PartitionKey
        mock_send.assert_called_once_with(u'PutRecord',
                                          mock_send.call_args_list[0][0][1])
def test_get_aggregated_log_counts(start_datetime, end_datetime, performer_name,
                                   repository_name, namespace_name, filter_kinds,
                                   search_response, expected_request, expected_counts,
                                   throws, logs_model, mock_elasticsearch,
                                   mock_db_model, app_config):
    """Verify get_aggregated_log_counts issues the expected aggregation search
    and returns the expected counts (or raises when configured to)."""
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    mock_elasticsearch.search_aggs = Mock(return_value=search_response)
    configure(app_config)

    # Both branches call get_aggregated_log_counts with the same arguments.
    query_args = (start_datetime, end_datetime, performer_name, repository_name,
                  namespace_name, filter_kinds)

    if throws:
        with pytest.raises(Exception):
            logs_model.get_aggregated_log_counts(*query_args)
        return

    counts = logs_model.get_aggregated_log_counts(*query_args)
    # Order of the returned counts is not significant; compare as sets.
    assert set(counts) == set(expected_counts)
    if expected_request:
        mock_elasticsearch.search_aggs.assert_called_with(expected_request)
def test_kafka_logs_producers(logs_model, mock_elasticsearch, mock_db_model,
                              kafka_logs_producer_config):
    """With a Kafka producer configured, a single log_action must result in
    exactly one KafkaProducer.send call."""
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    # Patch check_version so no real broker connection is attempted, and
    # capture send to assert the producer was invoked.
    with patch("kafka.client_async.KafkaClient.check_version"), patch(
        "kafka.KafkaProducer.send"
    ) as mock_send:
        configure(kafka_logs_producer_config)
        logs_model.log_action(
            "pull_repo",
            "user1",
            Mock(id=1),
            "192.168.1.1",
            {"key": "value"},
            None,
            "repo1",
            parse("2019-01-01T03:30"),
        )
        mock_send.assert_called_once()
def test_kinesis_logs_producers(logs_model, mock_elasticsearch, mock_db_model,
                                kinesis_logs_producer_config):
    """With a Kinesis producer configured, a single log_action must result in
    exactly one PutRecord API call."""
    mock_elasticsearch.template = Mock(return_value=DEFAULT_TEMPLATE_RESPONSE)
    # Patch endpoint creation so no real AWS endpoint is contacted, and
    # capture the low-level API call to assert PutRecord was issued.
    with patch("botocore.endpoint.EndpointCreator.create_endpoint"), patch(
        "botocore.client.BaseClient._make_api_call"
    ) as mock_send:
        configure(kinesis_logs_producer_config)
        logs_model.log_action(
            "pull_repo",
            "user1",
            Mock(id=1),
            "192.168.1.1",
            {"key": "value"},
            None,
            "repo1",
            parse("2019-01-01T03:30"),
        )
        # Check that a PutRecord api call is made.
        # NOTE: The second arg of _make_api_call uses a randomized PartitionKey
        mock_send.assert_called_once_with("PutRecord",
                                          mock_send.call_args_list[0][0][1])
# Load the Docker V2 signing key from the override config directory if one
# exists there; otherwise generate a fresh 2048-bit RSA key for this process.
_v2_key_path = os.path.join(OVERRIDE_CONFIG_DIRECTORY, DOCKER_V2_SIGNINGKEY_FILENAME)
if os.path.exists(_v2_key_path):
    docker_v2_signing_key = RSAKey().load(_v2_key_path)
else:
    docker_v2_signing_key = RSAKey(key=RSA.generate(2048))

# Configure the database.
# DATABASE_SECRET_KEY is only mandatory once setup has been completed; fail
# fast with an actionable message rather than at first use.
if app.config.get("DATABASE_SECRET_KEY") is None and app.config.get("SETUP_COMPLETE", False):
    raise Exception("Missing DATABASE_SECRET_KEY in config; did you perhaps forget to add it?")

database.configure(app.config)

# Wire the data-model layer to the app config and storage engine, and register
# the TUF metadata cleanup to run when repositories are deleted.
model.config.app_config = app.config
model.config.store = storage
model.config.register_repo_cleanup_callback(tuf_metadata_api.delete_metadata)

# Security scanner and logs models are configured after the core model so they
# can rely on the database/storage wiring above.
secscan_model.configure(app, instance_keys, storage)
secscan_model.register_model_cleanup_callbacks(model.config)

logs_model.configure(app.config)


@login_manager.user_loader
def load_user(user_uuid):
    """Flask-Login user loader: resolve a session UUID to a wrapped DB user."""
    logger.debug("User loader loading deferred user with uuid: %s", user_uuid)
    return LoginWrappedDBUser(user_uuid)


# Pre-bind the app config so callers only supply the remaining arguments.
get_app_url = partial(get_app_url, app.config)