def test_build_keycolumnlist(mocker, setup):
    """The extractor must issue the primary/unique key-column lookup query
    for the given schemas and tables, then fetch all rows."""
    (extractor, mockdb, mockargv, mock_producer, start_scn, end_scn) = setup
    # Rebuild the db mock locally so the key-column query returns no rows.
    mock_query_results = mocker.Mock(**{'fetchall.return_value': []})
    mockdb = mocker.Mock(**{'execute_query.return_value': mock_query_results})
    mock_audit_factory = unittest_utils.build_mock_audit_factory(mocker)
    extractor = OracleCdcExtractor(mockdb, mockargv, mock_audit_factory)

    extractor.build_keycolumnlist(['schemaA', 'schemaB'], ['tableA', 'tableB'])

    expected_sql = (
        "SELECT cons.table_name tabname, LOWER(column_name) colname "
        "FROM all_constraints cons, all_cons_columns col "
        "WHERE cons.owner = col.owner "
        "AND cons.constraint_name = col.constraint_name "
        "AND UPPER(cons.owner) IN ('SCHEMAA', 'SCHEMAB') "
        "AND UPPER(cons.table_name) IN ('TABLEA', 'TABLEB') "
        "AND cons.constraint_type IN ('P', 'U') "
        "ORDER BY 1, col.position"
    )
    mockdb.assert_has_calls([
        mocker.call.execute_query(expected_sql, 1000),
        mocker.call.execute_query(mocker.ANY).fetchall()
    ])
def test_processGame_writes_andFail(self, mocker):
    """When batch put_item raises, the handler re-raises and records the
    game in the failed-PGN table."""
    (mockS3, mockSQS, mockDynamo, mockContext,
     mockGameController, mockTime) = getMocks(mocker)
    for attr, mock_obj in (('s3', mockS3), ('dynamodb', mockDynamo),
                           ('GameController', mockGameController),
                           ('getDatetime', mockTime)):
        mocker.patch.object(L, attr, mock_obj)

    mockTable = mocker.MagicMock()
    mockBatch = mocker.Mock()
    mockDynamo.Table.return_value = mockTable
    mockTable.batch_writer.return_value.__enter__.return_value = mockBatch
    # First batch write blows up.
    mockBatch.put_item.side_effect = mocker.Mock(
        side_effect=Exception('put item failed'))

    pg = ParsedGame(1)
    mockGameController.processPGNText.return_value = [pg]

    with pytest.raises(Exception) as e_info:
        L.lambda_handler(lambdaEvent, mockContext)

    item = mockSuccessDbItem(pg)
    mockBatch.put_item.assert_called_once()
    mockBatch.put_item.assert_called_once_with(Item=item)
    assert mockDynamo.Table.call_count == 2
    mockDynamo.Table.assert_any_call('chess_games')
    mockDynamo.Table.assert_any_call(mock_tablePgnFailed)
    failedItem = mockFailedDbItem('writing')
    mockTable.put_item.assert_called_once()
    mockTable.put_item.assert_called_once_with(Item=failedItem)
def test_create_snowflake_connection(mocker):  # noqa: F811
    """create_snowflake_connection should connect with the decrypted key and
    then set the role and UTC timezone on the session."""
    # Mock out the snowflake connection method w/o mocking out the helper method.
    mocker.patch.object(snowflake.snowflake.connector, 'connect')
    mock_connection = mocker.Mock()
    mock_cursor = mocker.Mock()
    mock_connection.cursor.return_value = mock_cursor
    snowflake.snowflake.connector.connect.return_value = mock_connection

    # Stub the key decryption so it yields a sentinel private key.
    mocker.patch.object(snowflake.serialization, 'load_pem_private_key')
    mock_key = mocker.Mock()
    mock_key.private_bytes.return_value = 1234
    snowflake.serialization.load_pem_private_key.return_value = mock_key

    credentials = {
        "private_key": "this_is_an_encrypted_private_key",
        "private_key_passphrase": "passphrase_for_the_private_key",
        "user": "******",
        "account": "company-cloud-region",
    }
    snowflake.create_snowflake_connection(credentials=credentials,
                                          role="test_role")

    snowflake.snowflake.connector.connect.assert_called_with(
        account='company-cloud-region',
        autocommit=False,
        private_key=1234,
        user='******',
        warehouse=None,
        password=None,
    )
    mock_cursor.execute.assert_has_calls((
        mocker.call("USE ROLE test_role"),
        mocker.call("ALTER SESSION SET TIMEZONE = 'UTC'"),
    ))
def mocked_submission_participant_app(request, mocker):
    """Fixture: a Flask app with SubmissionParticipant and the dashboard API
    helpers replaced by mocks."""
    global dashboardTestMockObjects
    # Create the flask app
    app = conftest.create_basic_app()

    def mock_all():
        # Two fake participants returned by the query chain.
        return [
            mocker.MagicMock(publication_recid=1,
                             invitation_cookie='c00kie1', role='TestRole1'),
            mocker.MagicMock(publication_recid=2,
                             invitation_cookie='c00kie2', role='TestRole2')
        ]

    # Chain: SubmissionParticipant.query.filter(...).all() -> mock_all()
    mockFilter = mocker.Mock(all=mock_all)
    mockQuery = mocker.Mock(filter=lambda a, b, c, d: mockFilter)
    mockSubmissionParticipant = mocker.Mock(query=mockQuery)

    # Patch hepdata.modules.dashboard.api so it returns mock values.
    dashboardTestMockObjects['submission'] = mocker.patch(
        'hepdata.modules.dashboard.api.SubmissionParticipant',
        mockSubmissionParticipant)
    mocker.patch('hepdata.modules.dashboard.api.get_record_by_id',
                 lambda x: {'title': 'Test Title 1' if x <= 1 else 'Test Title 2'})
    mocker.patch('hepdata.modules.dashboard.api.get_latest_hepsubmission',
                 mocker.Mock(coordinator=101))
    mocker.patch('hepdata.modules.dashboard.api.get_user_from_id',
                 mocker.Mock(return_value=dashboardTestMockObjects['user']))
    mocker.patch('hepdata.modules.dashboard.api.decode_string',
                 lambda x: "decoded " + str(x))

    # Do the rest of the app setup
    for app in conftest.setup_app(app):
        yield app
def setup(mocker):
    """ setup function """
    context = mock_context(mocker)
    builder, slot_builder = mocker.Mock(), mocker.Mock()
    return context, builder, slot_builder
def test_error_consumer_poll(mocker, setup):
    """A failure on the first poll must not abort the consumer loop: the
    consumer should keep polling and eventually receive the KILLED message.

    Fix: the module-level ``poll_calls`` counter is now reset at the start
    of the test, so the "raise on first poll" side effect fires
    deterministically regardless of which tests ran before this one
    (the sibling test ``test_seek_to_end`` already does this reset).
    """
    global poll_calls
    poll_calls = 0  # reset shared counter so poll_calls == 1 on the first poll
    (mock_applier) = setup
    mock_message_config = {
        "value.return_value": const.KILLED,
        "error.return_value": None
    }
    mock_message = mocker.Mock(**mock_message_config)

    def poll_exception_side_effect(timeout):
        # Fail the first poll only; every later poll yields the KILLED message.
        global poll_calls
        poll_calls += 1
        print("poll_calls = {}".format(poll_calls))
        if poll_calls == 1:
            raise Exception("Poll failed!")
        else:
            return mock_message

    mock_avro_consumer_config = {
        "poll.side_effect": poll_exception_side_effect,
        "committed.return_value": [TopicPartition('topic', 0, 0)]
    }
    mock_avro_consumer = mocker.Mock(**mock_avro_consumer_config)
    mock_avro_consumer_constructor = mocker.patch(
        "data_pipeline.stream.kafka_consumer.AvroConsumer")
    mock_avro_consumer_constructor.return_value = mock_avro_consumer

    kafka_consumer = KafkaConsumer('broker', 'group', 'topic',
                                   'schema_reg_url', mock_applier)
    kafka_consumer.consumer_loop()
def setup_no_redolog_dict(tmpdir, mocker):
    """Fixture: an OracleCdcExtractor whose database mock simulates a
    missing redolog dictionary."""
    argv_overrides = {
        'donotload': False,
        'donotsend': False,
        'streamhost': 'host',
        'streamchannel': 'channel',
        'streamschemafile': 'file',
        'streamschemahost': 'host',
        'sourcedictionary': 'redolog'
    }
    mockargv_config = utils.merge_dicts(
        unittest_utils.get_default_argv_config(tmpdir), argv_overrides)
    mockargv = mocker.Mock(**mockargv_config)
    unittest_utils.setup_logging(mockargv.workdirectory)

    mockdb = mocker.Mock(
        **{'execute_query.side_effect': execute_query_no_redolog_se})
    mock_audit_factory = unittest_utils.build_mock_audit_factory(mocker)
    unittest_utils.mock_get_prev_run_cdcs(mocker)
    mock_producer = unittest_utils.mock_build_kafka_producer(mocker)

    yield (OracleCdcExtractor(mockdb, mockargv, mock_audit_factory),
           mockdb, mockargv, mock_producer, 1, 2)
def test_callsDynamoDBWithoutParsing(self, mocker):
    """An unparseable DLQ event is written straight to the failed-PGN table."""
    dynamoMock = mocker.Mock()
    tableMock = mocker.Mock()
    timeMock = mocker.Mock()
    mocker.patch.object(L, 'dynamodb', dynamoMock)
    mocker.patch.object(L, 'getDatetime', timeMock)
    dynamoMock.Table.return_value = tableMock
    timeMock.return_value = _time

    L.lambda_handler(lambdaEvent_bad, None)

    dynamoMock.Table.assert_called_once_with(mock_tablePgnFailed)
    ## FAIL TO PARSE MESSAGE VALUES
    _bucket, _filename, _from, _to = ('_bucket_', '_filename_',
                                      '_from_', '_to_')
    expected_item = {
        'id': '{}_{}_{}_{}_DLQ'.format(_bucket, _filename, _from, _to),
        'bucket': _bucket,
        'filename': _filename,
        'lineFrom': _from,
        'lineTo': _to,
        'reason': "Was in DLQ",
        'datetime': _time
    }
    tableMock.put_item.assert_called_with(Item=expected_item)
def mock_mysql_connection(mocker):  # noqa: F811
    """Fixture: patch ``utils_mysql.create_mysql_connection`` and return a
    mock connection whose ``cursor()`` yields a mock cursor."""
    # Mock the MySQL connection and cursor.
    # (Previous comment said "Snowflake" — copy/paste from the Snowflake
    # fixture; this one mocks the MySQL helper.)
    mocker.patch.object(utils_mysql, 'create_mysql_connection')
    mock_cursor = mocker.Mock()
    mock_connection = mocker.Mock()
    mock_connection.cursor.return_value = mock_cursor
    utils_mysql.create_mysql_connection.return_value = mock_connection
    return mock_connection
def mock_sf_connection(mocker):  # noqa: F811
    """Fixture: patch ``snowflake.create_snowflake_connection`` and hand back
    the mock connection (its cursor() returns a mock cursor)."""
    # Mock the Snowflake connection and cursor.
    mocker.patch.object(snowflake, 'create_snowflake_connection')
    connection = mocker.Mock()
    connection.cursor.return_value = mocker.Mock()
    snowflake.create_snowflake_connection.return_value = connection
    return connection
def test_callDynamoDB(self, mocker):
    """The handler writes exactly one item to the failed-PGN table."""
    tableMock = mocker.Mock()
    dynamoMock = mocker.Mock()
    dynamoMock.Table.return_value = tableMock
    mocker.patch.object(L, 'dynamodb', dynamoMock)

    L.lambda_handler(lambdaEvent, None)

    dynamoMock.Table.assert_called_once_with(mock_tablePgnFailed)
    tableMock.put_item.assert_called_once()
def getMocks(mocker):
    """Build the standard mock quartet (S3, SQS, context, PGN file) for
    these lambda tests."""
    mockS3, mockSQS, mockPGNFile, mockContext = (
        mocker.Mock() for _ in range(4))
    mockS3.get_object.return_value = {'Body': mocker.Mock()}
    mockContext.get_remaining_time_in_millis.return_value = 10000
    return (mockS3, mockSQS, mockContext, mockPGNFile)
def setup(tmpdir, mocker):
    """Fixture: mock argv plus an audit factory whose process control is a
    no-op (insert/update return None)."""
    mockargv = mocker.Mock(**utils.get_default_argv_config(tmpdir))
    mock_pc = mocker.Mock(**{'insert.return_value': None,
                             'update.return_value': None})
    mock_audit_factory = mocker.Mock(
        **{'build_process_control.return_value': mock_pc})
    utils.mock_build_kafka_producer(mocker)
    yield (mockargv, mock_audit_factory)
def test_seek_to_end(mocker, setup):
    """seek_to_end should synchronously commit the end-of-queue offset.

    Fix: ``async`` became a reserved keyword in Python 3.7, so
    ``commit.assert_called_once_with(async=False, ...)`` is a SyntaxError.
    The keyword is now supplied via ``**{"async": False}``, which asserts
    the exact same call signature while remaining valid syntax.
    """
    global poll_calls
    poll_calls = 0
    end_of_queue_offset = 1234
    (mock_applier) = setup
    mock_message_config = {
        "value.return_value": None,
        "error.return_value": None,
        "offset.return_value": end_of_queue_offset
    }
    mock_message = mocker.Mock(**mock_message_config)

    def poll_message_side_effect(timeout):
        global poll_calls
        poll_calls += 1
        # The first polled message should be the last message in the queue
        if poll_calls == 1:
            return mock_message
        else:
            # Indicates that no more messages are left in the queue
            return None

    mock_avro_consumer_config = {
        "poll.side_effect": poll_message_side_effect,
        "committed.return_value": [TopicPartition('topic', 0, 0)]
    }
    mock_avro_consumer = mocker.Mock(**mock_avro_consumer_config)
    mock_avro_consumer_constructor = mocker.patch(
        "data_pipeline.stream.kafka_consumer.AvroConsumer")
    mock_avro_consumer_constructor.return_value = mock_avro_consumer

    kafka_consumer = KafkaConsumer('broker', 'group', 'topic',
                                   'schema_reg_url', mock_applier)
    # Simulate receiving a stats callback with the eof_offset
    kafka_consumer._eof_offset = end_of_queue_offset
    kafka_consumer.seek_to_end(1)

    expected_commit_topic_partition = TopicPartition('topic', 0,
                                                     end_of_queue_offset)
    mock_avro_consumer.commit.assert_called_once_with(
        offsets=[expected_commit_topic_partition],
        **{"async": False}
    )
def setup(tmpdir, mocker):
    """Fixture: a mock applier wired to ``apply_side_effect`` with a no-op
    process control."""
    mockargv_config = unittest_utils.get_default_argv_config(tmpdir)
    mockargv = mocker.Mock(**mockargv_config)
    unittest_utils.setup_logging(mockargv.workdirectory)
    mock_pc = mocker.Mock(**{'insert.return_value': None,
                             'update.return_value': None})
    mock_applier = mocker.Mock(**{
        "apply.side_effect": apply_side_effect,
        "process_control.return_value": mock_pc,
    })
    yield (mock_applier)
def test_predict(mocker):
    """predict should not draw any rectangle/label for this input.

    Fix: ``mocker.Mock('cv2.rectangle')`` only created a detached Mock (the
    string argument is treated as a spec), so ``cv2.rectangle`` was never
    actually replaced and the ``assert_not_called`` checks were vacuously
    true. The cv2 drawing functions are now genuinely patched.
    """
    mocked_pickle = mocker.patch('pickle.loads')
    mocked_pickle.predict_proba.return_value = [[0]]
    mocked_arg_max = mocker.patch('numpy.argmax')
    mocked_arg_max.return_value = 0
    mocked_le = mocker.Mock()
    mocked_le.classes_ = [0]
    mocked_rectangle = mocker.patch('cv2.rectangle')
    mocked_put_text = mocker.patch('cv2.putText')
    recognize_faces.predict({
        'confidence': 1,
        'threshold': .5
    }, mocker.Mock(), mocked_pickle, mocked_le, [], 0, 0, 216, 216)
    mocked_rectangle.assert_not_called()
    mocked_put_text.assert_not_called()
def test_post_save_product_category_signal_called_methods_order(mocker):
    """post_save on a root category must invoke the update methods in the
    documented order."""
    category = mocker.Mock()
    category.root_category = mocker.Mock()
    category.attributes_json = {'test': 1}
    category.attributes_old_json = {'test': 2}
    category.is_root = True

    post_save_product_category(mocker.Mock, category)

    category.assert_has_calls([
        mocker.call.update_products_after_making_category_root(),
        mocker.call.update_products_after_category_attr_changes(),
        mocker.call.update_subcategories_after_category_attr_changes(),
        mocker.call.merge_attributes_ols_json()
    ])
def test_init_video_stream(mocker):
    """init_video_stream should open cv2.VideoCapture with the given input."""
    capture_mock = mocker.Mock()
    capture_mock.get.return_value = "123"
    mocked_video_stream = mocker.patch('cv2.VideoCapture')
    mocked_video_stream.return_value = capture_mock

    app_det.init_video_stream({'input': '123'})

    mocked_video_stream.assert_any_call('123')
def test_default_next_offset_to_read(tmpdir, mocker):
    """Override the default setup because we want to mock out the query
    result of _get_last_apply_record to return nothing.
    """
    (oracle_processor, mock_target_db, mockargv, mock_audit_factory,
     mock_audit_db) = cdc_utils.setup_dependencies(tmpdir, mocker,
                                                   None, None, None)

    def execute_query_se(sql, arraysize, bind_variables):
        # Only the last-apply-record lookup is supported; it yields no rows.
        print("Query executed: {}".format(sql))
        if "SELECT executor_run_id, executor_status, status" in sql:
            return mocker.Mock(**{'fetchone.return_value': None})
        raise Exception("Query '{}' is not supported in mock!".format(sql))

    mock_db = mocker.Mock(**{'execute_query.side_effect': execute_query_se})
    mock_db_factory = mocker.patch("data_pipeline.audit.factory.db_factory")
    mock_db_factory.build.return_value = mock_db

    postgres_applier = PostgresCdcApplier(oracle_processor, mock_target_db,
                                          mockargv, mock_audit_factory)
    offset = postgres_applier.next_offset_to_read
    assert offset != confluent_kafka.OFFSET_END
    assert offset is None
def execute_query_se(sql, arraysize, bind_variables):
    """Side effect for a mocked ``db.execute_query``: return an empty fetch
    result for the supported audit-status query, raise for anything else.

    NOTE(review): this references ``mocker`` as a free variable that is not
    a parameter — it only resolves if the function is defined where
    ``mocker`` is in scope (e.g. nested inside a test using the fixture);
    confirm this is not invoked from module level.
    """
    print("Query executed: {}".format(sql))
    if "SELECT executor_run_id, executor_status, status" in sql:
        mock_query_results_config = {'fetchone.return_value': None}
        return mocker.Mock(**mock_query_results_config)
    raise Exception("Query '{}' is not supported in mock!".format(sql))
def setup_commit_point_tests(tmpdir, mocker):
    """Fixture for audit-commit-point tests: a PostgresCdcApplier with its
    commit-point hooks forced to True, plus a serialised Oracle message mock.

    Fix: ``can_apply_mock.return_valule = True`` was a typo that merely set
    a stray attribute on the mock (the tests only passed because a Mock's
    default return value is truthy). Corrected to ``return_value``.
    """
    (oracle_processor, mock_target_db, mockargv, mock_audit_factory,
     mock_audit_db) = cdc_utils.setup_dependencies(tmpdir, mocker,
                                                   None, None, None)

    at_auditcommitpoint_mock = mocker.patch.object(PostgresCdcApplier,
                                                   'at_auditcommitpoint')
    at_auditcommitpoint_mock.return_value = True

    is_end_of_batch_mock = mocker.patch(
        'data_pipeline.applier.applier._is_end_of_batch')
    is_end_of_batch_mock.return_value = True

    end_batch_mock = mocker.patch.object(PostgresCdcApplier, '_end_batch')
    end_batch_mock.return_value = True

    can_apply_mock = mocker.patch.object(PostgresCdcApplier, '_can_apply')
    can_apply_mock.return_value = True  # was: return_valule (typo)

    oracle_message = OracleMessage()
    config = {
        'value.return_value': oracle_message.serialise(),
        'offset.return_value': 1
    }
    mock_message = mocker.Mock(**config)

    yield (PostgresCdcApplier(oracle_processor, mock_target_db, mockargv,
                              mock_audit_factory),
           mock_target_db, mock_message, mock_audit_db)
def test_processGame_writes_1(self, mocker):
    """A single parsed game is batch-written and recorded as succeeded."""
    (mockS3, mockSQS, mockDynamo, mockContext,
     mockGameController, mockTime) = getMocks(mocker)
    for attr, mock_obj in (('s3', mockS3), ('dynamodb', mockDynamo),
                           ('GameController', mockGameController),
                           ('getDatetime', mockTime)):
        mocker.patch.object(L, attr, mock_obj)

    mockTable = mocker.MagicMock()
    mockBatch = mocker.Mock()
    mockTable.batch_writer.return_value.__enter__.return_value = mockBatch
    mockDynamo.Table.return_value = mockTable

    pg = ParsedGame(1)
    mockGameController.processPGNText.return_value = [pg]

    L.lambda_handler(lambdaEvent, mockContext)

    mockBatch.put_item.assert_called_once()
    mockBatch.put_item.assert_called_once_with(Item=mockSuccessDbItem(pg))
    assert mockDynamo.Table.call_count == 2
    mockDynamo.Table.assert_any_call('chess_games')
    mockDynamo.Table.assert_any_call(mock_tablePgnSucceeded)
    mockTable.put_item.assert_called_once()
    mockTable.put_item.assert_called_once_with(Item=mockAddedDbItem(1))
def execute_tests(postgres_applier, data, mocker, mock_target_db, mock_argv):
    """Drive the applier through the scripted messages in ``data`` and check
    the batch-commit result per record plus the final commit/copy counts.

    Fix: the output file was opened with a bare ``open`` and never closed,
    leaking a file handle on every iteration; it now uses a context manager.
    """
    print("Running test: '{}'".format(data.description))
    for record_type, payload, record_count, expect_batch_committed in zip(
            data.input_record_types, data.input_payloads,
            data.input_record_counts, data.expect_batch_committed):
        message = FileStreamMessage()
        message.record_type = record_type
        message.table_name = data.input_table_name
        message.payload = payload
        message.record_count = record_count
        config = {'value.return_value': message.serialise()}
        mock_message = mocker.Mock(**config)

        batch_committed = postgres_applier.apply(mock_message.value())

        # Dump the initsync output for debugging; close the handle promptly.
        with open(mock_argv.outputfile, 'r') as f:
            for line in f:
                print("initsync outputfile contents={}".format(line))

        assert batch_committed == expect_batch_committed

    assert mock_target_db.commit.call_count == data.expect_commit_called_times
    assert mock_target_db.copy.call_count == data.expect_copy_called_times
def test_callback_with_relevant_path_stripper_success(mocker):
    """ Check base path is correctly removed from found file/ directory """
    observed = mocker.Mock()
    tracker = FileTrackingImpl('PATH TO DIR', observed)

    # Base-path prefix is stripped from the directory argument.
    tracker.callback_with_relevant_path_stripper(
        'PASS THROUGH ACTION', 'PATH TO DIR ONLY_THIS_SHOULD_REMAIN',
        'PASS THROUGH FILE', 'WE_SHOULD_ALL_REMAIN_PATH_TO_DIR')
    observed.assert_called_with('PASS THROUGH ACTION',
                                ' ONLY_THIS_SHOULD_REMAIN',
                                'PASS THROUGH FILE',
                                'WE_SHOULD_ALL_REMAIN_PATH_TO_DIR')

    # A non-prefix occurrence stays intact; the exact-match last arg strips.
    tracker.callback_with_relevant_path_stripper('PASS THROUGH ACTION',
                                                 'PATH TO 123 DIR',
                                                 'PASS THROUGH FILE',
                                                 'PATH TO DIR')
    observed.assert_called_with('PASS THROUGH ACTION', 'PATH TO 123 DIR',
                                'PASS THROUGH FILE', '')

    # Stripping also applies to the final path argument.
    tracker.callback_with_relevant_path_stripper(
        '1', 'PATH TO', '2', 'PATH TO DIR/SOME FILE')
    observed.assert_called_with('1', 'PATH TO', '2', '/SOME FILE')
def test_processGame_writes_duplicated_id(self, mocker):
    """Two games sharing an id: one batch write, plus a success record and a
    duplicate-key failure record."""
    (mockS3, mockSQS, mockDynamo, mockContext,
     mockGameController, mockTime) = getMocks(mocker)
    for attr, mock_obj in (('s3', mockS3), ('dynamodb', mockDynamo),
                           ('GameController', mockGameController),
                           ('getDatetime', mockTime)):
        mocker.patch.object(L, attr, mock_obj)

    mockTable = mocker.MagicMock()
    mockBatch = mocker.Mock()
    mockTable.batch_writer.return_value.__enter__.return_value = mockBatch
    mockDynamo.Table.return_value = mockTable

    pg1 = ParsedGame(1)
    pg2 = ParsedGame(1)
    mockGameController.processPGNText.return_value = [pg1, pg2]

    L.lambda_handler(lambdaEvent, mockContext)

    mockDynamo.Table.assert_any_call('chess_games')
    mockDynamo.Table.assert_any_call(mock_tablePgnSucceeded)
    mockDynamo.Table.assert_any_call(mock_tableChessGamesFailed)
    # good game
    assert mockBatch.put_item.call_count == 1
    mockBatch.put_item.assert_any_call(Item=mockSuccessDbItem(pg1))
    # file things
    assert mockTable.put_item.call_count == 2
    mockTable.put_item.assert_any_call(Item=mockAddedDbItem(1))
    mockTable.put_item.assert_any_call(
        Item=mockFailedDbItem('Duplicated Key: 1'))
def test_end_of_batch_without_start(mocker, setup):
    """Build an END_OF_BATCH message with no preceding batch start.

    NOTE(review): this test constructs ``mock_message`` but never invokes
    the applier nor asserts anything — it appears incomplete (or its
    remainder lives outside this chunk); confirm the intended assertion,
    e.g. ``postgres_applier.apply(mock_message.value())`` plus checks.
    """
    (postgres_applier, mock_target_db, mock_audit_db) = setup
    oracle_message = OracleMessage()
    oracle_message.record_type = const.END_OF_BATCH
    config = {'value.return_value': oracle_message.serialise()}
    mock_message = mocker.Mock(**config)
def test_sync_ros_properties(mocker, context_wrapper_fixture: ContextWrapper):
    """Without ros2-node-name configured, sync_ros_properties must log an
    error and shut down."""
    with LogCapture() as capture:
        mocker.patch.dict('sys.modules', {'rclpy': mocker.Mock()})
        from ravestate_ros2 import sync_ros_properties
        result = sync_ros_properties(context_wrapper_fixture)
        expected = "ros2-node-name is not set. Shutting down ravestate_ros2"
        capture.check_present((f"{FILE_NAME}", '\x1b[1;31mERROR\x1b[0m',
                               f"{PREFIX} {PREFIX} {expected}"))
def mock_context(mocker):
    """mock_context: a fake AWS Lambda context with fixed id, time budget
    and function ARN."""
    ctx = mocker.Mock()
    ctx.aws_request_id = 12345
    ctx.get_remaining_time_in_millis.return_value = 100000.0
    ctx.invoked_function_arn = (
        'arn:aws:lambda:us-east-1:773592622512:function:elliott-helloworld')
    return ctx
def test_cannotReadS3Object(self, mocker):
    """An S3 get_object failure propagates out of the lambda handler."""
    (s3_mock, sqs_mock, context_mock, _) = getMocks(mocker)
    s3_mock.get_object.side_effect = mocker.Mock(
        side_effect=Exception('impossible to read'))
    mocker.patch.object(L, 's3', s3_mock)
    with pytest.raises(Exception) as e_info:
        L.lambda_handler(lambdaEvent, context_mock)
def test_sync_ros_properties(mocker, context_wrapper_fixture: ContextWrapper):
    """Without ros2-node-name configured, an error message is logged and the
    module shuts down (prefix-stripped log comparison)."""
    with LogCapture(attributes=strip_prefix) as capture:
        mocker.patch.dict('sys.modules', {'rclpy': mocker.Mock()})
        from ravestate_ros2 import sync_ros_properties
        result = sync_ros_properties(context_wrapper_fixture)
        capture.check_present(
            "ros2-node-name is not set. Shutting down ravestate_ros2")