Example 1
def test_process_file_successful(patch_storage, patch_rabbit, tmp_path):
    schema = {'header_1': [], 'header_2': []}
    header = ','.join(schema.keys())
    mock_processor = setup_mock_processor(schema, None)
    mock_processor.build_event_messages.side_effect = lambda row: [row]
    bulk_processor = BulkProcessor(mock_processor)
    bulk_processor.working_dir = tmp_path
    bulk_processor.rabbit = patch_rabbit
    test_file = RESOURCE_PATH.joinpath('bulk_test_file_success.csv')

    success_file, error_file, error_detail_file = bulk_processor.initialise_results_files(
        test_file.name)
    success_count, failure_count = bulk_processor.process_file(
        test_file, success_file, error_file, error_detail_file)

    assert not failure_count, 'Should have no processing errors'
    assert success_count == 1, 'Should successfully process one row'

    assert success_file.read_text() == test_file.read_text()
    assert error_file.read_text() == header + '\n'
    assert error_detail_file.read_text() == HEADER_IS_VALID

    patch_rabbit.publish_message.assert_called_once_with(
        message=json.dumps({
            'header_1': 'foo',
            'header_2': 'bar'
        }),
        content_type='application/json',
        headers=None,
        exchange=mock_processor.exchange,
        routing_key=mock_processor.routing_key)
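These examples assume the test module's own imports and constants (json, RESOURCE_PATH, HEADER_IS_VALID) plus the pytest fixtures patch_storage, patch_rabbit and tmp_path. As a rough sketch only, setup_mock_processor is presumably a thin helper around unittest.mock.MagicMock; everything below beyond the schema attribute is a guess for illustration, not the project's actual helper:

from unittest.mock import MagicMock


def setup_mock_processor(schema, sample_file_path):
    # Build a mock processor exposing the schema the BulkProcessor validates against;
    # build_event_messages, exchange and routing_key come for free as MagicMock attributes.
    mock_processor = MagicMock()
    mock_processor.schema = schema
    mock_processor.sample_file_path = sample_file_path  # assumed attribute name
    return mock_processor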
Example 2
def test_process_file_encoding_failure(patch_storage, patch_rabbit, tmp_path):
    schema = {'header_1': [], 'header_2': []}
    header = ','.join(schema.keys())
    mock_processor = setup_mock_processor(schema, None)
    bulk_processor = BulkProcessor(mock_processor)
    bulk_processor.working_dir = tmp_path
    test_file = RESOURCE_PATH.joinpath('bulk_test_file_encoding_failure.csv')

    success_file, error_file, error_detail_file = bulk_processor.initialise_results_files(
        test_file.name)
    success_count, failure_count = bulk_processor.process_file(
        test_file, success_file, error_file, error_detail_file)

    assert failure_count == 1, 'Should have one failure when it tries to decode the file'
    assert not success_count, 'Should not successfully process any rows'

    assert success_file.read_text() == header + '\n'
    assert 'Invalid file encoding, requires utf-8' in error_detail_file.read_text()
    patch_rabbit.publish_message.assert_not_called()
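This test reads a non-UTF-8 resource file that ships with the suite. For local experimentation, an equivalent fixture could be produced with something like the following sketch (the helper name and byte content are invented for illustration; 0xff is never a valid UTF-8 byte, so decoding fails):

from pathlib import Path


def write_non_utf8_fixture(path: Path):
    # 0xff cannot appear in valid UTF-8, so the BulkProcessor decode step should raise
    path.write_bytes(b'header_1,header_2\n\xfffoo,bar\n')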
Example 3
def test_process_file_success_failure_mix(patch_storage, patch_rabbit, tmp_path):
    error_message_description = 'tests value invalid failure message'
    error_detail_message = '[Column: header_1, Error: tests value invalid failure message]'

    schema = {
        'header_1': [no_invalid_validator(message=error_message_description)],
        'header_2': []
    }
    header = ','.join(schema.keys())
    mock_processor = setup_mock_processor(schema, None)
    mock_processor.build_event_messages.side_effect = lambda row: [row]
    bulk_processor = BulkProcessor(mock_processor)
    bulk_processor.working_dir = tmp_path
    bulk_processor.rabbit = patch_rabbit
    test_file = RESOURCE_PATH.joinpath(
        'bulk_test_file_success_failure_mix.csv')

    success_file, error_file, error_detail_file = bulk_processor.initialise_results_files(
        test_file.name)
    success_count, failure_count = bulk_processor.process_file(
        test_file, success_file, error_file, error_detail_file)

    assert failure_count == 1, 'Should fail to process one row'
    assert success_count == 1, 'Should successfully process one row'

    assert success_file.read_text() == header + '\n' + 'foo,bar' + '\n'
    assert error_file.read_text() == header + '\n' + 'invalid,bar' + '\n'
    assert error_detail_file.read_text() == HEADER_IS_VALID + error_detail_message + '\n'

    patch_rabbit.publish_message.assert_called_once_with(
        message=json.dumps({
            'header_1': 'foo',
            'header_2': 'bar'
        }),
        content_type='application/json',
        headers=None,
        exchange=mock_processor.exchange,
        routing_key=mock_processor.routing_key)
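Example 3 attaches a validator to header_1 so the 'invalid,bar' row is rejected while 'foo,bar' passes. A hypothetical sketch of no_invalid_validator, assuming schema validators are factories returning callables that raise for the literal value 'invalid' (the exception type here is invented for illustration):

class ValidationFailure(Exception):
    pass


def no_invalid_validator(message):
    def validate(value, **kwargs):
        # Reject the sentinel value used in the mixed success/failure fixture
        if value == 'invalid':
            raise ValidationFailure(message)

    return validate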