Example #1
def test_pre_parse_s3_debug(s3_mock, log_mock, _):
    """S3Payload - Pre Parse, Debug On"""
    # Remember the current level so it can be restored once the test is done
    original_level = LOGGER.getEffectiveLevel()
    LOGGER.setLevel(logging.DEBUG)

    lines = ['_first_line_test_' * 10, '_second_line_test_' * 10]
    s3_mock.side_effect = [((100, lines[0]), (200, lines[1]))]

    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)
    S3Payload.s3_object_size = 350

    # Exhaust the pre_parse generator so the debug log statements fire
    list(payload_obj.pre_parse())

    # Both yielded records should produce the progress debug line
    expected_message = (
        'Processed %s S3 records out of an approximate total of %s '
        '(average record size: %s bytes, total size: %s bytes)')
    log_mock.assert_has_calls([
        call(expected_message, 100, 350, 1, 350),
        call(expected_message, 200, 350, 1, 350),
    ])

    # Restore the cached logger level
    LOGGER.setLevel(original_level)
Example #2
def test_s3_object_too_large():
    """S3Payload - S3ObjectSizeError, Object too Large"""
    # Build a payload whose reported size exceeds the download size limit
    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)
    S3Payload.s3_object_size = 128 * 1024 * 1024 + 10

    # NOTE(review): the raised S3ObjectSizeError is presumably asserted by a
    # decorator cropped out of this view — confirm against the full test file
    payload_obj._download_object('region', 'bucket', 'key')
Example #3
def test_pre_parse_s3_debug(s3_mock, log_mock, _):
    """S3Payload - Pre Parse, Debug On"""
    # Force the payload module's debug flag on for the duration of the test
    with patch.object(payload, 'LOGGER_DEBUG_ENABLED', True):
        lines = ['_first_line_test_' * 10, '_second_line_test_' * 10]
        s3_mock.side_effect = [((100, lines[0]), (200, lines[1]))]

        record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
        payload_obj = load_stream_payload('s3', 'unit_key_name', record)
        S3Payload.s3_object_size = 350

        # Exhaust the pre_parse generator so the debug log statements fire
        list(payload_obj.pre_parse())

        # Both yielded records should produce the progress debug line
        expected_message = (
            'Processed %s S3 records out of an approximate total of %s '
            '(average record size: %s bytes, total size: %s bytes)')
        log_mock.assert_has_calls([
            call(expected_message, 100, 350, 1, 350),
            call(expected_message, 200, 350, 1, 350),
        ])
Example #4
def test_s3_download_object_zero_size(*_):
    """S3Payload - Download Object of Zero Size"""
    # Raw record advertises a size of 0 bytes
    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name', 0)
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)

    # A zero-byte object should yield None instead of a downloaded path
    result = payload_obj._download_object('us-east-1', 'unit_bucket_name',
                                          'unit_key_name')
    assert_is_none(result)
Example #5
def test_get_object(log_mock, _):
    """S3Payload - Get S3 Info from Raw Record"""
    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)

    payload_obj._get_object()

    # The pre-parsing log line should carry the bucket, key, and size
    log_mock.assert_called_with(
        'Pre-parsing record from S3. Bucket: %s, Key: %s, Size: %d',
        'unit_bucket_name', 'unit_key_name', 100)
Example #6
def test_get_object_ioerror(download_object_mock):
    """S3Payload - IOError Test"""
    # Simulate a full disk while the object is being downloaded
    download_object_mock.side_effect = IOError('No space left on device')

    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)

    # The IOError should be swallowed and surfaced as a None result
    assert_equal(payload_obj._get_object(), None)
Example #7
def test_s3_download_object(log_mock, *_):
    """S3Payload - Download Object"""
    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)

    payload_obj._download_object('us-east-1', 'unit_bucket_name',
                                 'unit_key_name')

    # The second logger call should report the elapsed download time
    second_call = log_mock.call_args_list[1]
    assert_equal(second_call[0][0], 'Completed download in %s seconds')
Example #8
def test_s3_download_object(*_):
    """S3Payload - Download Object"""
    object_key = 'test/unit/s3-object.gz'
    record = make_s3_raw_record('unit_bucket_name', object_key)
    payload_obj = load_stream_payload('s3', object_key, record)
    S3Payload.s3_object_size = 1024 * 1024

    result = payload_obj._download_object('us-east-1', 'unit_bucket_name',
                                          object_key)

    # Slashes in the S3 key should be flattened to dashes in the local path
    assert_true(result.endswith('test-unit-s3-object.gz'))
Example #9
def test_pre_parse_s3(s3_mock, *_):
    """S3Payload - Pre Parse"""
    expected = ['{"record01": "value01"}', '{"record02": "value02"}']
    s3_mock.side_effect = [((0, expected[0]), (1, expected[1]))]

    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)

    # Each yielded payload should carry the corresponding raw S3 line
    for idx, parsed in enumerate(payload_obj.pre_parse()):
        assert_equal(parsed.pre_parsed_record, expected[idx])
Example #10
def test_s3_download_object_mb(log_mock, *_):
    """S3Payload - Download Object, Size in MB"""
    record = make_s3_raw_record('unit_bucket_name', 'unit_key_name')
    payload_obj = load_stream_payload('s3', 'unit_key_name', record)
    S3Payload.s3_object_size = 127.8 * 1024 * 1024

    payload_obj._download_object('us-east-1', 'unit_bucket_name',
                                 'unit_key_name')

    first_call, second_call = log_mock.call_args_list[:2]

    # The first logger call should show the object size formatted in MB
    assert_equal(first_call,
                 call('[S3Payload] Starting download from S3: %s/%s [%s]',
                      'unit_bucket_name', 'unit_key_name', '127.8MB'))

    # The second call reports the elapsed download time
    assert_equal(second_call[0][0], 'Completed download in %s seconds')