def test_cleanup(self, subproc_mock, os_mock):
    """S3Payload - Cleanup"""
    # Simulate running inside Lambda so the cleanup path is taken.
    # (subproc_mock / os_mock are injected by patch decorators outside this view.)
    with patch.dict(os.environ, {'LAMBDA_RUNTIME_DIR': '/var/runtime'}):
        S3Payload._cleanup()

    expected_shred_args = ['shred', '--force', '--iterations=1', '--remove', self.temp_file]
    subproc_mock.assert_called_with(expected_shred_args)
    os_mock.assert_called_with(self.temp_dir)
def setup(self):
    """S3Payload - Setup"""
    # Fixture data for the fake S3 object under test.
    self._bucket, self._key, self._size = 'test_bucket_name', 'test_object_name', 100
    # Build the record first; the payload is constructed from it.
    self._record = self._record_data()
    self._payload = S3Payload(None, self._record)
def test_jsonlines_reader_fallback(self):
    """S3Payload - JSON Lines Reader, Fallback"""
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        file_obj.write(b'non-json-value\n')
        file_obj.seek(0)
        result = S3Payload._jsonlines_reader(file_obj)
        # Non-JSON content: the original file object is handed back unchanged.
        assert_equal(file_obj == result, True)
def test_read_downloaded_object_fallback(self):
    """S3Payload - Read Downloaded Object, Fallback"""
    raw_bytes = b'non-json-value\n'
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        file_obj.write(raw_bytes)
        file_obj.seek(0)
        result = list(S3Payload._read_downloaded_object(file_obj))
        # Unparseable content falls back to yielding the raw line, numbered from 1.
        assert_equal(result, [(1, raw_bytes)])
def test_read_downloaded_object(self):
    """S3Payload - Read Downloaded Object"""
    record = {'key': 'value'}
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        # indent=2 spreads the object over multiple lines, exercising
        # whole-file JSON parsing rather than line-by-line parsing.
        file_obj.write(json.dumps(record, indent=2).encode())
        file_obj.seek(0)
        result = list(S3Payload._read_downloaded_object(file_obj))
        assert_equal(result, [(1, record)])
def test_jsonlines_reader(self):
    """S3Payload - JSON Lines Reader"""
    record = {'key': 'value'}
    encoded_line = (json.dumps(record, separators=(',', ':')) + '\n').encode()
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        # Two newline-delimited JSON records.
        file_obj.write(encoded_line)
        file_obj.write(encoded_line)
        file_obj.seek(0)
        result = S3Payload._jsonlines_reader(file_obj)
        # Valid JSON lines: a distinct reader object is returned.
        assert_equal(file_obj != result, True)
def test_gz_reader_non_gz(self):
    """S3Payload - GZ Reader, Non-gzip"""
    record = {'key': 'value'}
    encoded_line = (json.dumps(record, separators=(',', ':')) + '\n').encode()
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        file_obj.write(encoded_line)
        file_obj.write(encoded_line)
        file_obj.seek(0)
        result = S3Payload._gz_reader(file_obj)
        # Plain (non-gzipped) content: the original file object comes back.
        assert_equal(file_obj == result, True)
def test_read_file(self):
    """S3Payload - Read File"""
    body = b'test_data'
    # Bind the bucket resource once instead of rebuilding it per call.
    # (Relies on a mocked S3 backend set up outside this view — TODO confirm.)
    bucket = boto3.resource('s3').Bucket(self._bucket)
    bucket.create()
    bucket.put_object(Key=self._key, Body=body)

    payload = S3Payload(None, self._record)
    result = list(payload._read_file())
    assert_equal(result, [(1, body)])
def test_gz_reader(self):
    """S3Payload - GZ Reader"""
    record = {'key': 'value'}
    encoded_line = (json.dumps(record, separators=(',', ':')) + '\n').encode()
    with tempfile.SpooledTemporaryFile(max_size=10 * 1024) as file_obj:
        # Gzip two records into the spooled file; closing the GzipFile
        # flushes the trailer without closing the underlying file object.
        with gzip.GzipFile(filename='test', fileobj=file_obj) as writer:
            writer.write(encoded_line)
            writer.write(encoded_line)
        file_obj.seek(0)
        result = S3Payload._gz_reader(file_obj)
        assert_equal(isinstance(result, gzip.GzipFile), True)
        assert_equal(result.read(), encoded_line * 2)
def test_pre_parse(self):
    """S3Payload - Pre Parse"""
    expected = [{'key_01': 'value_01'}, {'key_02': 'value_02'}]
    with patch.object(S3Payload, '_read_file') as read_file_mock:
        # _read_file yields (line_number, record) pairs.
        read_file_mock.side_effect = [
            [(1, {'key_01': 'value_01'}), (2, {'key_02': 'value_02'})]
        ]
        payload = S3Payload(None, self._record)
        parsed = [record._record_data for record in payload.pre_parse()]
        assert_equal(parsed, expected)
def test_unquote(self):
    """S3Payload - Unquote"""
    # URL-encoded ampersand is decoded back to '&'.
    result = S3Payload._unquote('this%26that')
    assert_equal(result, 'this&that')
def test_read_file_error(self, log_mock):
    """S3Payload - Read File, Exception"""
    # NOTE(review): another method with this exact name appears later in the
    # file; if both are in the same class the later one shadows this and it
    # never runs — verify the class boundaries and rename one if needed.
    # (log_mock is injected by a patch decorator outside this view.)
    boto3.resource('s3').Bucket(self._bucket).create()
    # Bucket exists but the key does not; the failure should be logged.
    list(S3Payload(None, self._record)._read_file())
    log_mock.assert_called_with('Failed to download object from S3')
def test_read_file_error(self):
    """S3Payload - Read File, Exception"""
    boto3.resource('s3').Bucket(self._bucket).create()
    payload = S3Payload(None, self._record)
    # _read_file is lazy; the ClientError surfaces only when the
    # generator is consumed, hence list() as the raising callable.
    assert_raises(ClientError, list, payload._read_file())