def test_reader_buffering(self):
    """A partial trailing line must stay buffered until its remainder arrives."""
    chunk_one = b'{"a": 1, "b": 2}\n{"a": 2,'
    chunk_two = b'"b": 3}\n{"a": 3, "b": 4}\n'

    reader = LDJSONReader("yip", ROOT_LOGGER)
    stream = BytesIO(chunk_one)
    # Swap the reader's file descriptor for an in-memory stream; give it a
    # name attribute since the reader expects a named file object.
    reader.file.fds = stream
    reader.file.fds.name = "yip"

    # First pass: only one complete line is present; the split record waits.
    first_items = list(reader.read(last_pass=False))
    self.assertEqual(len(first_items), 1)

    # Appending the rest completes the pending record and adds a new one.
    stream.write(chunk_two)
    second_items = list(reader.read(last_pass=False))
    self.assertEqual(len(second_items), 2)
def __get_jtls_and_more(self):
    """
    Compress all files in artifacts dir to single zipfile
    :return: BytesIO
    """
    mfile = BytesIO()
    # Per-file upload quota, configurable via settings; defaults to 10 MB.
    max_file_size = self.settings.get('artifact-upload-size-limit', 10) * 1024 * 1024  # 10MB
    with zipfile.ZipFile(mfile, mode='w', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zfh:
        # Engine log files go in first, stored under their base names.
        for handler in self.engine.log.parent.handlers:
            if isinstance(handler, logging.FileHandler):
                zfh.write(handler.baseFilename, os.path.basename(handler.baseFilename))

        # Walk the artifacts tree, keeping paths relative to artifacts_dir
        # inside the archive; oversized files are skipped with a warning.
        for root, _dirs, files in os.walk(self.engine.artifacts_dir):
            for filename in files:
                full_path = os.path.join(root, filename)
                if os.path.getsize(full_path) > max_file_size:
                    msg = "File %s exceeds maximum size quota of %s and won't be included into upload"
                    self.log.warning(msg, filename, max_file_size)
                    continue
                rel_dir = os.path.relpath(root, self.engine.artifacts_dir)
                zfh.write(full_path, os.path.join(rel_dir, filename))
    return mfile
def __get_jtls_and_more(self):
    """
    Compress all files in artifacts dir to single zipfile
    :return: BytesIO
    """
    mfile = BytesIO()
    with zipfile.ZipFile(mfile, mode='w', compression=zipfile.ZIP_DEFLATED, allowZip64=True) as zfh:
        # Engine log files are stored under their base names.
        for handler in self.engine.log.parent.handlers:
            if isinstance(handler, logging.FileHandler):
                zfh.write(handler.baseFilename, os.path.basename(handler.baseFilename))
        for root, _dirs, files in os.walk(self.engine.artifacts_dir):
            for filename in files:
                # Use the path relative to artifacts_dir as the archive name.
                # Previously the bare filename was used, which flattened the
                # directory tree and made same-named files in different
                # subdirectories silently overwrite each other in the zip.
                zfh.write(
                    os.path.join(root, filename),
                    os.path.join(os.path.relpath(root, self.engine.artifacts_dir), filename))
    return mfile