def write(self, file: BinaryIO) -> bool:
    """Serialize this object via ``self.format()`` and write it to *file*.

    :param file: open binary stream to write to.
    :returns: True on success; False when the stream raises ValueError
        (e.g. it was closed) or OSError during write/flush.
    """
    try:
        file.write(self.format())
        file.flush()
    except (ValueError, OSError):
        return False
    return True
def write_message(message: str, stream: BinaryIO = sys.stdout.buffer) -> None:
    """Write *message* to *stream* framed with a Content-Length header.

    The body is UTF-8 encoded; the header states the body's byte length
    and is terminated by a blank line (CRLF CRLF). The stream is flushed
    so the message is delivered immediately.
    """
    body = message.encode("utf-8")
    header = b"Content-Length: " + str(len(body)).encode("ascii") + b"\r\n\r\n"
    stream.write(header)
    stream.write(body)
    stream.flush()
def download_file(url: str, fd: BinaryIO) -> None:
    """
    Download data from given URL and write it to the file descriptor

    This function is designed for speed as other approaches are not able
    to utilize full network speed

    :param url: target url to download the data from
    :param fd: Open file-like descriptor
    """
    # Wrapper around the response's raw reader that updates the progress
    # bar as a side effect of every read.
    def _(*args, pbar, reader, **kwargs):
        # decode_content=True is needed so urllib3 decodes the body; see:
        # https://github.com/requests/requests/issues/2155
        data = reader(*args, decode_content=True, **kwargs)
        pbar.update(len(data))
        return data

    with requests.get(url, stream=True) as r:
        # NOTE(review): this raises KeyError when the server omits the
        # Content-length header — confirm all target servers provide it.
        pbar = tqdm.tqdm(
            total=int(r.headers['Content-length']),
            unit="bytes",
            unit_scale=True,
            unit_divisor=1024,
            desc="Downloading file",
            disable=config.PROGRESSBAR_DISABLED,
        )
        # Monkey-patch the raw reader so copyfileobj drives the progress
        # bar while streaming.
        r.raw.read = partial(
            _,
            reader=r.raw.read,
            pbar=pbar,
        )
        # Stream in chunks straight into fd without buffering the whole
        # body in memory; https://stackoverflow.com/a/39217788
        shutil.copyfileobj(r.raw, fd)
        fd.flush()
        pbar.close()
def write_binary_memory(memory_file: BinaryIO, memory: MemoryDict, field_bytes: int):
    """
    Serialize *memory* with the given field width and dump it to the
    already-open binary file, flushing so the bytes are pushed out
    immediately.
    """
    payload = memory.serialize(field_bytes)
    memory_file.write(payload)
    memory_file.flush()
def resume_transfer(inFile: str, outFile: BinaryIO, offset: int) -> int:
    """Resume sending buffered ffmpeg output for *inFile* from *offset*.

    Replays the blocks still held in the process's buffer, skipping the
    first *offset* bytes (an absolute position in the overall stream), and
    writes each block with hex-length/CRLF chunk framing. On success the
    replay buffer is discarded and live streaming continues via
    transfer_blocks().

    :returns: number of payload bytes written to *outFile*.
    """
    proc = FFMPEG_PROCS[inFile]
    # Translate the absolute stream offset into an offset within the
    # currently buffered window.
    offset -= proc.start
    count = 0
    try:
        for block in proc.blocks:
            length = len(block)
            if offset < length:
                # Partially-consumed block: drop the bytes already sent.
                if offset > 0:
                    block = block[offset:]
                outFile.write(b"%x\r\n" % len(block))
                outFile.write(block)
                outFile.write(b"\r\n")
                count += len(block)
            offset -= length
        outFile.flush()
    except Exception as msg:
        # Client likely disconnected mid-replay; report what was sent.
        LOGGER.info(msg)
        return count
    # Replay complete: collapse the buffer window and switch to live data.
    proc.start = proc.end
    proc.blocks = []
    return count + transfer_blocks(inFile, outFile)
def io_write(stream: typing.BinaryIO, message: bytes) -> None:
    """
    Write *message* to a stream, targeting its underlying buffer when one
    exists, then flush.
    """
    target = get_stream_buffer(stream)
    target.write(message)
    target.flush()
def send(dst: BinaryIO, msg: bytes):
    """Write *msg* to *dst* preceded by its packed length header, then flush."""
    header = _SIZE_FORMAT.pack(len(msg))
    dst.write(header)
    dst.write(msg)
    dst.flush()
def _create_test_file(tfile: BinaryIO, nlines=10) -> List[str]: """Helper function for populating a testing temp file with numbered example lines for comparison""" lines = [ f"This is an example line {i}\n".encode('utf-8') for i in range(1, nlines + 1) ] tfile.writelines(lines) tfile.flush() return [l.decode().strip("\n") for l in lines]
def protobuf_to_filestream(file: BinaryIO, protostr: bytes) -> None:
    """Write *protostr* to *file* prefixed with its 4-byte big-endian length.

    Silently does nothing when the stream is closed or not writable.
    """
    if file.closed or not file.writable():
        return
    prefix = len(protostr).to_bytes(4, byteorder="big")
    file.write(prefix)
    file.write(protostr)
    file.flush()
def write_lsp_request(file: BinaryIO, request: Request) -> bool:
    """Frame *request* with an LSP Content-Length header and write it.

    The length counts the UTF-8 bytes of the serialized request.

    :returns: True on success; False when the stream raises ValueError
        (e.g. closed file) or OSError.
    """
    body = request.serialize()
    header = "Content-Length: {}\r\n\r\n".format(len(body.encode("utf-8")))
    try:
        file.write((header + body).encode("utf-8"))
        file.flush()
    except (ValueError, OSError):
        return False
    return True
def write(self, file: BinaryIO) -> bool:
    """Serialize this message via ``self.json()`` and write it to *file*
    with an LSP-style Content-Length header (length in UTF-8 bytes).

    :returns: True on success; False when serialization or the stream
        raises ValueError or OSError.
    """
    try:
        body = self.json()
        header = "Content-Length: {}\r\n\r\n".format(len(body.encode("utf-8")))
        file.write((header + body).encode("utf-8"))
        file.flush()
        return True
    except (ValueError, OSError):
        return False
def write_message(file: BinaryIO, message: LanguageServerProtocolMessage) -> bool:
    """Serialize *message* and write it to *file* with an LSP-style
    Content-Length header (length counted in UTF-8 bytes).

    :returns: True on success; False when serialization or the stream
        raises ValueError or OSError.
    """
    try:
        body = message.json()
        header = "Content-Length: {}\r\n\r\n".format(len(body.encode("utf-8")))
        file.write((header + body).encode("utf-8"))
        file.flush()
        return True
    except (ValueError, OSError):
        return False
def archive(self, fp: BinaryIO, folder):
    """Compress every registered non-empty file of *folder* into *fp*.

    For each entry a target handler is opened, its content compressed into
    the archive stream, and the compressed size recorded on the entry.
    """
    # Position the archive stream where this folder's data starts.
    fp.seek(self.src_start)
    for f in self.files:
        if not f['emptystream']:
            # Fall back to a NullHandler when no target path is registered.
            target = self.target_filepath.get(f.id, NullHandler())  # type: Handler
            target.open()
            length = self.compress(fp, folder, target)
            target.close()
            f['compressed'] = length
        # NOTE(review): appending to self.files while iterating over it
        # would never terminate if this really is the same list — confirm
        # the intended nesting/target list against the original source.
        self.files.append(f)
    fp.flush()
def archive(self, fp: BinaryIO, folder):
    """Run archive task for specified 7zip folder."""
    # Position the stream at the start of this folder's source data.
    fp.seek(self.src_start)
    for f in self.files:
        if not f['emptystream']:
            filepath = self.target_filepath[f.id]
            if filepath is not None:
                with filepath.open(mode='rb') as target:
                    length = self.compress(fp, folder, target)
                # Record the achieved compressed size on the entry.
                f['compressed'] = length
        # NOTE(review): appending to self.files while iterating over it
        # would never terminate if this really is the same list — confirm
        # the intended nesting/target list against the original source.
        self.files.append(f)
    fp.flush()
def dump(self, out: typing.BinaryIO):
    """
    Dump data from core profile session to a file.

    Runs until interrupted, forwarding every received message that is not
    a stackshot/bplist payload.

    :param out: File object to write data to.
    """
    # Message prefixes that mark non-kernel-trace data and must be skipped.
    skip_prefixes = (self.STACKSHOT_HEADER, b'bplist')
    while True:
        data = self._channel.receive_message()
        if data.startswith(skip_prefixes):
            continue
        print(f'Receiving trace data ({len(data)}B)')
        out.write(data)
        out.flush()
def _detokenize_serial(databases: Iterable, device: serial.Serial,
                       baudrate: int, show_errors: bool, output: BinaryIO,
                       prefix: str) -> None:
    """Detokenize Base64-prefixed messages read live from a serial device.

    Runs until interrupted; on Ctrl-C the output stream is flushed before
    returning.
    """
    # Write raw bytes when the caller handed us text-mode stdout.
    if output is sys.stdout:
        output = sys.stdout.buffer

    detokenizer = detokenize.Detokenizer(tokens.Database.merged(*databases),
                                         show_errors=show_errors)
    # NOTE(review): *device* is annotated serial.Serial but passed as the
    # port= argument here, which expects a device name — confirm the
    # annotation against the callers.
    serial_device = serial.Serial(port=device, baudrate=baudrate)

    try:
        detokenize.detokenize_base64_live(detokenizer, serial_device, output,
                                          prefix)
    except KeyboardInterrupt:
        output.flush()
def write(self, outstream: typ.BinaryIO) -> None:
    """
    Write the current metadata into *outstream*.

    The payload is JSON; if a password is set the JSON is encrypted before
    being written.

    :raises InformationMissingError: if the module identifier is missing
        or the metadata version is not set.
    """
    # Validate required fields before serializing anything.
    if self.metadata["module"] is None:
        raise InformationMissingError("Module identifier is missing")
    if "version" not in self.metadata:
        raise InformationMissingError("Metadata version is missing")

    serialized = json.dumps(self.metadata)
    if self.password is None:
        outstream.write(serialized.encode("utf8"))
    else:
        outstream.write(encrypt(self.password, serialized))
    outstream.flush()
def _ensure_length(t: int, file: BinaryIO, file_mmap: mmap) -> mmap: if len(file_mmap) > t: return file_mmap size = len(file_mmap) add_size = (t - size) * 2 + int(size / 2) file_mmap.flush() file_mmap.close() file.seek(size) file.write(b'\0' * add_size) file.flush() ans = mmap.mmap(file.fileno(), 0) assert (len(ans) > t) return ans
def _follow_and_detokenize_file(detokenizer: _Detokenizer,
                                file: BinaryIO,
                                output: BinaryIO,
                                prefix: Union[str, bytes],
                                poll_period_s: float = 0.01) -> None:
    """Polls a file to detokenize it and any appended data."""
    try:
        while True:
            chunk = file.read()
            if not chunk:
                # Nothing new yet; wait before polling again.
                time.sleep(poll_period_s)
                continue
            detokenize_base64_to_file(detokenizer, chunk, output, prefix)
            output.flush()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop following; exit quietly.
        pass
def _write_to_socket(socket_file: BinaryIO, message: Union[str, bytes]) -> None:
    """Send *message* over the control connection's file object and flush.

    :raises stem.SocketClosed: when the connection appears to be closed
    :raises stem.SocketError: for other socket-level failures
    """
    try:
        socket_file.write(stem.util.str_tools._to_bytes(message))
        socket_file.flush()
    except socket.error as exc:
        log.info('Failed to send: %s' % exc)

        # When sending there doesn't seem to be a reliable method for
        # distinguishing between failures from a disconnect verses other things.
        # Just accounting for known disconnection responses.

        if str(exc) == '[Errno 32] Broken pipe':
            raise stem.SocketClosed(exc)
        raise stem.SocketError(exc)
    except AttributeError:
        # if the control_file has been closed then flush will receive:
        # AttributeError: 'NoneType' object has no attribute 'sendall'
        log.info('Failed to send: file has been closed')
        raise stem.SocketClosed('file has been closed')
def transfer_blocks(inFile: str, outFile: BinaryIO) -> int:
    """Stream ffmpeg stdout for *inFile* to *outFile* until EOF or error.

    Every block read is appended to the process's replay buffer (bounded
    by MAXBLOCKS, so resume_transfer() can replay recent data) and
    forwarded with hex-length/CRLF chunk framing.

    :returns: number of payload bytes written to *outFile*.
    """
    proc = FFMPEG_PROCS[inFile]
    blocks = proc.blocks
    count = 0
    while True:
        try:
            block = proc.process.stdout.read(BLOCKSIZE)
            proc.last_read = time.time()
        except Exception as msg:
            # Reading from ffmpeg failed: tear the process down and stop.
            LOGGER.info(msg)
            cleanup(inFile)
            kill(proc.process)
            break
        if not block:
            # EOF from ffmpeg: flush what we have; only clean up when the
            # flush succeeded (i.e. the client is still connected).
            try:
                outFile.flush()
            except Exception as msg:
                LOGGER.info(msg)
            else:
                cleanup(inFile)
            break
        # Keep the replay buffer bounded; advance the window start as old
        # blocks are evicted.
        blocks.append(block)
        proc.end += len(block)
        if len(blocks) > MAXBLOCKS:
            proc.start += len(blocks[0])
            blocks.pop(0)
        try:
            outFile.write(b"%x\r\n" % len(block))
            outFile.write(block)
            outFile.write(b"\r\n")
            count += len(block)
        except Exception as msg:
            # Client went away mid-write; buffered blocks remain for resume.
            LOGGER.info(msg)
            break
    return count
def detokenize_base64_live(detokenizer: _Detokenizer,
                           input_file: BinaryIO,
                           output: BinaryIO,
                           prefix: Union[str, bytes] = BASE64_PREFIX,
                           recursion: int = DEFAULT_RECURSION) -> None:
    """Reads chars one-at-a-time and decodes messages; SLOW for big files."""
    # Normalize the prefix to bytes so it can be matched against the stream.
    prefix_bytes = prefix.encode() if isinstance(prefix, str) else prefix
    base64_message = _base64_message_regex(prefix_bytes)

    # Replace every prefixed Base64 token found in *data* with its
    # detokenized form (recursing up to *recursion* levels).
    def transform(data: bytes) -> bytes:
        return base64_message.sub(
            _detokenize_prefixed_base64(detokenizer, prefix_bytes, recursion),
            data)

    # The character set covers standard and URL-safe Base64 plus padding.
    for message in PrefixedMessageDecoder(
            prefix, string.ascii_letters + string.digits + '+/-_=').transform(
                input_file, transform):
        output.write(message)

        # Flush each line to prevent delays when piping between processes.
        if b'\n' in message:
            output.flush()
def update_state_initialized(to_parent: BinaryIO) -> None:
    """Report the INITIALIZED state plus this process's pid to the parent.

    The payload is one big-endian state byte followed by the pid as four
    big-endian bytes; the pipe is flushed so the parent sees it at once.
    """
    state_byte = State.INITIALIZED.value.to_bytes(1, 'big')
    pid_bytes = os.getpid().to_bytes(4, 'big')
    to_parent.write(state_byte + pid_bytes)
    to_parent.flush()
def update_state(to_parent: BinaryIO, state: State) -> None:
    """Write the raw state payload to the parent pipe and flush immediately."""
    payload = state.value
    to_parent.write(payload)
    to_parent.flush()
def update_state(to_parent: BinaryIO, state: State) -> None:
    """Send *state* to the parent as a single big-endian byte and flush."""
    encoded = state.value.to_bytes(1, 'big')
    to_parent.write(encoded)
    to_parent.flush()
def update_state_finished(to_parent: BinaryIO, finished_payload: bytes) -> None:
    """Send the FINISHED marker byte followed by *finished_payload*, then flush."""
    marker = State.FINISHED.value.to_bytes(1, 'big')
    to_parent.write(marker + finished_payload)
    to_parent.flush()
def write(self, input: Iterable[Any], output: BinaryIO) -> None:
    """Write each item of *input* to *output* as one JSON document per line.

    NOTE(review): the TextIOWrapper created here is never detached, so when
    it is garbage-collected it closes the caller's *output* stream — confirm
    callers expect that.
    """
    text_out = io.TextIOWrapper(output, encoding="UTF-8")
    for record in input:
        text_out.write(json.dumps(record))
        text_out.write("\n")
        # Flush per record so consumers see each line promptly.
        text_out.flush()
def write_to_file(data: bytes, output: BinaryIO = sys.stdout.buffer):
    """Write *data* followed by a newline to *output* and flush."""
    payload = data + b'\n'
    output.write(payload)
    output.flush()