def _fetch_and_extract_one_backup(self, metadata, file_size, fetch_fn):
    """Stream one stored backup through a rohmu sink pipeline into an external tar process.

    The remote object (fetched by ``fetch_fn``) is decrypted/decompressed by the
    sink pipeline and piped straight into tar's stdin for extraction.

    :param metadata: object metadata; also used to build the tar command line
    :param file_size: size of the stored object, passed to the sink pipeline
    :param fetch_fn: callable taking the sink; writes the object's bytes into it
    :raises Exception: if tar exits with a non-zero status
    """
    with subprocess.Popen(
        self._build_tar_args(metadata),
        bufsize=0,
        stdin=subprocess.PIPE,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.PIPE,
    ) as tar:
        # Enlarge the OS pipe buffers so the transfer is less likely to stall.
        common.increase_pipe_capacity(tar.stdin, tar.stderr)
        sink = rohmufile.create_sink_pipeline(
            file_size=file_size,
            key_lookup=config.key_lookup_for_site(self.config, self.site),
            metadata=metadata,
            output=tar.stdin,
        )
        # It would be prudent to read stderr while we're writing to stdin to avoid deadlocking
        # if stderr fills up but in practice tar should write very little to stderr and that
        # should not become a problem.
        try:
            fetch_fn(sink)
        except BrokenPipeError:
            # tar died mid-stream; surface whatever it said before re-raising.
            self.log.error("External tar returned an error: %r", tar.stderr.read())
            raise
        # Close stdin first so tar sees EOF and can finish; then drain stderr
        # before wait() — this order avoids blocking on a full stderr pipe.
        tar.stdin.close()
        tar.stdin = None
        output = tar.stderr.read()
        exit_code = tar.wait()
        # self.data_file may be an in-memory (buffer, ...) tuple instead of a path.
        file_name = "<mem_bytes>" if isinstance(self.data_file, tuple) else self.data_file
        if exit_code != 0:
            raise Exception(
                "tar exited with code {!r} for file {!r}, output: {!r}".format(exit_code, file_name, output)
            )
        self.log.info("Processing of %r completed successfully", file_name)
def _fetch_and_extract_one_backup(self, metadata, file_size, fetch_fn):
    """Feed one remote backup object through the decrypt/decompress pipeline into tar.

    ``fetch_fn`` writes the stored bytes into the sink; the sink's plaintext
    output goes to the external tar process for extraction.  Raises a generic
    ``Exception`` when tar exits non-zero.
    """
    tar_command = self._build_tar_args(metadata)
    with subprocess.Popen(
        tar_command, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE
    ) as tar:
        common.increase_pipe_capacity(tar.stdin, tar.stderr)
        decrypt_sink = rohmufile.create_sink_pipeline(
            file_size=file_size,
            key_lookup=config.key_lookup_for_site(self.config, self.site),
            metadata=metadata,
            output=tar.stdin,
        )
        # NOTE: draining stderr concurrently with the stdin writes would rule
        # out a deadlock if stderr ever filled up, but in practice tar emits
        # so little on stderr that this has not been a problem.
        try:
            fetch_fn(decrypt_sink)
        except BrokenPipeError:
            self.log.error("External tar returned an error: %r", tar.stderr.read())
            raise
        # EOF for tar, then collect its diagnostics, then reap it.
        tar.stdin.close()
        tar.stdin = None
        stderr_output = tar.stderr.read()
        rc = tar.wait()
        if isinstance(self.data_file, tuple):
            # In-memory payload, no file name to report.
            file_name = "<mem_bytes>"
        else:
            file_name = self.data_file
        if rc != 0:
            raise Exception(
                "tar exited with code {!r} for file {!r}, output: {!r}".format(rc, file_name, stderr_output)
            )
        self.log.info("Processing of %r completed successfully", file_name)
def _fetch_delta_file(self, metadata, fetch_fn):
    """Download one delta backup file and write its decoded contents under pgdata.

    The stored bytes supplied by ``fetch_fn`` are run through a rohmu sink
    pipeline (per-site key lookup, metadata-driven decryption/decompression)
    into ``self.file_info.new_name`` inside ``self.pgdata``.
    """
    destination = os.path.join(self.pgdata, self.file_info.new_name)
    with open(destination, "wb") as out_file:
        pipeline = rohmufile.create_sink_pipeline(
            output=out_file,
            file_size=self.file_info.size,
            metadata=metadata,
            key_lookup=config.key_lookup_for_site(self.config, self.site),
        )
        fetch_fn(pipeline)
    self.log.info(
        "Processing of delta file completed successfully: %r -> %r",
        self.file_info.name,
        self.file_info.new_name,
    )
def fetch(self, site, key, target_path):
    """Fetch object ``key`` for ``site`` from storage and write it to ``target_path``.

    The blob is pushed through a sink pipeline that applies decryption and
    decompression according to its metadata.  Returns ``(file_size, metadata)``.
    On any failure the partially written target file is removed before the
    exception propagates.
    """
    try:
        key_lookup = key_lookup_for_site(self.config, site)
        blob, metadata = self.transfer.get_contents_to_string(key)
        if isinstance(blob, str):
            # NOTE(review): presumably some transfers return str here; latin1
            # round-trips code points 0-255 back to the raw bytes.
            blob = blob.encode("latin1")
        file_size = len(blob)
        with open(target_path, "wb") as target_file:
            sink = create_sink_pipeline(
                output=target_file, file_size=file_size, metadata=metadata, key_lookup=key_lookup, throttle_time=0
            )
            sink.write(blob)
        return file_size, metadata
    except Exception:
        # Never leave a truncated/corrupt file behind.
        if os.path.isfile(target_path):
            os.unlink(target_path)
        raise
def fetch(self, site, key, target_path):
    """Retrieve one stored object into a local file.

    Downloads ``key`` via the transfer backend, runs it through a sink
    pipeline keyed for ``site`` (decrypt/decompress per metadata) and writes
    the result to ``target_path``.  Returns ``(file_size, metadata)``; deletes
    the incomplete target file if anything goes wrong, then re-raises.
    """
    try:
        lookup = key_lookup_for_site(self.config, site)
        payload, metadata = self.transfer.get_contents_to_string(key)
        if isinstance(payload, str):
            payload = payload.encode("latin1")  # raw-byte-preserving encoding
        size = len(payload)
        with open(target_path, "wb") as destination:
            pipeline = create_sink_pipeline(
                output=destination,
                file_size=size,
                metadata=metadata,
                key_lookup=lookup,
                throttle_time=0,
            )
            pipeline.write(payload)
        return size, metadata
    except Exception:
        # Clean up the partial download before letting the error escape.
        if os.path.isfile(target_path):
            os.unlink(target_path)
        raise