def extract_files(src, dst, judge_func, *, gz=False, group=False):
    src = src.replace("/", "\\").rstrip("\\") + "\\"
    if group:
        groups = defaultdict(set)
        with tqdm(Path(src).rglob("*"), "Searching", unit="file", ascii=True) as bar:
            all_files = {str(fn) for fn in bar}
        with tqdm(all_files, "Classifying", unit="file", ascii=True) as bar:
            for fn in filter(judge_func, bar):
                ft = datetime.fromtimestamp(getmtime(fn))
                groups[(ft.year, ft.month, ft.day)].add(fn.replace(src, ""))
        with tar_open(dst, "w" + (":gz" if gz else "")) as tarf, tqdm(
                groups.items(), "Exporting groups", ascii=True) as bar:
            for (year, month, day), files in bar:
                tarfn_head = join(
                    f"{str(year).zfill(4)}-{str(month).zfill(2)}",
                    str(day).zfill(2),
                )
                with tqdm(files, "Exporting files", leave=False, ascii=True) as inner_bar:
                    for fn in inner_bar:
                        tarf.add(join(src, fn), join(tarfn_head, fn), recursive=False)
    else:
        with tqdm(Path(src).rglob("*"), "Searching", unit="file", ascii=True) as bar:
            all_files = {str(fn) for fn in bar}
        with tqdm(all_files, "Classifying", unit="file", ascii=True) as bar:
            valid_files = {fn.replace(src, "") for fn in filter(judge_func, bar)}
        with tar_open(dst, "w" + (":gz" if gz else "")) as tarf, tqdm(
                valid_files, "Exporting", ascii=True) as bar:
            for fn in bar:
                tarf.add(join(src, fn), fn, recursive=False)
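
# A minimal usage sketch for extract_files (hypothetical paths and predicate,
# not part of the original source): archive every .log file under a folder,
# grouped into YYYY-MM/DD sub-paths by modification date.
if __name__ == "__main__":
    extract_files(
        "D:/logs",                        # source root; normalized to a Windows path internally
        "D:/logs-by-date.tar.gz",         # destination tar archive
        lambda fn: fn.endswith(".log"),   # judge_func decides which files to keep
        gz=True,
        group=True,
    )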
def extractTar(tar_path: str, directory_path: str, remove_tar=False):
    if not path.isdir(directory_path):
        mkdir(directory_path)
    with tar_open(tar_path, "r:gz") as tar_file:
        tar_file.extractall(path=directory_path)
    if remove_tar:
        remove(tar_path)
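
# Usage sketch for extractTar (hypothetical file names): unpack a gzipped tar
# into ./restored and delete the archive once it has been extracted.
extractTar("backup.tar.gz", "restored", remove_tar=True)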
async def tar_file(event):
    "To create tar file"
    input_str = event.pattern_match.group(1)
    if not input_str:
        return await edit_delete(event, "`Provide file path to compress`")
    if not os.path.exists(Path(input_str)):
        return await edit_or_reply(
            event,
            f"There is no such directory or file with the name `{input_str}`, check again",
        )
    if os.path.isfile(Path(input_str)):
        return await edit_delete(event, "`File compressing is not implemented yet`")
    mone = await edit_or_reply(event, "`Tar creation in progress....`")
    start = datetime.now()
    filePaths = zipdir(input_str)
    filepath = os.path.join(Config.TMP_DOWNLOAD_DIRECTORY, os.path.basename(Path(input_str)))
    destination = f"{filepath}.tar.gz"
    with tar_open(destination, "w:gz") as zip_file:
        for file in filePaths:
            zip_file.add(file)
    end = datetime.now()
    ms = (end - start).seconds
    await mone.edit(
        f"Created a tar file for the given path {input_str} as `{destination}` in __{ms}__ Seconds"
    )
def find_driver_path(self, driver_version):
    null_path = '/dev/null'
    bits = 64 if maxsize > 2**32 else 32
    if platform.startswith('linux'):
        identifier = 'linux%s' % bits
    elif platform == 'win32':
        identifier = 'win%s' % bits
        null_path = 'NUL'
    elif platform == 'darwin':
        identifier = 'macos'
    else:
        return None, None
    driver_path = join(PATH_RESOURCES, 'geckodriver-%s' % identifier)
    if not exists(driver_path):
        url = Firefox.URL_DRIVER % driver_version
        page = get(url).text
        url_driver = urljoin(url, findall(Firefox.REGEX_LINK % identifier, page)[0])
        compressed_path = join(PATH_RESOURCES, url_driver.rsplit('/', 1)[1])
        download_file(url_driver, compressed_path)
        if compressed_path.endswith('.zip'):
            from zipfile import ZipFile
            with ZipFile(compressed_path, 'r') as zf:
                with open(driver_path, 'wb') as f:
                    f.write(zf.read(zf.namelist()[0]))
        else:
            from tarfile import open as tar_open
            with tar_open(compressed_path, 'r:gz') as tf:
                with tf.extractfile('geckodriver') as gd, open(driver_path, 'wb') as f:
                    f.write(gd.read())
        remove(compressed_path)
        chmod(driver_path, 0o755)  # octal permissions; the decimal literal 755 sets the wrong mode
    return driver_path, null_path
def main():
    logger = create_logger(PurePath(__file__).stem)
    logger.setLevel(INFO)
    distutils.log = logger
    base_backup_name = getenv('BACKUP_NAME', 'minecraft-vanillaplusplus-backup')
    date_stamp = datetime.now().strftime("%G-W%V-%u-%H-%M-%S")
    backup_name = f'{base_backup_name}-{date_stamp}.tar.xz'
    create_backup_path = f'/mnt/{backup_name}'
    backup_path = '/mnt/source/'
    final_backup_destination = f'/mnt/backups/{backup_name}'
    temp_path = '/mnt/tmp/'
    try:
        remove_tree(temp_path)
    except FileNotFoundError:
        pass
    copy_tree(backup_path, temp_path)
    logger.info('Creating Backup')
    with tar_open(create_backup_path, 'w:xz') as tar:
        tar.add(f'{temp_path}.', arcname='')
    logger.info('Moving Backup')
    move(create_backup_path, final_backup_destination)
    logger.info('Successfully executed cron job')
def test_execute_no_delete(self):
    # If the sending failed...
    self.inst.send_sensor_data.return_value = False
    self.inst.execute()
    # Then the archives should not have been deleted
    self.assertItemsEqual(listdir(self.output_dir), self.output)
    # The tar files should still exist
    self.assertEqual(sorted(listdir(self.output_dir)), self.output)
    # The members should be as expected also
    file_path = join(self.output_dir, self.output[0])
    with tar_open(file_path, mode=self.tar_read_mode) as tarball:
        self.assertEqual(tarball.getnames(), self.ready[0:2])
    file_path = join(self.output_dir, self.output[1])
    with tar_open(file_path, mode=self.tar_read_mode) as tarball:
        self.assertEqual(tarball.getnames(), self.ready[2:4])
def get_paper(*source_files):
    """
    Walks through all *.nxml files contained in tar archives passed as file
    names, and yields them as tuples (path, paper) where:

    * path is the path of the nxml file inside the tar file
    * paper is a Paper object containing the contents of the nxml file
    """
    for file_name in source_files:
        logging.info('Processing bucket %s' % file_name)
        fh = tar_open(file_name)  # raises IOError if file not found
        for file_info in fh:
            # filter *.nxml files
            if file_info.isdir() or not file_info.name.endswith('.nxml'):
                continue
            path = file_info.name
            try:
                paper = Paper(fh.extractfile(file_info))
            except SyntaxError as err:
                logging.warning("{0} in {1}".format(err, path))
                continue  # skip unparseable files instead of yielding a stale paper
            yield path, paper
        fh.close()
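
# Iterating get_paper over a PMC-style bucket archive (hypothetical file name):
# each item pairs the nxml path inside the tar with the parsed Paper object.
for nxml_path, paper in get_paper("pmc_bucket_00.tar.gz"):
    print(nxml_path)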
def _archive_files(self, file_list, archive_path):
    with tar_open(archive_path, mode=self.tar_mode) as tarball:
        for file_path in file_list:
            try:
                tarball.add(file_path, arcname=basename(file_path))
            except (IOError, OSError):
                logging.warning('Could not add %s', file_path)
def restore(self, backupfile=None):
    if backupfile is None:
        # file_path stays None if no NetworkManager backup is found,
        # instead of raising a NameError below
        file_path = None
        dir_path = Path('/backup')
        for f in dir_path.iterdir():
            if f.name == 'NetworkManager':
                file_path = f
                break
    else:
        file_path = Path(backupfile)
    if file_path is not None and file_path.exists():
        tmp_dir = Path(mkdtemp())
        with tar_open(file_path) as tar:
            tar.extractall(tmp_dir)
        if tmp_dir.joinpath('nat').exists():
            external_interface = 'eth1'
            with tmp_dir.joinpath('nat').open('r', encoding='utf-8') as fd:
                external_interface = fd.readline().strip()
            self.set_nat(True, True, external_interface)
        for f in self.backup_files:
            print("Restoring {0}".format(f))
            tmp_path = tmp_dir.joinpath(f.relative_to('/'))
            if tmp_path.exists():
                shutil_copy(tmp_path, f)
        self.apply_changes()
    return n4d.responses.build_successful_call_response("True")
def backup(self, dir_path="/backup"):
    try:
        backup_dir = Path(dir_path)
        backup_dir.mkdir(parents=True, exist_ok=True)
        file_path = backup_dir.joinpath(get_backup_name("NetworkManager"))
        aux_file_path = None
        with tar_open(file_path, "w:gz") as tar:
            for f in self.backup_files:
                if Path(f).exists():
                    tar.add(f)
            if self.get_nat_persistence()['return']:
                aux_file = NamedTemporaryFile('w', delete=False, encoding='utf-8')
                aux_file_path = Path(aux_file.name)
                aux_file.file.write(self.external_interface)
                aux_file.file.flush()  # make sure the contents are on disk before tar.add reads the file
                tar.add(aux_file.name, arcname='nat')
        if aux_file_path is not None and aux_file_path.exists():
            aux_file_path.unlink()
        self.dprint("Backup generated in {}".format(file_path))
        return n4d.responses.build_successful_call_response(str(file_path))
    except Exception as e:
        self.dprint("Backup failed: {0}".format(str(e)))
        return n4d.responses.build_failed_call_response(
            NetworkManager.BACKUP_FAILED,
            "[NetworkManager] backup failed {0}".format(str(e)))
async def untar_file(event):  # sourcery no-metrics
    "To unpack the tar file"
    if input_str := event.pattern_match.group(1):
        path = Path(input_str)
        if os.path.exists(path):
            start = datetime.now()
            if not is_tarfile(path):
                return await edit_delete(
                    event, f"`The given path {path} is not a tar file to unpack`"
                )
            mone = await edit_or_reply(event, "`Unpacking....`")
            destination = os.path.join(
                Config.TMP_DOWNLOAD_DIRECTORY, os.path.basename(path).split(".")[0]
            )
            if not os.path.exists(destination):
                os.mkdir(destination)
            with tar_open(path) as file:
                # extract everything into the destination directory
                file.extractall(destination)
            end = datetime.now()
            ms = (end - start).seconds
            await mone.edit(
                f"**Time Taken :** `{ms} seconds`"
                f"\nUnpacked the input path `{input_str}` and stored to `{destination}`"
            )
    else:
        await edit_delete(event, f"I can't find that path `{input_str}`", 10)
def generate(server, name, word_list_path):
    # Create an answer string
    words = " ".join(x for x in read_words(word_list_path))
    # Inform what words were generated
    print(name + " telegram: Generated the following words: " + words)
    # Add the key to the cache, to check the answer later
    server.cache[name + "_telegram"] = words
    # Generate the cipher text
    cipher_text = generate_cipher_text(words)
    # Create the flag file
    with open(FLAG_PATH, "w") as temp_file_out:
        temp_file_out.write(cipher_text)
    with tar_open(TAR_FILE_PATH, "w:gz") as tar:
        tar.add(FLAG_PATH, arcname=path.basename("navigation_and_encoding"))
    remove(FLAG_PATH)
    # Server debug message
    print("telegram output successfully created: " + cipher_text)
    # Inform the user that the challenge was generated successfully
    return "Successfully generated the challenge. Check the output folder for the file - " \
           "we compressed it to save space!" + "\r\n"
def process_tar(tar_path):
    '''
    Extract the directory `bin` contained in the tar file and remove the
    archive.

    :return: `True` if the tar has been processed correctly. Otherwise, the
        function returns `False`.
    :rtype: bool
    '''
    if not isfile(tar_path):
        return False  # keep the documented bool contract instead of returning 1
    correct_execution = True
    tar = None
    try:
        tar = tar_open(tar_path, 'r')
        tar.extractall(path=get_execution_path())
        tar.close()
    except BaseException:
        if tar:
            tar.close()
        correct_execution = False
    # No error if 'remove' raises an exception. The aim of the function
    # is to extract the executable from the archive.
    try:
        remove(tar_path)
    except BaseException:
        pass
    return correct_execution
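
# process_tar usage sketch (hypothetical archive name): the boolean result can
# gate any follow-up step that depends on the extracted `bin` directory.
if process_tar("bin.tar"):
    print("archive extracted and removed")
else:
    print("extraction failed or archive missing")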
def download_tar(url):
    # Why this small function?
    # Unit testing and future centralized downloader
    # TODO: Centralized downloading
    try:
        return tar_open(fileobj=BytesIO(get(url).content))
    except (TarError, RequestException) as e:
        log.info('Got error while downloading %r', url, exc_info=e)
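
# download_tar usage sketch (hypothetical URL): the function returns a TarFile
# on success and None on a download or tar error, so always check before use.
archive = download_tar("https://example.com/data.tar.gz")
if archive is not None:
    print(archive.getnames())
    archive.close()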
def test_execute_ioerror(self, mock_remove):
    # If we can't read one of the files...
    remove(join(self.input_dir, self.ready[0]))
    self.inst.execute()
    # ...it won't be added to the tar file.
    outfile_path = join(self.output_dir, self.output[0])
    with tar_open(outfile_path, mode=self.tar_read_mode) as tarball:
        self.assertItemsEqual(tarball.getnames(), self.ready[1:2])
def process(self):
    # Delete directory if present, then extract new archive
    rmtree(join(settings.VUE_ROOT, 'dist'), ignore_errors=True)
    with tar_open(fileobj=self.cleaned_data['file'].file, mode='r:gz') as archive:
        archive.extractall(settings.VUE_ROOT)
        # Extract index.html to templates dir
        archive.extract(archive.getmember('dist/index.html'),
                        settings.TEMPLATES[0]['DIRS'][0])
    # the with-block closes the archive; an explicit close() is redundant
    call_command('collectstatic', verbosity=0, interactive=False)
def extract_tar_gz(tar_path: str, extract_path: str) -> None:
    def tqdm_progress(members):
        # getnames() already returns a list, so len() works directly
        extract_progress_bar = tqdm(total=len(members.getnames()))
        for member in members:
            extract_progress_bar.update()
            yield member
        extract_progress_bar.close()

    with tar_open(tar_path, 'r:gz') as tarball:
        tarball.extractall(extract_path, members=tqdm_progress(tarball))
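
# extract_tar_gz usage sketch (hypothetical paths): the inner generator feeds
# members to extractall one by one so tqdm can show per-file progress.
extract_tar_gz("dataset.tar.gz", "dataset/")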
def main():
    logger = create_logger(PurePath(__file__).stem)
    if getenv('SPIGOT_SKIP_BACKUP', 'False').lower() not in ['true', 't', 'y', 'yes', '1']:
        logger.info('Creating Backup')
        date_stamp = datetime.now().strftime("%G-W%V-%u-%H-%M-%S")
        with tar_open(f'/mnt/minecraft/spigot-backup-{date_stamp}.tar.lzma', 'w:xz') as tar:
            tar.add('/mnt/minecraft/.')
def compress_rr_trace(src, dest):
    # resolve symlink to latest trace available
    latest_trace = (src / "latest-trace").resolve(strict=True)
    assert latest_trace.is_dir(), "missing latest-trace directory"
    rr_arc = dest / "rr.tar.bz2"
    LOG.debug("creating %r from %r", rr_arc, latest_trace)
    with tar_open(rr_arc, "w:bz2") as arc_fp:
        arc_fp.add(str(latest_trace), arcname=latest_trace.name)
    # remove path containing uncompressed traces
    rmtree(str(src))
    return rr_arc
def compress_rr_trace(src, dest):
    # resolve symlink to latest trace available
    latest_trace = realpath(pathjoin(src, "latest-trace"))
    assert isdir(latest_trace), "missing latest-trace directory"
    rr_arc = pathjoin(dest, "rr.tar.bz2")
    LOG.debug("creating %r from %r", rr_arc, latest_trace)
    with tar_open(rr_arc, "w:bz2") as arc_fp:
        arc_fp.add(latest_trace, arcname=basename(latest_trace))
    # remove path containing uncompressed traces
    rmtree(src)
    return rr_arc
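
# Usage sketch for the os.path variant directly above (hypothetical paths);
# the pathlib variant earlier takes Path objects instead of strings. Note that
# src is deleted after the archive is written.
rr_archive = compress_rr_trace("/tmp/rr-traces", "/tmp/report")
print(rr_archive)  # /tmp/report/rr.tar.bz2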
def get_info(self) -> Sequence[Tuple[str, int]]:
    """Returns PACK info."""
    if is_zipfile(self._file_path):
        with ZipFile(self._file_path, 'r') as z_f:
            return tuple((z_.filename, z_.file_size) for z_ in z_f.infolist())
    elif is_rarfile(self._file_path):
        with RarFile(self._file_path, 'r') as r_f:
            return tuple((r_.filename, r_.file_size) for r_ in r_f.infolist())
    else:
        with tar_open(self._file_path, 'r') as t_f:
            return tuple((t_.name, t_.size) for t_ in t_f.getmembers())
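
# Hypothetical usage of get_info, assuming the enclosing class (called Pack
# here for illustration) stores the archive path in self._file_path; the same
# call shape covers zip, rar and tar archives.
# pack = Pack("bundle.tar.gz")
# for member_name, member_size in pack.get_info():
#     print(member_name, member_size)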
def mock_open(name, mode,
              files=[["meta_data/insights.spec-small", 1],
                     ["meta_data/insights.spec-big", 1],
                     ["data/insights/small", 1],
                     ["data/insights/big", 100]]):
    base_path = "./insights-client"
    with tempfile.TemporaryFile(suffix='.tar.gz') as f:
        tarball = tar_open(fileobj=f, mode='w:gz')
        for file in files:
            # members carry only metadata (name and size); no payload is
            # written, which is enough for code that inspects the member list
            member = tarfile.TarInfo(name=os.path.join(base_path, file[0]))
            member.size = file[1]
            tarball.addfile(member, None)
        return tarball
def run(self): """Worker loop.""" while True: try: year, filename = self.queue.get(timeout=2, block=True) except Empty: continue LOG.info('Fetching %s', self.conf.url + filename) res = get(self.conf.url + filename, stream=True) if not (200 <= res.status_code < 300): LOG.warning('Cannot download %s for some reason', filename) self.queue_done.put(year, block=False) continue result = f'./{year}.gz' if self.conf.is_compress else f'./{year}' if os_path.isfile(result) and not self.conf.force: LOG.info('The ./%s.txt is already exists', year) self.queue_done.put(year, block=False) continue if os_path.isfile(result) and self.conf.force: LOG.info('The ./%s.txt is already exists; removing', year) remove(result) tmp = self.conf.tmp_dir / year tmp.mkdir(exist_ok=True, parents=True) LOG.info('Dumping %s into %s', filename, tmp.absolute()) with tar_open(fileobj=BytesIO(res.raw.read()), mode='r:gz') as tar: data = (m.name for m in tar.getmembers()) data = (m for m in data if match(self.conf.member_regex, m)) tar.extractall(tmp.absolute()) _open = gzip_open if self.conf.is_compress else open with _open(result, 'ab') as fd_result: for data_file in data: LOG.info('Aggregating %s into %s', data_file, result) with (tmp / data_file).open(mode='rb') as fd_data_file: fd_result.write(fd_data_file.read()) self.queue_done.put(year, block=False)
def dir_backup(self):
    src_basename = os.path.basename(self.src)
    dest = f'{self.path}{src_basename}.tar.gz'
    if os.path.isfile(dest) and is_tarfile(dest):
        Printer.print(
            f'{Fore.YELLOW}{dest} already exists. Skipping!{Fore.RESET}')
        return FileResult.data.append(
            [self.src, " ", " ", f'{Fore.YELLOW}Skipped{Fore.RESET}'])
    Printer.print(
        f'{Fore.YELLOW}Archiving {self.src} to {dest}.{Fore.RESET}')
    with tar_open(dest, "w:gz") as target_fd:
        target_fd.add(self.src, arcname=src_basename)
    return FileResult.data.append([
        self.src,
        sizeof(self.src),
        convert_size(os.path.getsize(dest)) + ' (compressed)',
        f'{Fore.GREEN}OK{Fore.RESET}'
    ])
def untar_task(uploaded_task):
    if not uploaded_task.is_correct():
        return uploaded_task
    error_status = UploadedTaskDeployStatus(
        uploaded_task=uploaded_task,
        phase=UploadedTaskDeployStatus.PHASE_UNTAR,
    )
    uploaded_path = uploaded_task.path
    untarred_path, ext = splitext_all(uploaded_path)
    try:
        tar_file = tar_open(uploaded_task.path)
    except Exception as ex:
        error_status.message = 'Error opening tar file: %s' % str(ex)
        error_status.save()
        return uploaded_task
def export_branch(self, branch):
    checkout_dir = None
    if branch == self.get_current_branch():
        return self.get_git_root()
    try:
        checkout_dir, exported = self._get_export_directory(branch)
        if not exported:
            export_branch = self._resolve_branch(branch)
            if not export_branch:
                raise CheckoutException(
                    "Failed to resolve branch " + branch + " for export")
            proc = Popen(
                ["git", "archive", "--format", "tar", export_branch],
                stdout=PIPE, stderr=open(devnull, 'w'))
            tar = tar_open(mode="r|", fileobj=proc.stdout)
            tar.extractall(path=checkout_dir)
    except TarReadError:
        raise CheckoutException("Failed to export branch " + branch)
    return checkout_dir
def format_checks(uploaded_task):
    if not uploaded_task.is_uploaded():
        return uploaded_task
    error_status = UploadedTaskDeployStatus(
        uploaded_task=uploaded_task,
        phase=UploadedTaskDeployStatus.PHASE_FORMAT_CHECK)
    untarred_path, ext = splitext_all(uploaded_task.path)
    supported_exts = ['.tar.gz', '.tar.bz2', '.tar']
    if ext not in supported_exts:
        msg = 'Unsupported format "{ext}". Should be one of {supported}.'
        msg = msg.format(ext=ext, supported=', '.join(supported_exts))
        error_status.message = msg
        error_status.save()
        return uploaded_task
    tar_file = None
    try:
        tar_file = tar_open(uploaded_task.path)
    except Exception as ex:
        error_status.message = 'Error opening tar file: %s' % str(ex)
        error_status.save()
        return uploaded_task
def find_driver_path():
    null_path = '/dev/null'
    bits = 64 if maxsize > 2**32 else 32
    if platform.startswith('linux'):
        identifier = 'linux%s' % bits
    elif platform == 'win32':
        identifier = 'win%s' % bits
        null_path = 'NUL'
    elif platform == 'darwin':
        identifier = 'macos'
    else:
        log('info',
            'Platform %s not identified. You will have to download and '
            'install your own webdriver from %s.' % (platform, URL_GECKODRIVER))
        return None, None  # match the two-value return used on success
    driver_path = join(PATH_RESOURCES, 'geckodriver-%s' % identifier)
    if not exists(driver_path):
        page = get(URL_GECKODRIVER).text
        url_driver = urljoin(
            URL_GECKODRIVER,
            findall('href="(/mozilla/geckodriver/releases/download/.+?' +
                    identifier + '.+?)"', page)[0])
        compressed_path = join(PATH_RESOURCES, url_driver.rsplit('/', 1)[1])
        download_file(url_driver, compressed_path)
        if compressed_path.endswith('.zip'):
            from zipfile import ZipFile
            with ZipFile(compressed_path, 'r') as zf:
                with open(driver_path, 'wb') as f:
                    f.write(zf.read(zf.namelist()[0]))
        else:
            from tarfile import open as tar_open
            with tar_open(compressed_path, 'r:gz') as tf:
                with tf.extractfile('geckodriver') as gd, open(driver_path, 'wb') as f:
                    f.write(gd.read())
        remove(compressed_path)
        chmod(driver_path, 0o755)  # octal permissions; the decimal literal 755 sets the wrong mode
    return driver_path, null_path
def file_backup(path: str, backup_dir: str):
    # remove slash at the end of the path
    if path.endswith("/"):
        path = path[:-1]
    print_verbose(f"Backup of {path} has been started!")
    base = os.path.basename(path)
    filename = f"{backup_dir}/{base}.tar.gz"
    # check if the backup is already done
    if os.path.exists(filename):
        print_verbose(f"Backup of {path} has been skipped!")
        return [path, " ", " ", f"{Fore.YELLOW}Skipped{Fore.RESET}"]
    if os.path.isfile(path):
        # file backup
        copyfile(path, filename)
        return [
            path,
            sizeof(path),
            "-",
            f"{Fore.GREEN}OK{Fore.RESET}"
            if os.path.exists(filename)
            else f"{Fore.RED}Failed{Fore.RESET}",
        ]
    else:
        # directory backup
        with tar_open(filename, "w:gz") as target_fd:
            target_fd.add(path, arcname=base)
        return [
            path,
            sizeof(path),
            convert_size(os.path.getsize(filename)),
            f"{Fore.GREEN}OK{Fore.RESET}"
            if os.path.exists(filename)
            else f"{Fore.RED}Failed{Fore.RESET}",
        ]
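
# file_backup usage sketch (hypothetical paths): plain files are copied,
# directories are packed into <backup_dir>/<name>.tar.gz, and the returned
# row follows the [path, size, compressed size, status] table layout.
row = file_backup("/etc/hosts", "/var/backups")
print(row)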
def run(self, arguments, directory, settings):
    url = settings.get_value("appstore", "url").rstrip("/")
    user = settings.get_value("appstore", "user")
    password = settings.get_value("appstore", "password")
    archive_dir = arguments.archive
    app_name = basename(archive_dir).split(".")[0]
    # parse the appinfo/info.xml from the archive to fill in stuff required
    # for the release
    archive = tar_open(archive_dir)
    # TODO: we need app validation like:
    # * name of the folder is the same as the id in info.xml
    # * no private api usage
    # * all needed fields for info.xml present
    # * size not bigger than allowed to upload
    info_xml = archive.extractfile(
        dict(zip(archive.getnames(), archive.getmembers()))["%s/appinfo/info.xml" % app_name]
    )
    parser = InfoParser()
    result = parser.parse(info_xml.read())
    # no ocsid present means not yet in the appstore so let's upload it
    params = {
        "name": result["name"],
        "type": result["category"],
        "depend": result["requiremin"],
        "downloadtype1": 0,
        "licensetype": result["licence"],
        "version": result["version"],
    }
    if result["homepage"] != "":
        params["homepage"] = result["homepage"]
        params["homepagetype"] = "Homepage"
    if result["repository"] != "":
        params["homepage2"] = result["repository"]
        params["homepagetype2"] = "Version Control"
    if result["bugs"] != "":
        params["homepage3"] = result["bugs"]
        params["homepagetype3"] = "Issue Tracker"
    if result["requiremax"] != "":
        params["depend2"] = result["requiremax"]
    if result["ocsid"] == "":
        create_url = "%s/content/add" % url
        response = requests.post(create_url, params=params, auth=(user, password))
        code = self.get_status_code(response)
        if code == "102":
            raise Exception("Not authorized! Check your credentials.")
        # get ocsid
        tree = ElementTree.fromstring(response.text)
        ocsid = tree.findtext(".//data/content/id")
        print(
            "Please add <ocsid>%s</ocsid> to your appinfo/info.xml to "
            "be able to update the uploaded app" % ocsid
        )
    else:
        update_url = "%s/content/edit/%s" % (url, result["ocsid"])
        response = requests.post(update_url, params=params, auth=(user, password))
        code = self.get_status_code(response)
        if code == "102":
            raise Exception("Not authorized! Check your credentials.")
    upload_file_url = "%s/content/uploaddownload/%s" % (url, result["ocsid"])
    file = {"localfile": open(archive_dir, "rb")}
    # the original call omitted the target URL; post to the upload endpoint
    # with the same credentials used above
    response = requests.post(upload_file_url, files=file, auth=(user, password))
    code = self.get_status_code(response)
    if code == "101":
        raise Exception("Could not upload file. Is the archive bigger "
                        "than 10Mb?")
    elif code == "103":
        raise Exception("Not authorized! Check your credentials.")
if __name__ == "__main__": parser = ArgumentParser('Input source molecule') parser.add_argument('-sourcefile', type=str) parser.add_argument('-input_format', type=str) parser.add_argument('-target_sbml', type=str) parser.add_argument('-output', type=str) params = parser.parse_args() if params.input_format == 'sbml': rpmerge = rpSBML.mergeSBMLFiles(params.sourcefile, params.target_sbml, params.output) else: with TemporaryDirectory() as tmpInputFolder: with TemporaryDirectory() as tmpOutputFolder: tar = tar_open(params.sourcefile, 'r') tar.extractall(path=tmpInputFolder) tar.close() if len(glob(tmpInputFolder + '/*')) == 0: print('*** ERROR: Input file is empty') for sbml_file in glob(tmpInputFolder + '/*'): out_f = os_path.basename(params.sourcefile).split( '.')[0] + '-' + os_path.basename( params.target_sbml).split('.')[0] rpmerge = rpSBML.mergeSBMLFiles( sbml_file, params.target_sbml, tmpOutputFolder + '/' + out_f) # with TemporaryDirectory() as tmpInputFolder: # with TemporaryDirectory() as tmpOutputFolder: # tar = tar_open(input_tar, 'r')
def create_tar(self, output, files):
    with tar_open(output, 'w:' + self.TAR_MODE[self.compression]) as tar:
        for fname, dest in files.items():
            logger.debug('Adding ' + fname + ' as ' + dest)
            tar.add(fname, dest)
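
# Hypothetical usage of create_tar, assuming the enclosing class defines a
# TAR_MODE mapping (e.g. {'gzip': 'gz', 'bzip2': 'bz2'}) and a matching
# self.compression; files maps source paths to in-archive names.
# packer.create_tar('build.tar.gz', {'dist/app.bin': 'app.bin'})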