def _download_and_uncompress_dataset(self, destination: str, url: str):
    """
    Downloads a dataset and uncompresses it.

    Args:
        destination (:obj:`str`): The cached directory of the dataset.
        url (:obj:`str`): The URL from which the dataset is downloaded.
    """
    if not os.path.exists(destination):
        # Download the dataset package into the cache root and extract it if
        # it is a supported archive format.
        dataset_package = download(url=url, path=DATA_HOME)
        if is_xarfile(dataset_package):
            unarchive(dataset_package, DATA_HOME)
    else:
        logger.info("Dataset {} already cached.".format(destination))
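# The sketch below is a hypothetical call site, not part of the original source:
# it assumes this method lives on a dataset class with access to DATA_HOME, and
# the dataset name and URL are placeholder values used only for illustration.
def _example_load_dataset(self):
    # Resolve the expected cache directory for the dataset under DATA_HOME.
    destination = os.path.join(DATA_HOME, "squad")
    # Download and extract only if the cached directory is missing; otherwise
    # the method just logs that the dataset is already cached.
    self._download_and_uncompress_dataset(
        destination=destination,
        url="https://example.com/datasets/squad.tar.gz",  # placeholder URL
    )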
def download_file_and_uncompress(self, url: str, save_path: str, print_progress: bool):
    """
    Downloads a file from ``url`` into a temporary directory and uncompresses it
    into ``save_path``, optionally printing progress bars.
    """
    with utils.generate_tempdir() as _dir:
        # Step 1: download the archive into the temporary directory.
        if print_progress:
            with log.ProgressBar('Download {}'.format(url)) as bar:
                for path, ds, ts in utils.download_with_progress(url=url, path=_dir):
                    bar.update(float(ds) / ts)
        else:
            path = utils.download(url=url, path=_dir)

        # Step 2: extract the downloaded archive into save_path.
        if print_progress:
            with log.ProgressBar('Decompress {}'.format(path)) as bar:
                for path, ds, ts in xarfile.unarchive_with_progress(name=path, path=save_path):
                    bar.update(float(ds) / ts)
        else:
            path = xarfile.unarchive(name=path, path=save_path)
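# Likewise a hypothetical usage sketch, not taken from the original source: it
# assumes this method belongs to a class instance with access to the same
# utils/log/xarfile helpers, and the URL and save_path are placeholders.
def _example_download_archive(self):
    # Download the archive into a temporary directory and unpack it into
    # save_path, with progress bars printed to the console.
    self.download_file_and_uncompress(
        url="https://example.com/modules/lac.tar.gz",  # placeholder URL
        save_path="./modules",  # placeholder destination directory
        print_progress=True,
    )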