def download_dataset(path: str, otb50: bool = False):
    """Download the OTB sequence archives into *path*.

    Every sequence in the module-level ``_SEQUENCES`` table is fetched as a
    ZIP archive from ``_BASE_URL`` and uncompressed into *path*. Sequences
    whose target directory already exists are skipped.

    Args:
        path (str): Destination directory for the sequence data.
        otb50 (bool, optional): When True, restrict the download to the
            OTB-50 subset listed in ``_OTB50_SUBSET``. Defaults to False.

    Raises:
        DatasetException: If a sequence archive cannot be downloaded or
            extracted.
    """
    from vot.utilities.net import download_uncompress, join_url, NetworkException

    dataset = _SEQUENCES
    if otb50:
        dataset = {k: v for k, v in dataset.items() if k in _OTB50_SUBSET}

    with Progress("Downloading", len(dataset)) as progress:
        for name, metadata in dataset.items():
            # Several logical sequences may share one archive; "base" names it.
            name = metadata.get("base", name)
            if not os.path.isdir(os.path.join(path, name)):
                try:
                    download_uncompress(join_url(_BASE_URL, "%s.zip" % name), path)
                except NetworkException as ex:
                    # Typo fix: original said "Unable do download".
                    raise DatasetException(
                        "Unable to download sequence data") from ex
                except IOError as ex:
                    raise DatasetException(
                        "Unable to extract sequence data, is the target directory writable and do you have enough space?"
                    ) from ex
            progress.relative(1)
def download_bundle(url: str, path: str = "."):
    """Downloads a dataset bundle as a ZIP file and decompresses it.

    Args:
        url (str): Source bundle URL; must end with ".zip".
        path (str, optional): Destination directory. Defaults to ".".

    Raises:
        DatasetException: If the bundle cannot be downloaded or is not supported.
    """
    from vot.utilities.net import download_uncompress, NetworkException

    if not url.endswith(".zip"):
        raise DatasetException("Unknown bundle format")

    logger.info(
        'Downloading sequence bundle from "%s". This may take a while ...', url)

    try:
        download_uncompress(url, path)
    except NetworkException as e:
        # Chain the network error so the root cause survives; also fixes the
        # "Unable do download" typo and a stray quote in the original message.
        raise DatasetException(
            "Unable to download dataset bundle, please try to download the bundle manually from {} and uncompress it to {}"
            .format(url, path)) from e
    except IOError as e:
        raise DatasetException(
            "Unable to extract dataset bundle, is the target directory writable and do you have enough space?"
        ) from e
def download_dataset(dataset_name, path):
    """Download every sequence of a named VOT dataset into *path*.

    The dataset description (JSON) is fetched from the URL registered in
    ``VOT_DATASETS``. For each sequence the annotations archive is
    uncompressed into the sequence directory, and each channel archive is
    downloaded to a temporary ZIP, extracted, and removed.

    Args:
        dataset_name: Key into the ``VOT_DATASETS`` URL registry.
        path: Destination root directory for the sequences.
    """
    url = VOT_DATASETS[dataset_name]
    meta = download_json(url)
    base_url = get_base_url(url) + '/'
    for sequence in tqdm(meta['sequences']):
        sequence_directory = os.path.join(path, sequence['name'])
        os.makedirs(sequence_directory, exist_ok=True)
        # Groundtruth annotations are a separate per-sequence archive.
        annotations_url = join_url(base_url, sequence['annotations']['url'])
        download_uncompress(annotations_url, sequence_directory)
        for cname, channel in sequence['channels'].items():
            channel_directory = os.path.join(sequence_directory, cname)
            os.makedirs(channel_directory, exist_ok=True)
            channel_url = join_url(base_url, channel['url'])
            # Use os.path consistently (the rest of the function never uses
            # the osp alias).
            tmp_zip = os.path.join(channel_directory, f'{sequence["name"]}.zip')
            download_url(channel_url, tmp_zip)
            try:
                extract_files(tmp_zip, channel_directory)
            except zipfile.BadZipFile:
                # Message fix: the original line continuation embedded a
                # backslash/whitespace run inside the printed text.
                print(f'[Error]: Please download {sequence["name"]} video manually through the {channel_url}')
            # Remove the temporary archive whether or not extraction worked.
            os.remove(tmp_zip)
def download(self, url, path="."):
    """Download a dataset, either as one ZIP bundle or sequence by sequence.

    If *url* ends in ``.zip`` it is treated as a complete bundle and
    uncompressed into *path*. Otherwise *url* is assumed to point at a JSON
    dataset description: every sequence's annotations and channel archives
    are downloaded individually, a ``sequence`` properties file is written
    for each sequence, and a ``list.txt`` index is written in *path*.

    Args:
        url (str): Bundle ZIP URL or dataset description JSON URL.
        path (str, optional): Destination directory. Defaults to ".".

    Raises:
        DatasetException: If any download or extraction step fails.
    """
    from vot.utilities.net import download_uncompress, download_json, get_base_url, join_url, NetworkException

    if os.path.splitext(url)[1] == '.zip':
        # Single-bundle case: one archive holds the whole dataset.
        logger.info(
            'Downloading sequence bundle from "%s". This may take a while ...', url)
        try:
            download_uncompress(url, path)
        except NetworkException as e:
            # Chain the cause; also fixes the "Unable do download" typo and
            # a stray quote in the original message.
            raise DatasetException(
                "Unable to download dataset bundle, please try to download the bundle manually from {} and uncompress it to {}"
                .format(url, path)) from e
        except IOError as e:
            raise DatasetException(
                "Unable to extract dataset bundle, is the target directory writable and do you have enough space?"
            ) from e
    else:
        meta = download_json(url)
        logger.info('Downloading sequence dataset "%s" with %s sequences.',
                    meta["name"], len(meta["sequences"]))
        base_url = get_base_url(url) + "/"
        # Label typo fixed ("Donwloading").
        with Progress("Downloading", len(meta["sequences"])) as progress:
            for sequence in meta["sequences"]:
                sequence_directory = os.path.join(path, sequence["name"])
                os.makedirs(sequence_directory, exist_ok=True)
                data = {
                    'name': sequence["name"],
                    'fps': sequence["fps"],
                    'format': 'default'
                }
                annotations_url = join_url(base_url, sequence["annotations"]["url"])
                try:
                    download_uncompress(annotations_url, sequence_directory)
                except NetworkException as e:
                    raise DatasetException(
                        "Unable to download annotations bundle") from e
                except IOError as e:
                    raise DatasetException(
                        "Unable to extract annotations bundle, is the target directory writable and do you have enough space?"
                    ) from e
                for cname, channel in sequence["channels"].items():
                    channel_directory = os.path.join(sequence_directory, cname)
                    os.makedirs(channel_directory, exist_ok=True)
                    channel_url = join_url(base_url, channel["url"])
                    try:
                        download_uncompress(channel_url, channel_directory)
                    except NetworkException as e:
                        raise DatasetException(
                            "Unable to download channel bundle") from e
                    except IOError as e:
                        raise DatasetException(
                            "Unable to extract channel bundle, is the target directory writable and do you have enough space?"
                        ) from e
                    # Record where this channel's frames live in the metadata.
                    if "pattern" in channel:
                        data["channels." + cname] = cname + os.path.sep + channel["pattern"]
                    else:
                        data["channels." + cname] = cname + os.path.sep
                write_properties(os.path.join(sequence_directory, 'sequence'), data)
                progress.relative(1)
        # Index file listing all downloaded sequences.
        with open(os.path.join(path, "list.txt"), "w") as fp:
            for sequence in meta["sequences"]:
                fp.write('{}\n'.format(sequence["name"]))