# Standard-library dependencies shared by the download scripts below.
import os
import shutil
import tarfile
import zipfile
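# The download scripts below rely on a handful of small helpers (`makedirs`,
# `check_sha1`, `download`, `parse_args`) that are provided by the surrounding
# project rather than by this listing. What follows is a minimal sketch of what
# they are assumed to look like, not the project's actual implementation.
import argparse
import hashlib
import urllib.request


def makedirs(path):
    """Create a directory tree, doing nothing if it already exists."""
    os.makedirs(path, exist_ok=True)


def check_sha1(filename, sha1_hash):
    """Return True if the SHA-1 digest of `filename` matches `sha1_hash`."""
    sha1 = hashlib.sha1()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha1.update(chunk)
    return sha1.hexdigest() == sha1_hash


def download(url, path=None, overwrite=False, sha1_hash=None):
    """Download `url` into the directory `path` and return the local file path."""
    fname = os.path.join(path or '.', url.split('/')[-1])
    if overwrite or not os.path.exists(fname) or \
            (sha1_hash and not check_sha1(fname, sha1_hash)):
        print('Downloading', url)
        urllib.request.urlretrieve(url, fname)
    if sha1_hash and not check_sha1(fname, sha1_hash):
        raise UserWarning('Downloaded file {} failed the SHA-1 check.'.format(fname))
    return fname


def parse_args():
    """Assumed command-line interface: a single optional --download-dir flag."""
    parser = argparse.ArgumentParser(description='Initialize segmentation datasets.')
    parser.add_argument('--download-dir', default=None,
                        help='directory on disk where the dataset is (to be) stored')
    return parser.parse_args()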
def download_ade(path, overwrite=False):
    _AUG_DOWNLOAD_URLS = [
        ('http://data.csail.mit.edu/places/ADEchallenge/ADEChallengeData2016.zip',
         '219e1696abb36c8ba3a3afe7fb2f4b4606a897c7'),
        ('http://data.csail.mit.edu/places/ADEchallenge/release_test.zip',
         'e05747892219d10e9243933371a497e905a4860c'),
    ]
    download_dir = os.path.join(path, 'downloads')
    makedirs(download_dir)
    for url, checksum in _AUG_DOWNLOAD_URLS:
        filename = download(url, path=download_dir, overwrite=overwrite, sha1_hash=checksum)
        # extract
        with zipfile.ZipFile(filename, "r") as zip_ref:
            zip_ref.extractall(path=path)
def download_voc(path, overwrite=False):
    _DOWNLOAD_URLS = [
        ('http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar',
         '34ed68851bce2a36e2a223fa52c661d592c66b3c'),
        ('http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar',
         '41a8d6e12baa5ab18ee7f8f8029b9e11805b4ef1'),
        ('http://host.robots.ox.ac.uk/pascal/VOC/voc2012/VOCtrainval_11-May-2012.tar',
         '4e443f8a2eca6b1dac8a6c57641b67dd40621a49'),
    ]
    makedirs(path)
    for url, checksum in _DOWNLOAD_URLS:
        filename = download(url, path=path, overwrite=overwrite, sha1_hash=checksum)
        # extract
        with tarfile.open(filename) as tar:
            tar.extractall(path=path)
def download_coco(path, overwrite=False):
    _DOWNLOAD_URLS = [
        # ('http://images.cocodataset.org/zips/train2017.zip',
        #  '10ad623668ab00c62c096f0ed636d6aff41faca5'),
        ('http://images.cocodataset.org/annotations/annotations_trainval2017.zip',
         '8551ee4bb5860311e79dace7e79cb91e432e78b3'),
        # ('http://images.cocodataset.org/zips/val2017.zip',
        #  '4950dc9d00dbe1c933ee0170f5797584351d2a41'),
        # ('http://images.cocodataset.org/annotations/stuff_annotations_trainval2017.zip',
        #  '46cdcf715b6b4f67e980b529534e79c2edffe084'),
        # test2017.zip, for those who want to enter the competition.
        # ('http://images.cocodataset.org/zips/test2017.zip',
        #  '4e443f8a2eca6b1dac8a6c57641b67dd40621a49'),
    ]
    makedirs(path)
    for url, checksum in _DOWNLOAD_URLS:
        filename = download(url, path=path, overwrite=overwrite, sha1_hash=checksum)
        # extract
        with zipfile.ZipFile(filename) as zf:
            zf.extractall(path=path)
def download_aug(path, overwrite=False):
    _AUG_DOWNLOAD_URLS = [
        ('http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/semantic_contours/benchmark.tgz',
         '7129e0a480c2d6afb02b517bb18ac54283bfaa35'),
    ]
    makedirs(path)
    for url, checksum in _AUG_DOWNLOAD_URLS:
        filename = download(url, path=path, overwrite=overwrite, sha1_hash=checksum)
        # extract
        with tarfile.open(filename) as tar:
            tar.extractall(path=path)
        shutil.move(os.path.join(path, 'benchmark_RELEASE'),
                    os.path.join(path, 'VOCaug'))
        filenames = ['VOCaug/dataset/train.txt', 'VOCaug/dataset/val.txt']
        # generate trainval.txt by concatenating the train and val splits
        with open(os.path.join(path, 'VOCaug/dataset/trainval.txt'), 'w') as outfile:
            for fname in filenames:
                fname = os.path.join(path, fname)
                with open(fname) as infile:
                    for line in infile:
                        outfile.write(line)
def download_sbu(path, overwrite=False):
    _DOWNLOAD_URLS = [
        'http://www3.cs.stonybrook.edu/~cvl/content/datasets/shadow_db/SBU-shadow.zip',
    ]
    download_dir = os.path.join(path, 'downloads')
    makedirs(download_dir)
    for url in _DOWNLOAD_URLS:
        # note: the archive is saved directly under `path`; `download_dir` is
        # created but not used here
        filename = download(url, path=path, overwrite=overwrite)
        # extract
        with zipfile.ZipFile(filename, "r") as zf:
            zf.extractall(path=path)
        print("Extracted", filename)


if __name__ == '__main__':
    args = parse_args()
    # `root_path` is assumed to be defined at module level in the original
    # script as the project root directory.
    default_dir = os.path.join(root_path, 'datasets/sbu')
    if args.download_dir is not None:
        _TARGET_DIR = args.download_dir
    else:
        _TARGET_DIR = default_dir
    makedirs(_TARGET_DIR)
    if os.path.exists(default_dir):
        print('{} already exists!'.format(default_dir))
    else:
        try:
            os.symlink(_TARGET_DIR, default_dir)
        except Exception as e:
            print(e)
    download_sbu(_TARGET_DIR, overwrite=False)
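# Example invocation, assuming the SBU block above is saved as its own script
# (e.g. sbu_shadow.py, a hypothetical name) with the helpers on the import path:
#
#   python sbu_shadow.py --download-dir ~/data/sbu
#
# This downloads SBU-shadow.zip, extracts it under the target directory, and
# symlinks datasets/sbu in the project root to that directory.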
def download_city(path, overwrite=False):
    _CITY_DOWNLOAD_URLS = [
        ('gtFine_trainvaltest.zip', '99f532cb1af174f5fcc4c5bc8feea8c66246ddbc'),
        ('leftImg8bit_trainvaltest.zip', '2c0b77ce9933cc635adda307fbba5566f5d9d404'),
    ]
    download_dir = os.path.join(path, 'downloads')
    makedirs(download_dir)
    # Cityscapes requires a registered account, so the two zip archives are
    # expected to have been downloaded manually beforehand; this function only
    # verifies their checksums and extracts them.
    for filename, checksum in _CITY_DOWNLOAD_URLS:
        if not check_sha1(filename, checksum):
            raise UserWarning('File {} is downloaded but the content hash does not match. '
                              'The repo may be outdated or download may be incomplete. '
                              'If the "repo_url" is overridden, consider switching to '
                              'the default repo.'.format(filename))
        # extract
        with zipfile.ZipFile(filename, "r") as zip_ref:
            zip_ref.extractall(path=path)
        print("Extracted", filename)


if __name__ == '__main__':
    args = parse_args()
    makedirs(os.path.expanduser('~/.torch/datasets'))
    # Assumption: `_TARGET_DIR` is the default dataset location used by the
    # rest of the project; the original script defines it at module level.
    _TARGET_DIR = os.path.expanduser('~/.torch/datasets/citys')
    if args.download_dir is not None:
        if os.path.isdir(_TARGET_DIR):
            # remove an existing symlink before recreating it
            os.remove(_TARGET_DIR)
        # make symlink
        os.symlink(args.download_dir, _TARGET_DIR)
    else:
        download_city(_TARGET_DIR, overwrite=False)
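# Example invocation, assuming the Cityscapes block above is saved as its own
# script (e.g. cityscapes.py, a hypothetical name): register on the Cityscapes
# website (cityscapes-dataset.com), download gtFine_trainvaltest.zip and
# leftImg8bit_trainvaltest.zip manually, place them in the working directory,
# and then run
#
#   python cityscapes.py
#
# to verify the checksums and extract them, or
#
#   python cityscapes.py --download-dir /path/to/extracted/cityscapes
#
# to symlink an existing copy into the default dataset location instead.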