def clean(setup_dir, clutter_patterns, clutter_dirs):
    """Remove build clutter from ``setup_dir``.

    Patterns ending in '/' are treated as directory names (the trailing
    slash is stripped); all other patterns are file globs removed
    recursively under ``setup_dir``. Files are removed first, then the
    directories (pattern-derived ones followed by ``clutter_dirs``).
    """
    print('[setup] clean()')
    file_patterns = []
    dir_patterns = []
    for pat in clutter_patterns:
        if pat.endswith('/'):
            # trailing '/' marks a directory pattern; drop the slash
            dir_patterns.append(pat[:-1])
        else:
            file_patterns.append(pat)
    util_path.remove_files_in_dir(setup_dir, file_patterns, recursive=True,
                                  verbose=VERBOSE)
    for dpath in dir_patterns + clutter_dirs:
        util_path.delete(dpath, verbose=VERBOSE, print_exists=False)
def clean(setup_dir, clutter_patterns, clutter_dirs):
    """Delete clutter files and directories produced by the build.

    A trailing '/' on an entry of ``clutter_patterns`` marks it as a
    directory name; the remaining entries are file patterns removed
    recursively from ``setup_dir``. Directory removal happens after the
    file sweep, covering both pattern-derived dirs and ``clutter_dirs``.
    """
    print('[setup] clean()')
    dir_pats = [p[:-1] for p in clutter_patterns if p.endswith('/')]
    file_pats = [p for p in clutter_patterns if not p.endswith('/')]
    util_path.remove_files_in_dir(setup_dir, file_pats, recursive=True,
                                  verbose=VERBOSE)
    for target in dir_pats + clutter_dirs:
        util_path.delete(target, verbose=VERBOSE, print_exists=False)
def grab_zipped_url(zipped_url, ensure=True, appname='utool',
                    download_dir=None, force_commonprefix=True, cleanup=False,
                    redownload=False, spoof=False):
    r"""Download a .zip / .tar.gz archive from ``zipped_url`` and extract it.

    Args:
        zipped_url (str): url which must point at a .zip or .tar.gz file
        ensure (bool): eagerly download/extract when True (default = True)
        appname (str): application resource name (default = 'utool')
        download_dir (str): directory to download into; defaults to the
            per-application resource directory
        force_commonprefix (bool): forwarded to unarchive_file
            (default = True)
        cleanup (bool): delete the archive after extraction (default = False)
        redownload (bool): force a fresh download and extraction
            (default = False)
        spoof (bool): spoof the user agent when downloading (default = False)

    Returns:
        str: unix-style path of the extracted data directory

    CommandLine:
        python -m utool.util_grabdata --exec-grab_zipped_url --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_grabdata import *  # NOQA
        >>> zipped_url = 'https://lev.cs.rpi.edu/public/data/testdata.zip'
        >>> data_dir = grab_zipped_url(zipped_url)
    """
    zipped_url = clean_dropbox_link(zipped_url)
    archive_fname = split(zipped_url)[1]
    extracted_name = split_archive_ext(archive_fname)[0]
    # Resolve where the archive will be downloaded to
    if download_dir is None:
        download_dir = util_cplat.get_app_resource_dir(appname)
    # The archive is expected to unpack into this directory
    extracted_dpath = join(download_dir, extracted_name)
    if ensure or redownload:
        if redownload:
            # Blow away any previous extraction so it is rebuilt fresh
            util_path.remove_dirs(extracted_dpath)
        util_path.ensurepath(download_dir)
        if redownload or not exists(extracted_dpath):
            # Fetch the archive (if needed) and unpack it
            archive_fpath = realpath(join(download_dir, archive_fname))
            if redownload or not exists(archive_fpath):
                download_url(zipped_url, archive_fpath, spoof=spoof)
            unarchive_file(archive_fpath, force_commonprefix)
            if cleanup:
                # Contents are extracted; the archive is no longer needed
                util_path.delete(archive_fpath)
    if cleanup:
        util_path.assert_exists(extracted_dpath)
    return util_path.unixpath(extracted_dpath)