def _describe_system():
    import platform as pl
    from datalad import get_encoding_info
    from datalad.utils import get_linux_distribution
    try:
        dist = get_linux_distribution()
    except Exception as exc:
        lgr.warning("Failed to get distribution information: %s", exc_str(exc))
        dist = tuple()

    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join([
            _t2s(dist),
            _t2s(pl.mac_ver()),
            _t2s(pl.win32_ver()),
        ]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
def _describe_system():
    import platform as pl
    from datalad import get_encoding_info
    from datalad.utils import get_linux_distribution
    try:
        dist = get_linux_distribution()
    except Exception as exc:
        ce = CapturedException(exc)
        lgr.warning("Failed to get distribution information: %s", ce)
        dist = tuple()

    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join([
            _t2s(dist),
            _t2s(pl.mac_ver()),
            _t2s(pl.win32_ver()),
        ]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
        # filesystem type of a few key locations (CWD, TMP, HOME)
        'filesystem': {
            l: _get_fs_type(l, p)
            for l, p in [
                ('CWD', Path.cwd()),
                ('TMP', Path(tempfile.gettempdir())),
                ('HOME', Path.home()),
            ]
        },
    }
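# The 'filesystem' entry above relies on a `_get_fs_type` helper that is not
# shown in this listing.  A minimal sketch of what such a helper could look
# like, assuming `psutil` is available: the function name and the
# (label, path) call signature come from the call site above, everything
# else is an illustrative assumption, not datalad's actual implementation.
import os.path as op


def _get_fs_type(label, path):
    # `label` ('CWD', 'TMP', 'HOME') is accepted to match the call site but
    # is not needed for the lookup itself in this sketch.
    try:
        import psutil
        path = op.realpath(str(path))
        best = None
        for part in psutil.disk_partitions(all=True):
            # choose the longest mountpoint that is a prefix of the path
            if path.startswith(part.mountpoint) and (
                    best is None or len(part.mountpoint) > len(best.mountpoint)):
                best = part
        return {
            'path': path,
            'type': best.fstype if best else None,
            'mount': best.mountpoint if best else None,
        }
    except Exception as exc:
        # psutil missing or detection failed -- report instead of crashing
        return {'path': str(path), 'error': str(exc)}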
def _describe_system():
    import platform as pl
    from datalad import get_encoding_info
    if hasattr(pl, 'dist'):
        dist = pl.dist()
    else:
        # Python 3.8 removed .dist but recommended "distro" is slow, so we
        # try it only if needed
        try:
            import distro
            dist = distro.linux_distribution(full_distribution_name=False)
        except ImportError:
            lgr.info(
                "Please install 'distro' package to obtain distribution information"
            )
            dist = tuple()
        except Exception as exc:
            lgr.warning(
                "No distribution information will be provided since 'distro' "
                "fails to import/run: %s", exc_str(exc)
            )
            dist = tuple()

    return {
        'type': os.name,
        'name': pl.system(),
        'release': pl.release(),
        'version': pl.version(),
        'distribution': ' '.join([
            _t2s(dist),
            _t2s(pl.mac_ver()),
            _t2s(pl.win32_ver()),
        ]).rstrip(),
        'max_path_length': get_max_path_length(getpwd()),
        'encoding': get_encoding_info(),
    }
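# All variants above squeeze the nested tuples returned by
# `platform.mac_ver()` / `platform.win32_ver()` (and the distribution tuple)
# into one string via a `_t2s` helper that is not part of this listing.
# A plausible minimal sketch, assuming its only job is to flatten a possibly
# nested tuple into a '/'-joined string while dropping empty fields:
def _t2s(t):
    # e.g. ('10.15.7', ('', '', ''), 'x86_64') -> '10.15.7/x86_64'
    res = []
    for e in t:
        if isinstance(e, tuple):
            es = _t2s(e)
            if es:
                res.append('(%s)' % es)
        elif e:
            res.append(str(e))
    return '/'.join(res)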
def time_addurls(self, exclude_autometa):
    lgr.warning("CSV: " + self.listfile.read_text())
    ret = dl.addurls(self.ds, str(self.listfile), '{url}', '{filename}',
                     exclude_autometa=exclude_autometa)
    assert not any(r['status'] == 'error' for r in ret)
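# The '{url}' / '{filename}' format strings above imply that `self.listfile`
# is a table (e.g. CSV) with `url` and `filename` columns.  A minimal sketch
# of a setup step that would produce such a file -- the helper name, paths
# and URLs are illustrative assumptions, not the actual benchmark fixture:
import csv
from pathlib import Path


def make_listfile(listfile: Path, n: int = 3) -> None:
    # write a tiny CSV with exactly the two columns addurls is told to use
    with listfile.open('w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=['url', 'filename'])
        writer.writeheader()
        for i in range(n):
            writer.writerow({
                'url': 'https://example.com/file%d.dat' % i,  # placeholder
                'filename': 'file%d.dat' % i,
            })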
annex = AnnexRepo('.', create=False, init=False)

# enable datalad special remote
urls_to_register = defaultdict(list)  # key: urls
try:
    annex.call_annex(["enableremote", "datalad"])
    # go through each and see where urls aren't yet under web
    # seems might have also --in=datalad to restrict
    w = annex.whereis([], options=['--all'], output='full')
    lgr.info("Got %d entries", len(w))
    for k, rec in tqdm(w.items()):
        datalad_urls = get_remote_urls(rec, 'datalad')
        web_urls = set(get_remote_urls(rec, 'web'))
        for url in datalad_urls:
            if url not in web_urls:
                if 'openneuro.s3' in url or 'openfmri.s3' in url:
                    urls_to_register[k].append(url)
                else:
                    lgr.warning("Found unexpected url %s" % url)
finally:
    # disable datalad special remote
    annex.remove_remote("datalad")  # need to disable it first

lgr.info("Got %d entries which could get new urls", len(urls_to_register))
for k, urls in tqdm(urls_to_register.items()):
    for url in urls:
        annex.call_annex(
            ["registerurl", '-c', 'annex.alwayscommit=false', k, url])
# to cause annex to commit all the changes
annex.call_annex(["merge"])
annex.gc(allow_background=False)
urls_to_register = defaultdict(list)  # key: urls
try:
    annex._annex_custom_command([], ["git", "annex", "enableremote", "datalad"])
    # go through each and see where urls aren't yet under web
    # seems might have also --in=datalad to restrict
    w = annex.whereis([], options=['--all'], output='full')
    lgr.info("Got %d entries", len(w))
    for k, rec in tqdm(w.items()):
        datalad_urls = get_remote_urls(rec, 'datalad')
        web_urls = set(get_remote_urls(rec, 'web'))
        for url in datalad_urls:
            if url not in web_urls:
                if 'openneuro.s3' in url or 'openfmri.s3' in url:
                    urls_to_register[k].append(url)
                else:
                    lgr.warning("Found unexpected url %s" % url)
finally:
    # disable datalad special remote
    annex.remove_remote("datalad")  # need to disable it first

lgr.info(
    "Got %d entries which could get new urls", len(urls_to_register)
)
for k, urls in tqdm(urls_to_register.items()):
    for url in urls:
        annex._annex_custom_command(
            [],
            ["git", "annex", "registerurl",
             '-c', 'annex.alwayscommit=false', k, url]
        )
# to cause annex to commit all the changes
annex._annex_custom_command([], ["git", "annex", "merge"])
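# Both versions of this script use a `get_remote_urls(rec, remote)` helper
# that is not included in the listing.  With output='full', annex whereis
# returns, per key, a mapping of remote UUIDs to records carrying a
# 'description' and a list of 'urls'.  A minimal sketch under that
# assumption (special remotes are typically described as "[name]"):
def get_remote_urls(rec, remote):
    for r in rec.values():
        if r.get('description') in (remote, '[%s]' % remote):
            return r.get('urls', [])
    return []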