def ensure_local(index_url, file_path, local_cache_path, force=False, progress_advance=None):
    """Return the local path of *file_path*, downloading it when needed.

    Args:
        index_url: Base URL of the remote serverfiles index.
        file_path: Path components identifying the remote file.
        local_cache_path: Directory used as the local cache root.
        force: When True, re-download even if a cached copy exists.
        progress_advance: Optional progress callback passed to serverfiles.
    """
    remote = ServerFiles(server=index_url)
    cache = LocalFiles(local_cache_path, serverfiles=remote)
    if force:
        cache.download(*file_path, callback=progress_advance)
    return cache.localpath_download(*file_path, callback=progress_advance)
def __init__(self):
    """Set up remote and local file managers for the bundled UDPipe models."""
    # Models are cached next to this module, in "udpipe_models".
    models_dir = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "udpipe_models"
    )
    self.serverfiles = ServerFiles(self.server_url)
    self.localfiles = LocalFiles(models_dir, serverfiles=self.serverfiles)
def ensure_local(domain, filename, force=False, progress_advance=None):
    """Return a local path for *filename* under *domain*, fetching it first.

    Args:
        domain: Remote domain (top-level folder) the file lives in.
        filename: Name of the file to fetch.
        force: When True, download a fresh copy before resolving the path.
        progress_advance: Optional progress callback passed to serverfiles.
    """
    remote = ServerFiles(server=INDEX_URL)
    cache = LocalFiles(local_cache_path(), serverfiles=remote)
    if force:
        cache.download(domain, filename, callback=progress_advance)
    return cache.localpath_download(domain, filename, callback=progress_advance)
def tag_list():
    """List of available tags and their pretty-print with indices.

    Returns:
        tuple: ``(all_tags, pretty)`` where ``all_tags`` is the sorted list
        of unique tags found in the remote dataset index and ``pretty`` is a
        multi-column string pairing each tag with its index.
    """
    server_url = "http://butler.fri.uni-lj.si/datasets/"
    PATH = os.path.join(data_dir(), "datasets")
    local_files = LocalFiles(PATH, serverfiles=ServerFiles(server=server_url))
    local_info = local_files.serverfiles.allinfo()
    nested_tags = [i["tags"] for i in local_info.values() if i["tags"]]
    all_tags = sorted(set(itertools.chain(*nested_tags)))
    if not all_tags:
        # No dataset declares a tag; avoid max() on an empty sequence below.
        return [], ""
    w = max(len(t) for t in all_tags)
    # Columns per ~75-char row; clamp to at least 1 so the slicing step in
    # range() below can never be zero (which would raise ValueError).
    n = max(1, 75 // (w + 5))
    s = [
        "{:>3}-{:<{width}}".format(i, t, width=w)
        for i, t in enumerate(all_tags)
    ]
    c = "\n".join("".join(s[x:x + n]) for x in range(0, len(s), n))
    return all_tags, c
import numpy as np
from serverfiles import LocalFiles, ServerFiles

from Orange.data import Table, Domain, StringVariable, DiscreteVariable
from Orange.data import filter as table_filter
from Orange.misc.environ import data_dir
from orangecontrib.bioinformatics.widgets.utils.data import TableAnnotation

# Module-level configuration for the local GEO dataset cache.
domain = 'geo'
# Directory holding the downloaded GEO datasets.
_local_cache_path = os.path.join(data_dir(), domain)
# On-disk copy of the remote index (see info_cache below).
_all_info_file = os.path.join(_local_cache_path, '__INFO__')
_server_url = 'http://download.biolab.si/datasets/geo/'
# Format with a PubMed id to get a citation URL.
pubmed_url = 'http://www.ncbi.nlm.nih.gov/pubmed/{}'

server_files = ServerFiles(server=_server_url)
local_files = LocalFiles(_local_cache_path, serverfiles=server_files)


def is_cached(gds_id):
    # type: (str) -> bool
    """Return True if dataset *gds_id* already has a cached .tab file."""
    return os.path.exists(os.path.join(_local_cache_path, gds_id + '.tab'))


def info_cache(f):
    """Store content of __INFO__ file locally."""
    # NOTE(review): the wrapper appears truncated in this view — it populates
    # the cache file but never calls *f*, and the decorator never returns
    # *wrapper*; the remainder of the definition is presumably outside this
    # chunk. Left byte-identical.
    def wrapper():
        # Refresh the cached index only when it is missing or empty.
        if not os.path.isfile(_all_info_file) or os.path.getsize(_all_info_file) == 0:
            with open(_all_info_file, 'w') as fp:
                json.dump(list(server_files.allinfo().items()), fp)
def list_remote(self):
    # type: () -> Dict[Tuple[str, str], dict]
    """Fetch the full remote file index from the configured server."""
    return ServerFiles(server=self.INDEX_URL).allinfo()
def list_remote(server: str) -> Dict[Tuple[str, ...], dict]:
    """Return the complete file index exposed by *server*."""
    return ServerFiles(server).allinfo()