def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs
    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    if not spack.mirror.MirrorCollection():
        tty.warn("No Spack mirrors are currently configured")
        return {}

    urls = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding buildcaches in %s" % mirror_dir)
            if os.path.exists(mirror_dir):
                files = os.listdir(mirror_dir)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = url_util.join(fetch_url_build_cache, file)
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches at %s" %
                    url_util.format(fetch_url_build_cache))
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'))
            for link in links:
                if re.search("spec.yaml", link):
                    urls.add(link)

    _cached_specs = []
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.append(spec)

    return _cached_specs
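# The branch on url_util.local_file_path above is what lets one loop serve
# both local file:// mirrors (walked with os.listdir) and remote ones
# (crawled with web_util.spider). A rough stand-in for that dispatch using
# only the standard library; this is a sketch, not spack's actual url_util:

from urllib.parse import urlparse

def local_file_path(url):
    """Return a filesystem path for file:// (or bare-path) URLs, else None."""
    parsed = urlparse(url)
    if parsed.scheme in ('file', ''):
        return parsed.path
    return None

assert local_file_path('file:///tmp/mirror/build_cache') == '/tmp/mirror/build_cache'
assert local_file_path('s3://bucket/build_cache') is None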
def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs
    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.warn("No Spack mirrors are currently configured")
        return {}

    path = str(spack.architecture.sys_type())
    urls = set()
    for mirror_name, mirror_url in mirrors.items():
        if mirror_url.startswith('file'):
            mirror = mirror_url.replace(
                'file://', '') + "/" + _build_cache_relative_path
            tty.msg("Finding buildcaches in %s" % mirror)
            if os.path.exists(mirror):
                files = os.listdir(mirror)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = 'file://' + mirror + '/' + file
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches on %s" % mirror_url)
            p, links = spider(mirror_url + "/" + _build_cache_relative_path)
            for link in links:
                if re.search("spec.yaml", link) and re.search(path, link):
                    urls.add(link)

    _cached_specs = []
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.append(spec)

    return _cached_specs
def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs
    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.warn("No Spack mirrors are currently configured")
        return {}

    path = str(spack.architecture.sys_type())
    urls = set()
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding buildcaches in %s" % mirror)
            if os.path.exists(mirror):
                files = os.listdir(mirror)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = 'file://' + mirror + '/' + file
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches on %s" % url)
            p, links = spider(url + "/build_cache")
            for link in links:
                if re.search("spec.yaml", link) and re.search(path, link):
                    urls.add(link)

    _cached_specs = set()
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = spack.spec.Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.add(spec)

    return _cached_specs
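# All three get_specs variants above share the same selection step: scan a
# mirror's build_cache listing and keep only the spec.yaml links. A minimal,
# self-contained sketch of that filter (the file names are hypothetical;
# note the unescaped '.' in 'spec.yaml' is a regex wildcard, matching
# exactly as the functions above do):

import re

listing = [
    'linux-ubuntu18.04-x86_64-gcc-7.4.0-zlib-1.2.11.spec.yaml',
    'linux-ubuntu18.04-x86_64-gcc-7.4.0-zlib-1.2.11.spack',
    'index.html',
]

spec_links = {name for name in listing if re.search('spec.yaml', name)}
print(spec_links)  # only the .spec.yaml entry survives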
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    with suffix .key or .pub
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding public keys in %s" % mirror_dir)
            files = os.listdir(str(mirror_dir))
            for file in files:
                if re.search(r'\.key', file) or re.search(r'\.pub', file):
                    link = url_util.join(fetch_url_build_cache, file)
                    keys.add(link)
        else:
            tty.msg("Finding public keys at %s" %
                    url_util.format(fetch_url_build_cache))
            # For s3 mirror need to request index.html directly
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'), depth=1)
            for link in links:
                if re.search(r'\.key', link) or re.search(r'\.pub', link):
                    keys.add(link)

    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            if os.path.exists(stage.save_filename) and force:
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys. '
                            'Use -t to install all downloaded keys')
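# This variant widens the key filter to both .key and .pub suffixes, using
# raw strings so the dot is escaped rather than a wildcard. The same filter
# in miniature, over a hypothetical directory listing:

import re

listing = ['ci.key', 'release.pub', 'index.html', 'zlib.spack']

key_links = [f for f in listing
             if re.search(r'\.key', f) or re.search(r'\.pub', f)]
print(key_links)  # ['ci.key', 'release.pub']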
def test_spider_0():
    pages, links = spider(root, depth=0)

    assert root in pages
    assert page_1 not in pages
    assert page_2 not in pages
    assert page_3 not in pages
    assert page_4 not in pages

    assert "This is the root page." in pages[root]

    assert root not in links
    assert page_1 in links
    assert page_2 not in links
    assert page_3 not in links
    assert page_4 not in links
def test_spider_1():
    pages, links = spider(root, depth=1)

    assert root in pages
    assert page_1 in pages
    assert page_2 not in pages
    assert page_3 not in pages
    assert page_4 not in pages

    assert "This is the root page." in pages[root]
    assert "This is page 1." in pages[page_1]

    assert root not in links
    assert page_1 in links
    assert page_2 in links
    assert page_3 not in links
    assert page_4 not in links
def get_specs():
    """
    Get spec.yaml's for build caches available on mirror
    """
    mirrors = spack.config.get_config('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    path = str(spack.architecture.sys_type())
    specs = set()
    urls = set()
    from collections import defaultdict
    durls = defaultdict(list)
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding buildcaches in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                if re.search('spec.yaml', file):
                    link = 'file://' + mirror + '/' + file
                    urls.add(link)
        else:
            tty.msg("Finding buildcaches on %s" % url)
            p, links = spider(url + "/build_cache")
            for link in links:
                if re.search("spec.yaml", link) and re.search(path, link):
                    urls.add(link)

    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            try:
                stage.fetch()
            except fs.FetchError:
                continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = spack.spec.Spec.from_yaml(f)
                spec._mark_concrete()
                specs.add(spec)
                durls[spec].append(link)

    return specs, durls
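# The durls side table above records every mirror URL each concrete spec was
# found at, so a spec present on several mirrors keeps all of its download
# locations. The defaultdict idiom in miniature (the key and URLs here are
# hypothetical):

from collections import defaultdict

durls = defaultdict(list)
durls['zlib@1.2.11'].append('file:///mirror_a/build_cache/zlib.spec.yaml')
durls['zlib@1.2.11'].append('https://mirror-b.example.com/build_cache/zlib.spec.yaml')
print(durls['zlib@1.2.11'])  # both locations retained, in insertion order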
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    """
    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()
    for mirror_name, mirror_url in mirrors.items():
        if mirror_url.startswith('file'):
            mirror = os.path.join(
                mirror_url.replace('file://', ''), _build_cache_relative_path)
            tty.msg("Finding public keys in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                if re.search(r'\.key', file):
                    link = 'file://' + mirror + '/' + file
                    keys.add(link)
        else:
            tty.msg("Finding public keys on %s" % mirror_url)
            p, links = spider(mirror_url + "/build_cache", depth=1)
            for link in links:
                if re.search(r'\.key', link):
                    keys.add(link)

    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            if os.path.exists(stage.save_filename) and force:
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys. '
                            'Use -t to install all downloaded keys')
def get_specs(force=False, allarch=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    arch = architecture.Arch(architecture.platform(),
                             'default_os', 'default_target')
    arch_pattern = ('([^-]*-[^-]*-[^-]*)')
    if not allarch:
        arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)

    regex_pattern = '%s(.*)(spec.yaml$)' % (arch_pattern)
    arch_re = re.compile(regex_pattern)

    if not spack.mirror.MirrorCollection():
        tty.debug("No Spack mirrors are currently configured")
        return {}

    urls = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding buildcaches in %s" % mirror_dir)
            if os.path.exists(mirror_dir):
                files = os.listdir(mirror_dir)
                for file in files:
                    m = arch_re.search(file)
                    if m:
                        link = url_util.join(fetch_url_build_cache, file)
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches at %s" %
                    url_util.format(fetch_url_build_cache))
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'))
            for link in links:
                m = arch_re.search(link)
                if m:
                    urls.add(link)

    return try_download_specs(urls=urls, force=force)
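# The arch-aware regex in this variant is worth seeing in isolation: with
# allarch=False it narrows matches to the current platform and OS. The
# platform and OS strings below are hypothetical stand-ins for what
# spack.architecture would report:

import re

platform, os_name = 'linux', 'ubuntu18.04'

arch_pattern = '([^-]*-[^-]*-[^-]*)'                   # allarch=True: any arch triple
arch_pattern = '(%s-%s-[^-]*)' % (platform, os_name)   # allarch=False: pin platform/OS

arch_re = re.compile('%s(.*)(spec.yaml$)' % arch_pattern)

files = [
    'linux-ubuntu18.04-x86_64-gcc-7.4.0-zlib-1.2.11.spec.yaml',
    'darwin-mojave-x86_64-clang-10.0.0-zlib-1.2.11.spec.yaml',
]
print([f for f in files if arch_re.search(f)])  # only the linux entry matches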
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    """
    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding public keys in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                if re.search(r'\.key', file):
                    link = 'file://' + mirror + '/' + file
                    keys.add(link)
        else:
            tty.msg("Finding public keys on %s" % url)
            p, links = spider(url + "/build_cache", depth=1)
            for link in links:
                if re.search(r'\.key', link):
                    keys.add(link)

    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            if os.path.exists(stage.save_filename) and force:
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys. '
                            'Use -t to install all downloaded keys')
def test_spider_2():
    pages, links = web_util.spider(root, depth=2)

    assert root in pages
    assert page_1 in pages
    assert page_2 in pages
    assert page_3 not in pages
    assert page_4 not in pages

    assert "This is the root page." in pages[root]
    assert "This is page 1." in pages[page_1]
    assert "This is page 2." in pages[page_2]

    assert root not in links
    assert page_1 in links
    assert page_2 in links
    assert page_3 in links
    assert page_4 in links
def test_spider_3():
    pages, links = spider(root, depth=3)

    assert root in pages
    assert page_1 in pages
    assert page_2 in pages
    assert page_3 in pages
    assert page_4 in pages

    assert "This is the root page." in pages[root]
    assert "This is page 1." in pages[page_1]
    assert "This is page 2." in pages[page_2]
    assert "This is page 3." in pages[page_3]
    assert "This is page 4." in pages[page_4]

    assert root in links  # circular link on page 3
    assert page_1 in links
    assert page_2 in links
    assert page_3 in links
    assert page_4 in links
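# Taken together, the four spider tests pin down a contract: pages holds
# everything fetched within `depth` hops of the root, while links holds
# every link seen on those pages, so links can reach one hop beyond pages.
# A toy breadth-first spider over an in-memory site illustrates this; the
# page graph is hypothetical, shaped to mirror what the tests imply:

site = {
    'root': ['page_1'],
    'page_1': ['page_2'],
    'page_2': ['page_3', 'page_4'],
    'page_3': ['root'],  # circular link back to the root, as in test_spider_3
    'page_4': [],
}

def toy_spider(url, depth=0):
    pages, links, frontier = {}, set(), [(url, 0)]
    while frontier:
        page, d = frontier.pop(0)
        if page in pages:
            continue
        pages[page] = 'This is %s.' % page  # stand-in for fetched page text
        links.update(site[page])
        if d < depth:
            frontier.extend((child, d + 1) for child in site[page])
    return pages, links

pages, links = toy_spider('root', depth=1)
assert set(pages) == {'root', 'page_1'}  # fetched within one hop
assert links == {'page_1', 'page_2'}     # seen one hop further out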
def get_keys(install=False, yes_to_all=False):
    """
    Get pgp public keys available on mirror
    """
    mirrors = spack.config.get_config('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding public keys in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                if re.search(r'\.key', file):
                    link = 'file://' + mirror + '/' + file
                    keys.add(link)
        else:
            tty.msg("Finding public keys on %s" % url)
            p, links = spider(url + "/build_cache", depth=1)
            for link in links:
                if re.search(r'\.key', link):
                    keys.add(link)

    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            try:
                stage.fetch()
            except fs.FetchError:
                continue
            tty.msg('Found key %s' % link)
            if install:
                if yes_to_all:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys. '
                            'Use -y to override')