def sign_tarball(key, force, specfile_path):
    """Sign a spec file with gpg, writing the signature next to it.

    Selects the single available signing key when ``key`` is None;
    raises if zero or multiple keys exist, or if the signature file
    already exists and ``force`` is not set.
    """
    # gpg2 must be on PATH before anything else can happen.
    if spack.util.gpg.Gpg.gpg() is None:
        raise NoGpgException("gpg2 is not available in $PATH .\n"
                             "Use spack install gnupg and spack load gnupg.")

    if key is None:
        candidates = Gpg.signing_keys()
        if len(candidates) > 1:
            raise PickKeyException(str(candidates))
        if not candidates:
            raise NoKeyException(
                "No default key available for signing.\n"
                "Use spack gpg init and spack gpg create"
                " to create a default key.")
        key = candidates[0]

    signature_path = '%s.asc' % specfile_path
    if os.path.exists(signature_path):
        if not force:
            raise NoOverwriteException(signature_path)
        os.remove(signature_path)

    Gpg.sign(key, specfile_path, signature_path)
def gpg_verify(args):
    """verify a signed package"""
    # TODO: Support the package format Spack creates.
    # Default signature path: "<package>.asc" alongside the package.
    sig = args.signature
    if sig is None:
        sig = args.package + '.asc'
    Gpg.verify(sig, args.package)
def gpg_init(args):
    """Trust every '*.key' file found under the import directory."""
    keys_dir = args.import_dir
    if keys_dir is None:
        keys_dir = spack.paths.gpg_keys_path

    for dirpath, _, entries in os.walk(keys_dir):
        for entry in entries:
            if entry.endswith('.key'):
                Gpg.trust(os.path.join(dirpath, entry))
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    with suffix .key or .pub

    Args:
        install (bool): fetch each key found into a local stage.
        trust (bool): also add fetched keys to the gpg keyring.
        force (bool): re-download keys that already exist locally.
    """
    # At least one configured mirror is required.
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")
    keys = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(
            mirror.fetch_url, _build_cache_relative_path)
        # local_file_path returns a path only for file:// mirrors;
        # remote mirrors fall through to the spider branch below.
        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding public keys in %s" % mirror_dir)
            files = os.listdir(str(mirror_dir))
            for file in files:
                if re.search(r'\.key', file) or re.search(r'\.pub', file):
                    link = url_util.join(fetch_url_build_cache, file)
                    keys.add(link)
        else:
            tty.msg("Finding public keys at %s" %
                    url_util.format(fetch_url_build_cache))
            # For s3 mirror need to request index.html directly
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'), depth=1)
            for link in links:
                if re.search(r'\.key', link) or re.search(r'\.pub', link):
                    keys.add(link)
    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            if os.path.exists(stage.save_filename) and force:
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    # best effort: skip keys that fail to download
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys.'
                            'Use -t to install all downloaded keys')
def extract_tarball(spec, filename, yes_to_all=False, force=False):
    """
    extract binary tarball for given package into install area

    Verifies the gpg signature (unless ``yes_to_all``) and the recorded
    sha256 checksum before unpacking into ``spec.prefix``. Raises
    NoOverwriteException if the prefix exists and ``force`` is False.
    """
    installpath = spec.prefix
    if os.path.exists(installpath):
        if force:
            shutil.rmtree(installpath)
        else:
            raise NoOverwriteException(str(installpath))
    stagepath = os.path.dirname(filename)
    # outer .spack archive wraps the .tar.gz payload and the spec file
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(stagepath, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(stagepath, specfile_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(stagepath)
    if not yes_to_all:
        if os.path.exists('%s.asc' % specfile_path):
            Gpg.verify('%s.asc' % specfile_path, specfile_path)
            os.remove(specfile_path + '.asc')
        else:
            raise NoVerifyException()
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        # NOTE(review): yaml.load without an explicit Loader is unsafe for
        # untrusted input unless 'yaml' is spack's own wrapper — confirm.
        spec_dict = yaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        raise NoChecksumException()
    # delay creating installpath until verification is complete
    mkdirp(installpath)
    # extract into the parent so the tarball's top dir becomes the prefix
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=join_path(installpath, '..'))
    os.remove(tarfile_path)
    os.remove(specfile_path)
    relocate_package(installpath)
def gpg_sign(args):
    """Sign a package file, auto-selecting the key when exactly one exists."""
    key = args.key
    if key is None:
        available = Gpg.signing_keys()
        if not available:
            raise RuntimeError('no signing keys are available')
        if len(available) > 1:
            raise RuntimeError('multiple signing keys are available; '
                               'please choose one')
        key = available[0]

    # Default output path: "<package>.asc" alongside the package.
    output = args.output or (args.package + '.asc')
    # TODO: Support the package format Spack creates.
    Gpg.sign(key, args.package, output, args.clearsign)
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror

    Args:
        install (bool): fetch each key found into a local stage.
        trust (bool): also add fetched keys to the gpg keyring.
        force (bool): re-download keys that already exist locally.
    """
    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")
    keys = set()
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding public keys in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                # raw string: '\.' in a plain literal is an invalid escape
                if re.search(r'\.key', file):
                    link = 'file://' + mirror + '/' + file
                    keys.add(link)
        else:
            tty.msg("Finding public keys on %s" % url)
            p, links = spider(url + "/build_cache", depth=1)
            for link in links:
                if re.search(r'\.key', link):
                    keys.add(link)
    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            if os.path.exists(stage.save_filename) and force:
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    # best effort: skip keys that fail to download
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys.'
                            'Use -t to install all downloaded keys')
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror

    Scans every configured mirror for '*.key' files; optionally downloads
    them (``install``), trusts them (``trust``), and re-downloads existing
    local copies (``force``).
    """
    mirrors = spack.config.get('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    found = set()
    for mirror_url in mirrors.values():
        if mirror_url.startswith('file'):
            # local mirror: list the build-cache directory directly
            cache_dir = os.path.join(
                mirror_url.replace('file://', ''),
                _build_cache_relative_path)
            tty.msg("Finding public keys in %s" % cache_dir)
            for entry in os.listdir(cache_dir):
                if re.search(r'\.key', entry):
                    found.add('file://' + cache_dir + '/' + entry)
        else:
            # remote mirror: crawl the build-cache index for key links
            tty.msg("Finding public keys on %s" % mirror_url)
            _, page_links = spider(mirror_url + "/build_cache", depth=1)
            found.update(
                link for link in page_links if re.search(r'\.key', link))

    for link in found:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys.'
                            'Use -t to install all downloaded keys')
def sign_tarball(yes_to_all, key, force, specfile_path):
    """Sign a spec file if gpg2 and a signing key are available.

    Note: ``yes_to_all`` is accepted for interface compatibility but is
    not consulted by this function.
    """
    if not has_gnupg2():
        raise NoGpgException()

    if key is None:
        candidates = Gpg.signing_keys()
        if len(candidates) > 1:
            raise PickKeyException()
        if not candidates:
            raise NoKeyException()
        key = candidates[0]

    signature = '%s.asc' % specfile_path
    if os.path.exists(signature):
        if not force:
            raise NoOverwriteException(signature)
        os.remove(signature)

    Gpg.sign(key, specfile_path, signature)
def get_keys(install=False, yes_to_all=False):
    """
    Get pgp public keys available on mirror

    Args:
        install (bool): fetch each key found into a local stage.
        yes_to_all (bool): also add fetched keys to the gpg keyring.
    """
    mirrors = spack.config.get_config('mirrors')
    if len(mirrors) == 0:
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")
    keys = set()
    for key in mirrors:
        url = mirrors[key]
        if url.startswith('file'):
            mirror = url.replace('file://', '') + '/build_cache'
            tty.msg("Finding public keys in %s" % mirror)
            files = os.listdir(mirror)
            for file in files:
                # raw string: '\.' in a plain literal is an invalid escape
                if re.search(r'\.key', file):
                    link = 'file://' + mirror + '/' + file
                    keys.add(link)
        else:
            tty.msg("Finding public keys on %s" % url)
            p, links = spider(url + "/build_cache", depth=1)
            for link in links:
                if re.search(r'\.key', link):
                    keys.add(link)
    for link in keys:
        with Stage(link, name="build_cache", keep=True) as stage:
            try:
                stage.fetch()
            except fs.FetchError:
                # best effort: skip keys that fail to download
                continue
            tty.msg('Found key %s' % link)
            if install:
                if yes_to_all:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys.'
                            'Use -y to override')
def gpg_create(args):
    """Create a gpg key; optionally export keys created by this call."""
    exporting = bool(args.export)
    # Snapshot existing secret keys so we can diff after creation.
    before = Gpg.signing_keys() if exporting else None

    Gpg.create(name=args.name, email=args.email,
               comment=args.comment, expires=args.expires)

    if exporting:
        after = set(Gpg.signing_keys())
        created = after.difference(before)
        Gpg.export_keys(args.export, *created)
def sign_tarball(key, force, specfile_path):
    """Sign a spec file, selecting a default key when none is given.

    Raises NoGpgException when gpg2 is missing, PickKeyException /
    NoKeyException on ambiguous or absent keys, and NoOverwriteException
    when the signature exists and ``force`` is False.
    """
    if not has_gnupg2():
        raise NoGpgException(
            "gpg2 is not available in $PATH .\n"
            "Use spack install gnupg and spack load gnupg.")

    if key is None:
        candidates = Gpg.signing_keys()
        if len(candidates) > 1:
            raise PickKeyException(str(candidates))
        if not candidates:
            raise NoKeyException(
                "No default key available for signing.\n"
                "Use spack gpg init and spack gpg create"
                " to create a default key.")
        key = candidates[0]

    signature = '%s.asc' % specfile_path
    if os.path.exists(signature):
        if not force:
            raise NoOverwriteException(signature)
        os.remove(signature)

    Gpg.sign(key, specfile_path, signature)
def gpg_list(args):
    """List keys in the gpg keyring."""
    show_trusted = args.trusted
    show_signing = args.signing
    Gpg.list(show_trusted, show_signing)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area

    Unpacks into a temp dir first; the install prefix is only populated
    after signature, checksum, layout checks and relocation all succeed.
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))
    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    # outer .spack archive wraps the .tar.gz payload and the spec file
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                Gpg.verify('%s.asc' % specfile_path, specfile_path)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(str(e))
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")
    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix',
                                        new_relative_prefix)
    # if the original relative prefix and new relative prefix differ the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)
    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)
    try:
        relocate_package(workdir, allow_root)
    except Exception as e:
        shutil.rmtree(workdir)
        tty.die(str(e))
    # Delay creating spec.prefix until verification is complete
    # and any relocation has been done.
    else:
        install_tree(workdir, spec.prefix, symlinks=True)
    finally:
        shutil.rmtree(tmpdir)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area

    Unpacks into a temp dir first; the install prefix is only populated
    after signature, checksum, layout checks and relocation all succeed.
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))
    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    # outer .spack archive wraps the .tar.gz payload and the spec file
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                Gpg.verify('%s.asc' % specfile_path, specfile_path)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(str(e))
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        # NOTE(review): yaml.load without a Loader, unlike the sibling
        # variant that uses syaml.load — confirm which yaml this is and
        # whether untrusted input can reach it.
        spec_dict = yaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")
    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix',
                                        new_relative_prefix)
    # if the original relative prefix and new relative prefix differ the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)
    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)
    try:
        relocate_package(workdir, allow_root)
    except Exception as e:
        shutil.rmtree(workdir)
        tty.die(str(e))
    # Delay creating spec.prefix until verification is complete
    # and any relocation has been done.
    else:
        install_tree(workdir, spec.prefix, symlinks=True)
    finally:
        shutil.rmtree(tmpdir)
def gpg_untrust(args):
    """remove a key from the keyring"""
    signing = args.signing
    Gpg.untrust(signing, *args.keys)
def gpg_trust(args):
    """add a key to the keyring"""
    key_path = args.keyfile
    Gpg.trust(key_path)
def gpg_list(args):
    """list keys available in the keyring"""
    trusted, signing = args.trusted, args.signing
    Gpg.list(trusted, signing)
def gpg_export(args):
    """export a secret key"""
    # Default to all signing keys when none were named explicitly.
    selected = args.keys or Gpg.signing_keys()
    Gpg.export_keys(args.location, *selected)
def gpg_untrust(args):
    """Remove the given keys from the gpg keyring."""
    key_ids = list(args.keys)
    Gpg.untrust(args.signing, *key_ids)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area

    Unpacks into a temp dir first; the install prefix is only populated
    after signature, checksum, layout checks and relocation all succeed.
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))
    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    # outer .spack archive wraps the payload tarball and the spec file
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.bz2')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    # older buildcache tarfiles use gzip compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.gz')
        tarfile_path = os.path.join(tmpdir, tarfile_name)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                suppress = config.get('config:suppress_gpg_warnings', False)
                Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(e)
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")
    new_relative_prefix = str(
        os.path.relpath(spec.prefix, spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix',
                                        new_relative_prefix)
    # if the original relative prefix and new relative prefix differ the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)
    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
    # install_tree copies hardlinks
    # create a temporary tarfile from prefix and extract it to workdir
    # tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % workdir, arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(spec.prefix)
    os.remove(temp_tarfile_path)
    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)
    try:
        relocate_package(spec.prefix, spec, allow_root)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        tty.die(e)
    else:
        # warn (don't fail) when the buildcache predates install manifests
        manifest_file = os.path.join(spec.prefix,
                                     spack.store.layout.metadata_dir,
                                     spack.store.layout.manifest_file_name)
        if not os.path.exists(manifest_file):
            spec_id = spec.format('{name}/{hash:7}')
            tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        shutil.rmtree(tmpdir)
def gpg_trust(args):
    """Add the key stored in ``args.keyfile`` to the gpg keyring."""
    path = args.keyfile
    Gpg.trust(path)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area

    Unpacks into a temp dir first; the install prefix is only populated
    after signature and checksum checks and relocation all succeed.
    Also removes the downloaded ``filename`` on completion.
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))
    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    # outer .spack archive wraps the payload tarball and the spec file
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)
    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    # some buildcache tarfiles use bzip2 compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.bz2')
        tarfile_path = os.path.join(tmpdir, tarfile_name)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                suppress = config.get('config:suppress_gpg_warnings', False)
                Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception as e:
                shutil.rmtree(tmpdir)
                raise e
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)
    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']
    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")
    new_relative_prefix = str(
        os.path.relpath(spec.prefix, spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix',
                                        new_relative_prefix)
    rel = buildinfo.get('relative_rpaths')
    # if the original relative prefix and new relative prefix differ the
    # directory layout has changed and the buildcache cannot be installed
    # if it was created with relative rpaths
    info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel))
    # NOTE(review): the layout-mismatch guard below is deliberately
    # disabled in this variant — relocation handles differing layouts here.
    # if (old_relative_prefix != new_relative_prefix and (rel)):
    #     shutil.rmtree(tmpdir)
    #     msg = "Package tarball was created from an install "
    #     msg += "prefix with a different directory layout. "
    #     msg += "It cannot be relocated because it "
    #     msg += "uses relative rpaths."
    #     raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # get the parent directory of the file .spack/binary_distribution
    # this should the directory unpacked from the tarball whose
    # name is unknown because the prefix naming is unknown
    bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
    workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
    tty.debug('workdir %s' % workdir)
    # install_tree copies hardlinks
    # create a temporary tarfile from prefix and exract it to workdir
    # tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % workdir, arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(spec.prefix)
    os.remove(temp_tarfile_path)
    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)
    try:
        relocate_package(spec, allow_root)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        raise e
    else:
        # warn (don't fail) when the buildcache predates install manifests
        manifest_file = os.path.join(spec.prefix,
                                     spack.store.layout.metadata_dir,
                                     spack.store.layout.manifest_file_name)
        if not os.path.exists(manifest_file):
            spec_id = spec.format('{name}/{hash:7}')
            tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        shutil.rmtree(tmpdir)
        # remove the downloaded .spack archive once install finishes
        if os.path.exists(filename):
            os.remove(filename)