Example #1
def buildcache_update_index(args):
    """Update a buildcache index."""
    outdir = '.'
    if args.mirror_url:
        outdir = args.mirror_url

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    bindist.generate_package_index(
        url_util.join(outdir, bindist.build_cache_relative_path()))
Example #2
def update_index(mirror_url, update_keys=False):
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    outdir = url_util.format(mirror.push_url)

    bindist.generate_package_index(
        url_util.join(outdir, bindist.build_cache_relative_path()))

    if update_keys:
        keys_url = url_util.join(outdir, bindist.build_cache_relative_path(),
                                 bindist.build_cache_keys_relative_path())

        bindist.generate_key_index(keys_url)
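
A minimal usage sketch for update_index(); the mirror URL is hypothetical and assumes the surrounding Spack modules are importable:

# Hypothetical call: regenerate the package index for the given mirror and
# rebuild the key index alongside it.
update_index('s3://my-spack-mirror/prefix', update_keys=True)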
Example #3
def get_keys(install=False, trust=False, force=False):
    """
    Get pgp public keys available on mirror
    with suffix .key or .pub
    """
    if not spack.mirror.MirrorCollection():
        tty.die("Please add a spack mirror to allow " +
                "download of build caches.")

    keys = set()

    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(mirror.fetch_url,
                                              _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding public keys in %s" % mirror_dir)
            files = os.listdir(str(mirror_dir))
            for file in files:
                if re.search(r'\.key', file) or re.search(r'\.pub', file):
                    link = url_util.join(fetch_url_build_cache, file)
                    keys.add(link)
        else:
            tty.msg("Finding public keys at %s" %
                    url_util.format(fetch_url_build_cache))
            # For S3 mirrors, index.html must be requested directly
            p, links = web_util.spider(url_util.join(fetch_url_build_cache,
                                                     'index.html'),
                                       depth=1)

            for link in links:
                if re.search(r'\.key', link) or re.search(r'\.pub', link):
                    keys.add(link)

        for link in keys:
            with Stage(link, name="build_cache", keep=True) as stage:
                if os.path.exists(stage.save_filename) and force:
                    os.remove(stage.save_filename)
                if not os.path.exists(stage.save_filename):
                    try:
                        stage.fetch()
                    except fs.FetchError:
                        continue
            tty.msg('Found key %s' % link)
            if install:
                if trust:
                    Gpg.trust(stage.save_filename)
                    tty.msg('Added this key to trusted keys.')
                else:
                    tty.msg('Will not add this key to trusted keys. '
                            'Use -t to install all downloaded keys')
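
The filename filter above can be exercised on its own; this standalone sketch uses made-up key names:

import re

def looks_like_key(name):
    # Same filter as get_keys(): any name containing ".key" or ".pub"
    # is treated as a public key file.
    return bool(re.search(r'\.key', name) or re.search(r'\.pub', name))

assert looks_like_key('ci.key')
assert looks_like_key('E2E61D43.pub')
assert not looks_like_key('index.html')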
Example #4
def get_specs(allarch=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs
    arch = architecture.Arch(architecture.platform(), 'default_os',
                             'default_target')

    if not spack.mirror.MirrorCollection():
        tty.debug("No Spack mirrors are currently configured")
        return {}

    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(mirror.fetch_url,
                                              _build_cache_relative_path)

        tty.debug('Finding buildcaches at {0}'.format(
            url_util.format(fetch_url_build_cache)))

        index_url = url_util.join(fetch_url_build_cache, 'index.json')

        try:
            _, _, file_stream = web_util.read_from_url(index_url,
                                                       'application/json')
            index_object = codecs.getreader('utf-8')(file_stream).read()
        except (URLError, web_util.SpackWebError) as url_err:
            tty.error('Failed to read index {0}'.format(index_url))
            tty.debug(url_err)
            # Continue on to the next mirror
            continue

        tmpdir = tempfile.mkdtemp()
        index_file_path = os.path.join(tmpdir, 'index.json')
        with open(index_file_path, 'w') as fd:
            fd.write(index_object)

        db_root_dir = os.path.join(tmpdir, 'db_root')
        db = spack_db.Database(None,
                               db_dir=db_root_dir,
                               enable_transaction_locking=False)

        db._read_from_file(index_file_path)
        spec_list = db.query_local(installed=False)

        for indexed_spec in spec_list:
            spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
            if (allarch is True or spec_arch == arch):
                _cached_specs.add(indexed_spec)

    return _cached_specs
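
The staging step in get_specs(), writing the fetched index.json to a scratch directory before handing it to the database reader, can be sketched without any Spack imports; the payload below is a stand-in:

import json
import os
import tempfile

index_object = json.dumps({'database': {'installs': {}}})  # stand-in payload

tmpdir = tempfile.mkdtemp()
index_file_path = os.path.join(tmpdir, 'index.json')
with open(index_file_path, 'w') as fd:
    fd.write(index_object)
# A reader such as spack_db.Database._read_from_file() would consume
# index_file_path from here.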
Example #5
def mirror_set_url(args):
    """Change the URL of a mirror."""
    url = url_util.format(args.url)

    mirrors = spack.config.get('mirrors', scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    if args.name not in mirrors:
        tty.die("No mirror found with name %s." % args.name)

    entry = mirrors[args.name]

    try:
        fetch_url = entry['fetch']
        push_url = entry['push']
    except TypeError:
        fetch_url, push_url = entry, entry

    changes_made = False

    if args.push:
        changes_made = changes_made or push_url != url
        push_url = url
    else:
        changes_made = (
            changes_made or fetch_url != push_url or push_url != url)

        fetch_url, push_url = url, url

    items = [
        (
            (n, u)
            if n != args.name else (
                (n, {"fetch": fetch_url, "push": push_url})
                if fetch_url != push_url else (n, fetch_url)
            )
        )
        for n, u in mirrors.items()
    ]

    mirrors = syaml_dict(items)
    spack.config.set('mirrors', mirrors, scope=args.scope)

    if changes_made:
        tty.msg(
            "Changed%s url for mirror %s." %
            ((" (push)" if args.push else ""), args.name))
    else:
        tty.msg("Url already set for mirror %s." % args.name)
Example #6
def mirror_add(args):
    """Add a mirror to Spack."""
    url = url_util.format(args.url)

    mirrors = spack.config.get('mirrors', scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    if args.name in mirrors:
        tty.die("Mirror with name %s already exists." % args.name)

    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, (args.name, url))
    mirrors = syaml_dict(items)
    spack.config.set('mirrors', mirrors, scope=args.scope)
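
mirror_add() rebuilds the mapping with the new mirror at position 0 so it appears first in the configuration; since syaml_dict is an ordered mapping, the trick can be shown with a plain dict (names and URLs are invented):

mirrors = {'existing': 'https://mirror.example.com'}  # stand-in config

items = [(n, u) for n, u in mirrors.items()]
items.insert(0, ('new-mirror', 'file:///tmp/mirror'))  # new entry goes first

# Python 3.7+: plain dicts preserve insertion order
mirrors = dict(items)
assert list(mirrors) == ['new-mirror', 'existing']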
Example #7
def get_specs(force=False, allarch=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    arch = architecture.Arch(architecture.platform(), 'default_os',
                             'default_target')
    arch_pattern = '([^-]*-[^-]*-[^-]*)'
    if not allarch:
        arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)

    regex_pattern = '%s(.*)(spec.yaml$)' % arch_pattern
    arch_re = re.compile(regex_pattern)

    if not spack.mirror.MirrorCollection():
        tty.debug("No Spack mirrors are currently configured")
        return {}

    urls = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(mirror.fetch_url,
                                              _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding buildcaches in %s" % mirror_dir)
            if os.path.exists(mirror_dir):
                files = os.listdir(mirror_dir)
                for file in files:
                    m = arch_re.search(file)
                    if m:
                        link = url_util.join(fetch_url_build_cache, file)
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches at %s" %
                    url_util.format(fetch_url_build_cache))
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'))
            for link in links:
                m = arch_re.search(link)
                if m:
                    urls.add(link)

    return try_download_specs(urls=urls, force=force)
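
The architecture filter compiles down to an ordinary regular expression; a standalone check against an invented buildcache file name:

import re

arch_pattern = '(%s-%s-[^-]*)' % ('linux', 'ubuntu18.04')  # sample platform/os
arch_re = re.compile('%s(.*)(spec.yaml$)' % arch_pattern)

name = 'linux-ubuntu18.04-x86_64-gcc-9.3.0-zlib-1.2.11.spec.yaml'
m = arch_re.search(name)
assert m is not None and m.group(1) == 'linux-ubuntu18.04-x86_64'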
Example #8
def generate_key_index(key_prefix, tmpdir=None):
    """Create the key index page.

    Creates (or replaces) the "index.json" page at the location given in
    key_prefix.  This page contains an entry for each key (.pub) under
    key_prefix.
    """

    tty.debug(' '.join(('Retrieving key.pub files from',
                        url_util.format(key_prefix), 'to build key index')))

    fingerprints = (entry[:-4]
                    for entry in web_util.list_url(key_prefix, recursive=False)
                    if entry.endswith('.pub'))

    remove_tmpdir = False

    keys_local = url_util.local_file_path(key_prefix)
    if keys_local:
        target = os.path.join(keys_local, 'index.json')
    else:
        if not tmpdir:
            tmpdir = tempfile.mkdtemp()
            remove_tmpdir = True
        target = os.path.join(tmpdir, 'index.json')

    index = {
        'keys':
        dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))
    }
    with open(target, 'w') as f:
        sjson.dump(index, f)

    if not keys_local:
        try:
            web_util.push_to_url(
                target,
                url_util.join(key_prefix, 'index.json'),
                keep_original=False,
                extra_args={'ContentType': 'application/json'})
        finally:
            if remove_tmpdir:
                shutil.rmtree(tmpdir)
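
The index body itself is just a dictionary keyed by fingerprint; the same construction with invented file names:

import json

entries = ['ABC123.pub', 'DEF456.pub', 'readme.txt']  # pretend list_url() output

# entry[:-4] strips the '.pub' suffix, leaving the key fingerprint
fingerprints = (entry[:-4] for entry in entries if entry.endswith('.pub'))

index = {'keys': dict((fp, {}) for fp in sorted(set(fingerprints)))}
assert json.dumps(index) == '{"keys": {"ABC123": {}, "DEF456": {}}}'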
Example #9
File: s3.py Project: mcuma/spack
def get_mirror_connection(url, url_type="push"):
    connection = {}
    # Try to find a mirror for potential connection information
    # Check to see if desired file starts with any of the mirror URLs
    rebuilt_path = url_util.format(url)
    # Map each mirror's push URL to the mirror object itself
    mirror_dict = {
        x.push_url: x
        for x in spack.mirror.MirrorCollection().values()
    }  # noqa: E501
    # Ensure most specific URLs (longest) are presented first
    mirror_url_keys = mirror_dict.keys()
    mirror_url_keys = sorted(mirror_url_keys, key=len, reverse=True)
    for mURL in mirror_url_keys:
        # See if desired URL starts with the mirror's push URL
        if rebuilt_path.startswith(mURL):
            connection = mirror_dict[mURL].to_dict()[url_type]
            break
    return connection
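
The longest-prefix match is the heart of this helper and can be isolated in a few lines (URLs and connection data are invented):

push_urls = {
    's3://bucket': {'endpoint_url': 'https://a.example.com'},
    's3://bucket/deep/prefix': {'endpoint_url': 'https://b.example.com'},
}

def lookup(url):
    # Sort candidate prefixes longest-first so the most specific mirror wins.
    for prefix in sorted(push_urls, key=len, reverse=True):
        if url.startswith(prefix):
            return push_urls[prefix]
    return {}

assert lookup('s3://bucket/deep/prefix/x.spack') == push_urls['s3://bucket/deep/prefix']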
Example #10
def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives."""
    if args.specs and args.all:
        raise SpackError("Cannot specify specs on command line if you"
                         " chose to mirror all specs with '--all'")
    elif args.file and args.all:
        raise SpackError("Cannot specify specs with a file ('-f') if you"
                         " chose to mirror all specs with '--all'")

    if not args.versions_per_spec:
        num_versions = 1
    elif args.versions_per_spec == 'all':
        num_versions = 'all'
    else:
        try:
            num_versions = int(args.versions_per_spec)
        except ValueError:
            raise SpackError("'--versions-per-spec' must be a number or 'all',"
                             " got '{0}'".format(args.versions_per_spec))

    # try to parse specs from the command line first.
    with spack.concretize.disable_compiler_existence_check():
        specs = spack.cmd.parse_specs(args.specs, concretize=True)

        # If there is a file, parse each line as a spec and add it to the list.
        if args.file:
            if specs:
                tty.die("Cannot pass specs on the command line with --file.")
            specs = _read_specs_from_file(args.file)

        if not specs:
            # If nothing is passed, use environment or all if no active env
            if not args.all:
                tty.die(
                    "No packages were specified.",
                    "To mirror all packages, use the '--all' option"
                    " (this will require significant time and space).")

            env = ev.get_env(args, 'mirror')
            if env:
                mirror_specs = env.specs_by_hash.values()
            else:
                specs = [Spec(n) for n in spack.repo.all_package_names()]
                mirror_specs = spack.mirror.get_all_versions(specs)
                mirror_specs.sort(key=lambda s: (s.name, s.version))
        else:
            # If the user asked for dependencies, traverse spec DAG get them.
            if args.dependencies:
                new_specs = set()
                for spec in specs:
                    spec.concretize()
                    for s in spec.traverse():
                        new_specs.add(s)
                specs = list(new_specs)

            # Skip external specs, as they are already installed
            external_specs = [s for s in specs if s.external]
            specs = [s for s in specs if not s.external]

            for spec in external_specs:
                msg = 'Skipping {0} as it is an external spec.'
                tty.msg(msg.format(spec.cshort_spec))

            if num_versions == 'all':
                mirror_specs = spack.mirror.get_all_versions(specs)
            else:
                mirror_specs = spack.mirror.get_matching_versions(
                    specs, num_versions=num_versions)

    mirror = spack.mirror.Mirror(args.directory
                                 or spack.config.get('config:source_cache'))

    directory = url_util.format(mirror.push_url)

    existed = web_util.url_exists(directory)

    # Actually do the work to create the mirror
    present, mirrored, error = spack.mirror.create(directory, mirror_specs)
    p, m, e = len(present), len(mirrored), len(error)

    verb = "updated" if existed else "created"
    tty.msg("Successfully %s mirror in %s" % (verb, directory),
            "Archive stats:", "  %-4d already present" % p, "  %-4d added" % m,
            "  %-4d failed to fetch." % e)
    if error:
        tty.error("Failed downloads:")
        colify(s.cformat("{name}{@version}") for s in error)
        sys.exit(1)
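
The '--versions-per-spec' handling above can be factored into a small helper; a sketch (the function name is invented):

def parse_versions_per_spec(value):
    # Default to one version; accept the literal 'all' or an integer string.
    if not value:
        return 1
    if value == 'all':
        return 'all'
    return int(value)  # ValueError here mirrors the SpackError above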
Example #11
def build_tarball(spec,
                  outdir,
                  force=False,
                  rel=False,
                  unsigned=False,
                  allow_root=False,
                  key=None,
                  regenerate_index=False):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    if not spec.concrete:
        raise ValueError('spec must be concrete to build tarball')

    # set up some paths
    tmpdir = tempfile.mkdtemp()
    cache_prefix = build_cache_prefix(tmpdir)

    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    spackfile_path = os.path.join(cache_prefix,
                                  tarball_path_name(spec, '.spack'))

    remote_spackfile_path = url_util.join(
        outdir, os.path.relpath(spackfile_path, tmpdir))

    mkdirp(tarfile_dir)
    if web_util.url_exists(remote_spackfile_path):
        if force:
            web_util.remove_url(remote_spackfile_path)
        else:
            raise NoOverwriteException(url_util.format(remote_spackfile_path))

    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))

    remote_specfile_path = url_util.join(
        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))

    if web_util.url_exists(remote_specfile_path):
        if force:
            web_util.remove_url(remote_specfile_path)
        else:
            raise NoOverwriteException(url_util.format(remote_specfile_path))

    # make a copy of the install directory to work with
    workdir = os.path.join(tempfile.mkdtemp(), os.path.basename(spec.prefix))
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            shutil.rmtree(tmpdir)
            tty.die(e)
    else:
        try:
            make_package_placeholder(workdir, spec, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            shutil.rmtree(tmpdir)
            tty.die(e)

    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = yaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(spec.prefix,
                                                   spack.store.layout.root)
    spec_dict['buildinfo'] = buildinfo
    spec_dict['full_hash'] = spec.full_hash()

    tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
        spec_dict['full_hash'], spec.name,
        url_util.format(remote_specfile_path)))
    tty.debug(spec.tree())

    with open(specfile_path, 'w') as outfile:
        outfile.write(syaml.dump(spec_dict))

    # sign the tarball and spec file with gpg
    if not unsigned:
        sign_tarball(key, force, specfile_path)
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    web_util.push_to_url(spackfile_path,
                         remote_spackfile_path,
                         keep_original=False)
    web_util.push_to_url(specfile_path,
                         remote_specfile_path,
                         keep_original=False)

    try:
        # create an index.html for the build_cache directory so specs can be
        # found
        if regenerate_index:
            generate_package_index(
                url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
    finally:
        shutil.rmtree(tmpdir)

    return None
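
checksum_tarball() is used above to fingerprint the tarball; an equivalent standalone sha256 helper, under the assumption that a streaming digest is all it does:

import hashlib

def sha256_of(path, blocksize=1 << 20):
    # Stream the file in blocks so large tarballs never load fully into memory.
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            h.update(block)
    return h.hexdigest()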
Example #12
def push_url_from_mirror_name(mirror_name):
    """Given a mirror name, return the URL on which to push binary packages."""
    mirror = spack.mirror.MirrorCollection().lookup(mirror_name)
    if mirror.name == "<unnamed>":
        raise ValueError('no mirror named "{0}"'.format(mirror_name))
    return url_util.format(mirror.push_url)
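
Example invocation, with a hypothetical mirror name:

push_url = push_url_from_mirror_name('my-mirror')  # e.g. 's3://my-bucket/prefix'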
Example #13
def buildcache_sync(args):
    """ Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--src-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        source_location = 'file://' + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        result = spack.mirror.MirrorCollection().lookup(source_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=source_location))

    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--dest-directory" expected a local path; got a URL, instead')
        # Ensure that the mirror lookup does not mistake this for named mirror
        dest_location = 'file://' + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=dest_location))

    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug('  {0}{1}: {2}'.format('* ' if s in env.roots() else '  ',
                                         s.name, s.dag_hash()))

        buildcache_rel_paths.extend([
            os.path.join(build_cache_dir,
                         bindist.tarball_path_name(s, '.spack')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(build_cache_dir, bindist.tarball_name(s, '.cdashid'))
        ])

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)
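
For every spec, three sibling files are synced; how the relative paths are composed can be shown with placeholder names:

import os

build_cache_dir = 'build_cache'
base = 'linux-ubuntu18.04-x86_64-gcc-9.3.0-zlib-1.2.11-abcdef'  # invented base name

rel_paths = [
    os.path.join(build_cache_dir, base + '.spack'),      # binary tarball
    os.path.join(build_cache_dir, base + '.spec.yaml'),  # spec metadata
    os.path.join(build_cache_dir, base + '.cdashid'),    # CDash build id
]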
Example #14
def test_url_join_local_paths():
    # Resolve local link against page URL

    # wrong:
    assert (url_util.join('s3://bucket/index.html',
                          '../other-bucket/document.txt') ==
            's3://bucket/other-bucket/document.txt')

    # correct - need to specify resolve_href=True:
    assert (url_util.join(
        's3://bucket/index.html',
        '../other-bucket/document.txt',
        resolve_href=True) == 's3://other-bucket/document.txt')

    # same as above: make sure several components are joined together correctly
    assert (url_util.join(
        # with resolve_href=True, the first arg is the base URL and cannot
        # be broken up
        's3://bucket/index.html',

        # with resolve_href=True, remaining arguments are the components of
        # the local href that needs to be resolved
        '..',
        'other-bucket',
        'document.txt',
        resolve_href=True) == 's3://other-bucket/document.txt')

    # Append local path components to prefix URL

    # wrong:
    assert (url_util.join(
        'https://mirror.spack.io/build_cache', 'my-package',
        resolve_href=True) == 'https://mirror.spack.io/my-package')

    # correct - Need to specify resolve_href=False:
    assert (url_util.join(
        'https://mirror.spack.io/build_cache',
        'my-package',
        resolve_href=False) == 'https://mirror.spack.io/build_cache/my-package'
            )

    # same as above; make sure resolve_href=False is default
    assert (url_util.join(
        'https://mirror.spack.io/build_cache',
        'my-package') == 'https://mirror.spack.io/build_cache/my-package')

    # same as above: make sure several components are joined together correctly
    assert (url_util.join(
        # with resolve_href=False, first arg is just a prefix. No
        # resolution is done.  So, there should be no difference between
        # join('/a/b/c', 'd/e'),
        # join('/a/b', 'c', 'd/e'),
        # join('/a', 'b/c', 'd', 'e'), etc.
        'https://mirror.spack.io',
        'build_cache',
        'my-package') == 'https://mirror.spack.io/build_cache/my-package')

    # file:// URL path components are *NOT* canonicalized
    spack_root = spack.paths.spack_root

    join_result = url_util.join('/a/b/c', '$spack')
    assert (join_result == 'file:///a/b/c/$spack')  # not canonicalized
    format_result = url_util.format(join_result)
    # canonicalize by hand
    expected = url_util.format(
        os.path.abspath(os.path.join('/', 'a', 'b', 'c', '.' + spack_root)))
    assert (format_result == expected)

    # see test_url_join_absolute_paths() for more on absolute path components
    join_result = url_util.join('/a/b/c', '/$spack')
    assert (join_result == 'file:///$spack')  # not canonicalized
    format_result = url_util.format(join_result)
    expected = url_util.format(spack_root)
    assert (format_result == expected)

    # For s3:// URLs, the "netloc" (bucket) is considered part of the path.
    # Make sure join() can cross bucket boundaries in this case.
    args = ['s3://bucket/a/b', 'new-bucket', 'c']
    assert (url_util.join(*args) == 's3://bucket/a/b/new-bucket/c')

    args.insert(1, '..')
    assert (url_util.join(*args) == 's3://bucket/a/new-bucket/c')

    args.insert(1, '..')
    assert (url_util.join(*args) == 's3://bucket/new-bucket/c')

    # new-bucket is now the "netloc" (bucket name)
    args.insert(1, '..')
    assert (url_util.join(*args) == 's3://new-bucket/c')
Example #15
def createtarball(args):
    """create a binary package from an existing install"""
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    outdir = '.'
    if args.directory:
        outdir = args.directory

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            if "package" in args.target_type:
                specs.add(match)
            if "dependencies" not in args.target_type:
                # if the user does not want dependencies, stop here
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    f_create = ft.partial(create_single_tarball,
                          outdir=outdir,
                          force=args.force,
                          relative=args.rel,
                          unsigned=args.unsigned,
                          allow_root=args.allow_root,
                          signkey=signkey,
                          rebuild_index=args.rebuild_index and args.jobs == 1,
                          catch_exceptions=args.jobs != 1)

    # default behavior (early termination) for one job
    if args.jobs == 1:
        for spec in specs:
            f_create(spec)

    else:
        # currently, specs cause an infinite recursion bug when pickled
        # -> as multiprocessing uses pickle internally, we need to transform
        #    specs prior to distributing the work via worker pool
        # TODO: check if specs can be pickled
        specs = [s.to_dict() for s in specs]

        pool = NoDaemonPool(args.jobs if args.jobs > 1 else mp.cpu_count())
        # chunksize=1 because we do not want to pre-allocate specs to workers
        # (since each package will need a different amount of time to be
        # compressed)
        retvals = pool.map(f_create, specs, chunksize=1)

        errors = [rv["error"] for rv in retvals if rv["error"] is not None]
        list(map(tty.error, errors))
        if len(errors) > 0:
            sys.exit(1)

        # perform rebuild of index unless user requested not to
        if args.rebuild_index:
            bindist.generate_package_index(outdir)
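
The pool pattern above (a functools.partial mapped over the work items) in a minimal standalone form; names here are illustrative stand-ins:

import functools as ft
import multiprocessing as mp

def create_one(item, outdir):
    # Stand-in for create_single_tarball(): one unit of work per spec.
    return '%s -> %s' % (item, outdir)

if __name__ == '__main__':
    f_create = ft.partial(create_one, outdir='/tmp/mirror')  # bind shared kwargs
    with mp.Pool(2) as pool:
        # chunksize=1: hand out work one item at a time, since packages take
        # very different amounts of time to compress
        results = pool.map(f_create, ['a', 'b', 'c'], chunksize=1)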
Example #16
def _createtarball(env,
                   spec_yaml=None,
                   packages=None,
                   add_spec=True,
                   add_deps=True,
                   output_location=os.getcwd(),
                   signing_key=None,
                   force=False,
                   make_relative=False,
                   unsigned=False,
                   allow_root=False,
                   rebuild_index=False):
    if spec_yaml:
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            package = '/{0}'.format(s.dag_hash())
            matches = find_matching_specs(package, env=env)

    elif packages:
        matches = find_matching_specs(packages, env=env)

    elif env:
        matches = [env.specs_by_hash[h] for h in env.concretized_order]

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an active environment," +
                " or else a path to a yaml file containing a spec" +
                " to install")
    specs = set()

    mirror = spack.mirror.MirrorCollection().lookup(output_location)
    outdir = url_util.format(mirror.push_url)

    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            lookup = spack.store.db.query_one(match)

            if not add_spec:
                tty.debug('skipping matching root spec %s' % match.format())
            elif lookup is None:
                tty.debug('skipping uninstalled matching spec %s' %
                          match.format())
            else:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)

            if not add_deps:
                continue

            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                # skip root, since it's handled above
                if d == 0:
                    continue

                lookup = spack.store.db.query_one(node)

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                elif lookup is None:
                    tty.debug('skipping uninstalled dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, make_relative, unsigned,
                                  allow_root, signing_key, rebuild_index)
        except bindist.NoOverwriteException as e:
            tty.warn(e)
Example #17
def createtarball(args):
    """create a binary package from an existing install"""
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    outdir = '.'
    if args.directory:
        outdir = args.directory

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' %
                      match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            specs.add(match)
            if args.no_deps is True:
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, args.force, args.rel,
                                  args.unsigned, args.allow_root, signkey,
                                  not args.no_rebuild_index)
        except Exception as e:
            tty.warn('%s' % e)
Example #18
def mirror_set_url(args):
    """Change the URL of a mirror."""
    url = url_util.format(args.url)
    mirrors = spack.config.get('mirrors', scope=args.scope)
    if not mirrors:
        mirrors = syaml_dict()

    if args.name not in mirrors:
        tty.die("No mirror found with name %s." % args.name)

    entry = mirrors[args.name]
    key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]

    # Initialize so the dict branches below cannot hit a NameError when no
    # S3 options were passed.
    incoming_data = None
    if any(value for value in key_values if value in args):
        incoming_data = {
            "url": url,
            "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
            "access_token": args.s3_access_token,
            "profile": args.s3_profile,
            "endpoint_url": args.s3_endpoint_url
        }
    try:
        fetch_url = entry['fetch']
        push_url = entry['push']
    except TypeError:
        fetch_url, push_url = entry, entry

    changes_made = False

    if args.push:
        if isinstance(push_url, dict):
            changes_made = changes_made or push_url != incoming_data
            push_url = incoming_data
        else:
            changes_made = changes_made or push_url != url
            push_url = url
    else:
        if isinstance(push_url, dict):
            changes_made = (changes_made or fetch_url != incoming_data
                            or push_url != incoming_data)
            fetch_url, push_url = incoming_data, incoming_data
        else:
            changes_made = changes_made or push_url != url
            fetch_url, push_url = url, url

    items = [((n, u) if n != args.name else ((n, {
        "fetch": fetch_url,
        "push": push_url
    }) if fetch_url != push_url else (n, {
        "fetch": fetch_url,
        "push": fetch_url
    }))) for n, u in mirrors.items()]

    mirrors = syaml_dict(items)
    spack.config.set('mirrors', mirrors, scope=args.scope)

    if changes_made:
        tty.msg("Changed%s url or connection information for mirror %s." %
                ((" (push)" if args.push else ""), args.name))
    else:
        tty.msg("No changes made to mirror %s." % args.name)
Example #19
def mirror_add(args):
    """Add a mirror to Spack."""
    url = url_util.format(args.url)
    spack.mirror.add(args.name, url, args.scope)
Example #20
def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
                   output_location, key, force, rel, unsigned, allow_root,
                   no_rebuild_index):
    if spec_yaml:
        packages = set()
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))

    elif packages:
        pass  # the given list of package specs is used as-is

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    mirror = spack.mirror.MirrorCollection().lookup(output_location)
    outdir = url_util.format(mirror.push_url)

    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    signkey = None
    if key:
        signkey = key

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            if add_spec:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)
            else:
                tty.debug('skipping matching spec %s' % match.format())

            if not add_deps:
                continue

            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                # skip root, since it's handled above
                if d == 0:
                    continue

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        bindist.build_tarball(spec, outdir, force, rel, unsigned, allow_root,
                              signkey, not no_rebuild_index)
Example #21
def read_from_url(url, accept_content_type=None):
    url = url_util.parse(url)
    context = None

    verify_ssl = spack.config.get('config:verify_ssl')

    # Don't even bother with a context unless the URL scheme is one that uses
    # SSL certs.
    if uses_ssl(url):
        if verify_ssl:
            if __UNABLE_TO_VERIFY_SSL:
                # User wants SSL verification, but it cannot be provided.
                warn_no_ssl_cert_checking()
            else:
                # User wants SSL verification, and it *can* be provided.
                context = ssl.create_default_context()  # novm
        else:
            # User has explicitly indicated that they do not want SSL
            # verification.
            if not __UNABLE_TO_VERIFY_SSL:
                context = ssl._create_unverified_context()

    url_scheme = url.scheme
    url = url_util.format(url)
    if sys.platform == "win32" and url_scheme == "file":
        url = convert_to_posix_path(url)
    req = Request(url)

    content_type = None
    is_web_url = url_scheme in ('http', 'https')
    if accept_content_type and is_web_url:
        # Make a HEAD request first to check the content type.  This lets
        # us ignore tarballs and gigantic files.
        # It would be nice to do this with the HTTP Accept header to avoid
        # one round-trip.  However, most servers seem to ignore the header
        # if you ask for a tarball with Accept: text/html.
        req.get_method = lambda: "HEAD"
        resp = _urlopen(req, timeout=_timeout, context=context)

        content_type = get_header(resp.headers, 'Content-type')

    # Do the real GET request when we know it's just HTML.
    req.get_method = lambda: "GET"

    try:
        response = _urlopen(req, timeout=_timeout, context=context)
    except URLError as err:
        raise SpackWebError('Download failed: {ERROR}'.format(ERROR=str(err)))

    if accept_content_type and not is_web_url:
        content_type = get_header(response.headers, 'Content-type')

    reject_content_type = (accept_content_type and
                           (content_type is None or
                            not content_type.startswith(accept_content_type)))

    if reject_content_type:
        tty.debug("ignoring page {0}{1}{2}".format(
            url, " with content type " if content_type is not None else "",
            content_type or ""))

        return None, None, None

    return response.geturl(), response.headers, response
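
The HEAD-before-GET probe works with plain urllib as well; a minimal sketch against a hypothetical URL:

from urllib.request import Request, urlopen

req = Request('https://example.com/index.html')
req.get_method = lambda: 'HEAD'          # cheap probe: headers only, no body
resp = urlopen(req, timeout=10)
content_type = resp.headers.get('Content-type')

if content_type and content_type.startswith('text/html'):
    req.get_method = lambda: 'GET'       # content type is acceptable; fetch it
    response = urlopen(req, timeout=10)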