예제 #1
0
def install_tarball(spec, args):
    """Install a binary tarball for ``spec`` and, recursively, its
    link/run dependencies, from the configured build cache.

    Args:
        spec: spec (or spec-like object) to install from the build cache.
        args: parsed command-line namespace; reads ``force``, ``sha256``,
            ``allow_root`` and ``unsigned``.

    Raises:
        spack.binary_distribution.NoChecksumException: if ``args.sha256``
            is given and the downloaded tarball does not match it.
    """
    s = Spec(spec)
    # External and virtual packages have no binaries in the cache.
    if s.external or s.virtual:
        tty.warn("Skipping external or virtual package %s" % spec.format())
        return
    # Install dependencies first so they are in place before this spec.
    for d in s.dependencies(deptype=('link', 'run')):
        tty.msg("Installing buildcache for dependency spec %s" % d)
        install_tarball(d, args)
    package = spack.repo.get(spec)
    if s.concrete and package.installed and not args.force:
        tty.warn("Package for spec %s already installed." % spec.format())
    else:
        tarball = bindist.download_tarball(spec)
        if tarball:
            # Optionally verify the download against a caller-supplied digest.
            if args.sha256:
                checker = spack.util.crypto.Checker(args.sha256)
                msg = ('cannot verify checksum for "{0}"' ' [expected={1}]')
                msg = msg.format(tarball, args.sha256)
                if not checker.check(tarball):
                    raise spack.binary_distribution.NoChecksumException(msg)
                tty.debug('Verified SHA256 checksum of the build cache')

            tty.msg('Installing buildcache for spec %s' % spec.format())
            bindist.extract_tarball(spec, tarball, args.allow_root,
                                    args.unsigned, args.force)
            # Run hooks and register in the DB only after extraction succeeds.
            spack.hooks.post_install(spec)
            spack.store.db.add(spec, spack.store.layout)
        else:
            tty.die('Download of binary cache file for spec %s failed.' %
                    spec.format())
예제 #2
0
def install_single_tarball(spec,
                           args,
                           catch_exceptions=False,
                           perform_post_install=True):
    """Install a single tarball for given spec.
    NOTE: Does NOT install dependencies!

    Args:
        spec: spec to install from the binary build cache.
        args: parsed command namespace; reads ``force``, ``allow_root``
            and ``unsigned``.
        catch_exceptions: if True, record errors in the return value
            instead of raising/dying (useful in multiprocessing workers).
        perform_post_install: if False, skip post-install hooks so a
            caller can run them once all tarballs are extracted.

    Returns:
        dict with keys ``reindex`` (True if the store was touched and may
        need reindexing) and ``error`` (message string, or None).
    """
    s = Spec(spec)

    # use return value dictionary in order to allow for additional return
    # values in the future
    retval = {
        "reindex": False,
        "error": None,
    }

    # External and virtual packages have no binaries in the cache.
    if s.external or s.virtual:
        tty.warn("Skipping external or virtual package %s" % spec.format())
        return retval

    package = spack.repo.get(spec)
    if s.concrete and package.installed and not args.force:
        tty.warn("Package for spec %s already installed." % spec.format())
    else:
        tarball = bindist.download_tarball(spec)
        if tarball:
            tty.msg('Installing buildcache for spec %s' % spec.format())
            try:
                bindist.extract_tarball(spec, tarball, args.allow_root,
                                        args.unsigned, args.force)
                if perform_post_install:
                    # In multiprocessing situations, post_install-hooks have to
                    # be triggered after all tar balls have been extracted as
                    # functionatility might depend on files being present
                    # (i.e., extracted).
                    spack.hooks.post_install(spec)
                spack.store.db.add(spec, spack.store.layout)
            except spack.error.SpackError as e:
                if catch_exceptions:
                    retval["error"] = "Spec %s: %s" % (spec.format(), str(e))
                else:
                    raise e
            finally:
                # Extraction may have partially modified the store even on
                # failure, so always signal that a reindex may be needed.
                retval["reindex"] = True
        else:
            retval["error"] = 'Download of binary cache file for spec '\
                              '%s failed.' % spec.format()
            if not catch_exceptions:
                tty.die(retval["error"])
    return retval
예제 #3
0
    def get_relative_projection_for_spec(self, spec):
        """Return the projected relative path for ``spec``, or '' when no
        projection applies."""
        # Extensions are placed by their extendee, not by their own spec
        extendee = spec.package.extendee_spec
        if extendee:
            spec = extendee

        projection = spack.projections.get_projection(self.projections, spec)
        if not projection:
            return ''
        return spec.format(projection)
예제 #4
0
def _add_single_spec(spec, mirror, mirror_stats):
    """Cache ``spec`` (and its patches) into ``mirror``, retrying up to
    three times, and record failures in ``mirror_stats``.

    Args:
        spec: concrete spec whose sources should be mirrored.
        mirror: mirror cache to populate.
        mirror_stats: statistics collector; ``error()`` is called on failure.
    """
    tty.msg("Adding package {pkg} to mirror".format(
        pkg=spec.format("{name}{@version}")))
    num_retries = 3
    while num_retries > 0:
        try:
            # Stage the package itself, then every patch that needs staging.
            with spec.package.stage as pkg_stage:
                pkg_stage.cache_mirror(mirror, mirror_stats)
                for patch in spec.package.all_patches():
                    if patch.stage:
                        patch.stage.cache_mirror(mirror, mirror_stats)
                    patch.clean()
            exception = None
            break
        except Exception as e:
            # Remember the most recent failure's traceback for reporting.
            exc_tuple = sys.exc_info()
            exception = e
        num_retries -= 1

    if exception:
        if spack.config.get('config:debug'):
            # In debug mode, show the full traceback of the last failure.
            traceback.print_exception(file=sys.stderr, *exc_tuple)
        else:
            tty.warn(
                "Error while fetching %s" % spec.cformat('{name}{@version}'),
                getattr(exception, 'message', exception))
        mirror_stats.error()
예제 #5
0
파일: buildcache.py 프로젝트: rtohid/spack
def createtarball(args):
    """create a binary package from an existing install

    Specs come either from a spec yaml file (``args.spec_yaml``) or from
    positional package arguments (``args.packages``); link/run
    dependencies are packaged as well.  Tarballs are written under
    ``args.directory`` (default: current directory).
    """
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            # Refer to the spec by DAG hash to match it unambiguously.
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()
    outdir = '.'
    if args.directory:
        outdir = args.directory
    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.msg('Found at least one matching spec')

    for match in matches:
        tty.msg('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            # External and virtual specs have no binaries to package.
            tty.msg('skipping external or virtual spec %s' % match.format())
        else:
            tty.msg('adding matching spec %s' % match.format())
            specs.add(match)
            tty.msg('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.msg('skipping external or virtual dependency %s' %
                            node.format())
                else:
                    tty.msg('adding dependency %s' % node.format())
                    specs.add(node)

    tty.msg('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        bindist.build_tarball(spec, outdir, args.force, args.rel,
                              args.unsigned, args.allow_root, signkey,
                              not args.no_rebuild_index)
예제 #6
0
    def relative_path_for_spec(self, spec):
        """Relative install path for a concrete spec under this layout."""
        _check_concrete(spec)

        # Externals are not placed under the layout; use their own path.
        if spec.external:
            return spec.external_path

        return spec.format(self.path_scheme)
예제 #7
0
    def relative_path_for_spec(self, spec):
        """Return the path of ``spec`` relative to the install root."""
        _check_concrete(spec)

        if spec.external:
            # External packages keep their recorded external location.
            return spec.external_path

        formatted = spec.format(self.path_scheme)
        return formatted
예제 #8
0
def create(path, specs, skip_unstable_versions=False):
    """Create a directory to be used as a spack mirror, and fill it with
    package archives.

    Arguments:
        path: Path to create a mirror directory hierarchy in.
        specs: Any package versions matching these specs will be added \
            to the mirror.
        skip_unstable_versions: if true, this skips adding resources when
            they do not have a stable archive checksum (as determined by
            ``fetch_strategy.stable_target``)

    Return Value:
        Returns a tuple of lists: (present, mirrored, error)

        * present:  Package specs that were already present.
        * mirrored: Package specs that were successfully mirrored.
        * error:    Package specs that failed to mirror due to some error.

    This routine iterates through all known package versions, and
    it creates specs for those versions.  If the version satisfies any spec
    in the specs list, it is downloaded and added to the mirror.
    """
    parsed = url_util.parse(path)
    mirror_root = url_util.local_file_path(parsed)
    # Mirrors can only be created on the local filesystem.
    if not mirror_root:
        raise spack.error.SpackError(
            'MirrorCaches only work with file:// URLs')

    # automatically spec-ify anything in the specs array.
    specs = [
        s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s)
        for s in specs
    ]

    # Get the absolute path of the root before we start jumping around.
    if not os.path.isdir(mirror_root):
        try:
            mkdirp(mirror_root)
        except OSError as e:
            raise MirrorError("Cannot create directory '%s':" % mirror_root,
                              str(e))

    mirror_cache = spack.caches.MirrorCache(
        mirror_root, skip_unstable_versions=skip_unstable_versions)
    mirror_stats = MirrorStats()

    # Iterate through packages and download all safe tarballs for each
    for spec in specs:
        # Packages without code (e.g. bundles) have nothing to mirror.
        if spec.package.has_code:
            mirror_stats.next_spec(spec)
            _add_single_spec(spec, mirror_cache, mirror_stats)
        else:
            tty.msg("Skipping package {pkg} without code".format(
                pkg=spec.format("{name}{@version}")))

    return mirror_stats.stats()
예제 #9
0
def create_single_tarball(spec, outdir, force, relative, unsigned, allow_root,
                          signkey, rebuild_index, catch_exceptions):
    """Build one binary cache tarball for ``spec``.

    Returns:
        dict with key ``error``: an error message string, or None on
        success.  ``catch_exceptions=True`` routes Spack errors into the
        return value instead of re-raising (for multiprocessing workers).
    """
    # Workers may hand us a serialized spec; rebuild and concretize it.
    if isinstance(spec, dict):
        spec = spack.spec.Spec.from_dict(spec)
        spec.concretize()

    tty.msg('creating binary cache file for package %s ' % spec.format())
    # use return value dictionary in order to allow for additional return
    # values in the future
    retval = {"error": None}
    try:
        bindist.build_tarball(spec, outdir, force, relative, unsigned,
                              allow_root, signkey, rebuild_index)
    except spack.error.SpackError as e:
        if not catch_exceptions:
            # if we are not multiproccessing we can re-raise the exception
            raise e
        retval["error"] = "Spec %s: %s" % (spec.format(), str(e))
    return retval
예제 #10
0
def listspecs(args):
    """list binary packages available from mirrors

    With ``args.packages``, prints a header and matching specs per
    constraint; otherwise lists every spec in the build cache.
    """
    specs = bindist.get_specs(args.force)
    if args.packages:
        pkgs = set(args.packages)
        for pkg in pkgs:
            # Fix: the header previously interpolated the whole ``pkgs``
            # set; report the single constraint being matched instead.
            tty.msg("buildcache spec(s) matching " +
                    "%s and commands to install them" % pkg)
            for spec in sorted(specs):
                if spec.satisfies(pkg):
                    tty.msg('Enter\nspack buildcache install /%s\n' %
                            spec.dag_hash(7) +
                            ' to install "%s"' %
                            spec.format())
    else:
        tty.msg("buildcache specs and commands to install them")
        for spec in sorted(specs):
            tty.msg('Enter\nspack buildcache install /%s\n' %
                    spec.dag_hash(7) +
                    ' to install "%s"' %
                    spec.format())
예제 #11
0
def fixup_macos_rpaths(spec):
    """Remove duplicate rpaths and make shared library IDs relocatable.

    Some autotools packages write their own ``-rpath`` entries in addition to
    those implicitly added by the Spack compiler wrappers. On Linux these
    duplicate rpaths are eliminated, but on macOS they result in multiple
    entries which makes it harder to adjust with ``install_name_tool
    -delete_rpath``.

    Furthermore, many autotools programs (on macOS) set a library's install
    paths to use absolute paths rather than relative paths.

    Returns:
        False for external or virtual specs; otherwise None (fix-ups are
        applied in place and the count is reported via tty).

    Raises:
        NotImplementedError: if the spec is not a darwin-platform spec.
        RuntimeError: if the spec's install prefix does not exist.
    """
    if spec.external or spec.virtual:
        tty.warn(
            'external or virtual package cannot be fixed up: {0!s}'.format(
                spec))
        return False

    if 'platform=darwin' not in spec:
        raise NotImplementedError('fixup_macos_rpaths requires macOS')

    # Count of binaries that actually needed a fix-up.
    applied = 0

    # Only descend into directories that typically contain binaries.
    libs = frozenset(
        ['lib', 'lib64', 'libexec', 'plugins', 'Library', 'Frameworks'])
    prefix = spec.prefix

    if not os.path.exists(prefix):
        # NOTE(review): {0} is the prefix and {1} is the spec name, so the
        # message reads oddly; the placeholders look swapped — confirm.
        raise RuntimeError(
            'Could not fix up install prefix spec {0} because it does '
            'not exist: {1!s}'.format(prefix, spec.name))

    # Explore the installation prefix of the spec
    for root, dirs, files in os.walk(prefix, topdown=True):
        # Prune traversal in place to the allowed library directories.
        dirs[:] = set(dirs) & libs
        for name in files:
            try:
                needed_fix = fixup_macos_rpath(root, name)
            except Exception as e:
                # Best-effort: one bad binary should not abort fix-ups for
                # the rest of the prefix.
                tty.warn(
                    "Failed to apply library fixups to: {0}/{1}: {2!s}".format(
                        root, name, e))
                needed_fix = False
            if needed_fix:
                applied += 1

    specname = spec.format('{name}{/hash:7}')
    if applied:
        tty.info('Fixed rpaths for {0:d} {1} installed to {2}'.format(
            applied, "binary" if applied == 1 else "binaries", specname))
    else:
        tty.debug('No rpath fixup needed for ' + specname)
예제 #12
0
파일: find.py 프로젝트: scrobey/spack
def find(parser, args):
    """Print installed specs matching the query specs.

    NOTE(review): this snippet uses Python 2 ``print`` statement syntax.
    """
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.db.exists(s.name))

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
        msg += ", ".join(s.name for s in nonexisting)
        tty.msg(msg)

        # Every queried package was nonexistent: nothing to display.
        if not query_specs:
            return

    # Installed specs satisfying at least one query (all, if no query).
    specs = [
        s for s in spack.db.installed_package_specs()
        if not query_specs or any(s.satisfies(q) for q in query_specs)
    ]

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, 'architecture', 'compiler')

    # Traverse the index and print out each package
    for architecture in index:
        tty.hline(architecture, char='=', color=spack.spec.architecture_color)
        for compiler in index[architecture]:
            tty.hline(compiler, char='-', color=spack.spec.compiler_color)

            specs = index[architecture][compiler]
            specs.sort()

            abbreviated = [s.format('$_$@$+$#', color=True) for s in specs]

            if args.paths:
                # Print one spec per line along with prefix path
                width = max(len(s) for s in abbreviated)
                width += 2
                format = "    %-{}s%s".format(width)

                for abbrv, spec in zip(abbreviated, specs):
                    print format % (abbrv, spec.prefix)

            elif args.full_specs:
                for spec in specs:
                    print spec.tree(indent=4, format='$_$@$+', color=True),
            else:
                # Right-pad names so versions and variants line up.
                max_len = max([len(s.name) for s in specs])
                max_len += 4

                for spec in specs:
                    format = '$-' + str(max_len) + '_$@$+$#'
                    print "   " + spec.format(format, color=True)
예제 #13
0
파일: common.py 프로젝트: key4hep/spack
def test_modules_default_symlink(module_type, mock_packages,
                                 mock_module_filename, mock_module_defaults,
                                 config):
    """The 'default' symlink must point at the module file of the spec
    marked as default."""
    spec = spack.spec.Spec('[email protected]').concretized()
    mock_module_defaults(spec.format('{name}{@version}'))

    writer_cls = spack.modules.module_types[module_type]
    writer_cls(spec, 'default').write()

    default_link = os.path.join(
        os.path.dirname(mock_module_filename), 'default')
    assert os.path.islink(default_link)
    assert os.readlink(default_link) == mock_module_filename
예제 #14
0
def install_tarball(spec, args):
    """Install ``spec`` (and, recursively, its link/run dependencies)
    from the binary build cache, then reindex the store."""
    s = spack.spec.Spec(spec)
    if s.external or s.virtual:
        tty.warn("Skipping external or virtual package %s" % spec.format())
        return

    # Make sure dependencies are in place before the package itself.
    for dep in s.dependencies(deptype=('link', 'run')):
        tty.msg("Installing buildcache for dependency spec %s" % dep)
        install_tarball(dep, args)

    package = spack.repo.get(spec)
    already_installed = s.concrete and package.installed and not args.force
    if already_installed:
        tty.warn("Package for spec %s already installed." % spec.format())
        return

    tarball = bindist.download_tarball(spec)
    if not tarball:
        tty.die('Download of binary cache file for spec %s failed.' %
                spec.format())

    tty.msg('Installing buildcache for spec %s' % spec.format())
    bindist.extract_tarball(spec, tarball, args.allow_root,
                            args.unsigned, args.force)
    spack.hooks.post_install(spec)
    spack.store.store.reindex()
예제 #15
0
파일: find.py 프로젝트: dshrader/spack
def find(parser, args):
    """Print installed specs matching the query specs.

    NOTE(review): this snippet uses Python 2 ``print`` statement syntax
    and, unlike the sibling variant, reads ``spec.package.prefix``.
    """
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.db.exists(s.name))

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
        msg += ", ".join(s.name for s in nonexisting)
        tty.msg(msg)

        # Every queried package was nonexistent: nothing to display.
        if not query_specs:
            return

    # Installed specs satisfying at least one query (all, if no query).
    specs = [s for s in spack.db.installed_package_specs()
             if not query_specs or any(s.satisfies(q) for q in query_specs)]

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, 'architecture', 'compiler')

    # Traverse the index and print out each package
    for architecture in index:
        tty.hline(architecture, char='=', color=spack.spec.architecture_color)
        for compiler in index[architecture]:
            tty.hline(compiler, char='-', color=spack.spec.compiler_color)

            specs = index[architecture][compiler]
            specs.sort()

            abbreviated = [s.format('$_$@$+$#', color=True) for s in specs]

            if args.paths:
                # Print one spec per line along with prefix path
                width = max(len(s) for s in abbreviated)
                width += 2
                format = "    %-{}s%s".format(width)

                for abbrv, spec in zip(abbreviated, specs):
                    print format % (abbrv, spec.package.prefix)

            elif args.full_specs:
                for spec in specs:
                    print spec.tree(indent=4, format='$_$@$+', color=True),
            else:
                # Right-pad names so versions and variants line up.
                max_len = max([len(s.name) for s in specs])
                max_len += 4

                for spec in specs:
                    format = '$-' + str(max_len) + '_$@$+$#'
                    print "   " + spec.format(format, color=True)
예제 #16
0
파일: database.py 프로젝트: justintoo/spack
    def _assign_dependencies(self, hash_key, installs, data):
        """Wire the spec at ``hash_key`` to its dependency specs.

        Args:
            hash_key: DAG hash of the spec whose deps are being assigned.
            installs: raw install records read from the database file.
            data: hash -> record mapping of already-constructed specs.
        """
        # Add dependencies from other records in the install DB to
        # form a full spec.
        spec = data[hash_key].spec
        spec_dict = installs[hash_key]['spec']

        if 'dependencies' in spec_dict[spec.name]:
            yaml_deps = spec_dict[spec.name]['dependencies']
            for dname, dhash, dtypes in spack.spec.Spec.read_yaml_dep_specs(
                    yaml_deps):
                # Warn about (and skip) dependencies whose records are gone.
                if dhash not in data:
                    tty.warn("Missing dependency not in database: ",
                             "%s needs %s-%s" % (
                                 spec.format('$_$/'), dname, dhash[:7]))
                    continue

                child = data[dhash].spec
                spec._add_dependency(child, dtypes)
예제 #17
0
    def _assign_dependencies(self, hash_key, installs, data):
        """Attach dependency specs to the spec stored under ``hash_key``.

        Args:
            hash_key: DAG hash identifying the spec in ``data``.
            installs: raw install records from the database file.
            data: hash -> record mapping of reconstructed specs.
        """
        # Add dependencies from other records in the install DB to
        # form a full spec.
        spec = data[hash_key].spec
        spec_dict = installs[hash_key]['spec']

        if 'dependencies' in spec_dict[spec.name]:
            yaml_deps = spec_dict[spec.name]['dependencies']
            for dname, dhash, dtypes in spack.spec.Spec.read_yaml_dep_specs(
                    yaml_deps):
                # Skip dependencies whose database records are missing.
                if dhash not in data:
                    tty.warn(
                        "Missing dependency not in database: ",
                        "%s needs %s-%s" %
                        (spec.format('$_$#'), dname, dhash[:7]))
                    continue

                child = data[dhash].spec
                spec._add_dependency(child, dtypes)
예제 #18
0
def createtarball(args):
    """create a binary package from an existing install

    Packages each spec named in ``args.packages`` plus its link/run
    dependencies into the build cache under ``args.directory`` (or the
    current directory), optionally signing with ``args.key``.
    """
    if not args.packages:
        tty.die("build cache file creation requires at least one" +
                " installed package argument")
    pkgs = set(args.packages)
    specs = set()
    outdir = '.'
    if args.directory:
        outdir = args.directory
    signkey = None
    if args.key:
        signkey = args.key

    matches = find_matching_specs(pkgs, False, False)
    for match in matches:
        if match.external or match.virtual:
            # External and virtual specs have no binaries to package.
            tty.msg('skipping external or virtual spec %s' %
                    match.format())
        else:
            tty.msg('adding matching spec %s' % match.format())
            specs.add(match)
            tty.msg('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.msg('skipping external or virtual dependency %s' %
                            node.format())
                else:
                    tty.msg('adding dependency %s' % node.format())
                    specs.add(node)

    tty.msg('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        bindist.build_tarball(spec, outdir, args.force, args.rel,
                              args.unsigned, args.allow_root, signkey)
예제 #19
0
    def relative_path_for_spec(self, spec):
        """Relative install path for ``spec``, formatted with the
        configured projection."""
        _check_concrete(spec)

        projection = spack.projections.get_projection(self.projections, spec)
        return spec.format(projection)
예제 #20
0
def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
                   rel, unsigned, allow_root, no_rebuild_index):
    """Create build cache tarballs for the requested specs.

    Specs come from ``spec_yaml`` (path to a spec yaml file) or from
    ``packages``; matching is restricted to ``env`` when one is active.
    Tarballs are written to ``directory`` (default: current directory),
    resolved through the mirror collection's push URL.  ``no_deps``
    skips packaging link/run dependencies.
    """
    if spec_yaml:
        packages = set()
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            # Refer to the spec by DAG hash to match it unambiguously.
            packages.add('/{0}'.format(s.dag_hash()))

    elif packages:
        packages = packages

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    outdir = '.'
    if directory:
        outdir = directory

    # Resolve the output directory through the mirror's push URL.
    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if key:
        signkey = key

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            # External and virtual specs have no binaries to package.
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            specs.add(match)
            if no_deps is True:
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, rel, unsigned,
                                  allow_root, signkey, not no_rebuild_index)
        except Exception as e:
            # Best-effort: warn and continue with the remaining specs.
            tty.warn('%s' % e)
            pass
예제 #21
0
def _createtarball(env,
                   spec_yaml=None,
                   packages=None,
                   add_spec=True,
                   add_deps=True,
                   output_location=os.getcwd(),
                   signing_key=None,
                   force=False,
                   make_relative=False,
                   unsigned=False,
                   allow_root=False,
                   rebuild_index=False):
    """Create build cache tarballs for matching installed specs.

    Specs come from ``spec_yaml`` (path to a spec yaml file), from
    ``packages``, or from the concretized roots of ``env``.  Only specs
    found installed in the store are packaged; ``add_spec``/``add_deps``
    control whether roots and dependencies are included.  Tarballs go to
    ``output_location`` resolved through the mirror's push URL.
    """
    if spec_yaml:
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            # Refer to the spec by DAG hash to match it unambiguously.
            package = '/{0}'.format(s.dag_hash())
            matches = find_matching_specs(package, env=env)

    elif packages:
        matches = find_matching_specs(packages, env=env)

    elif env:
        matches = [env.specs_by_hash[h] for h in env.concretized_order]

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an active environment," +
                " or else a path to a yaml file containing a spec" +
                " to install")
    specs = set()

    # Resolve the output location through the mirror's push URL.
    mirror = spack.mirror.MirrorCollection().lookup(output_location)
    outdir = url_util.format(mirror.push_url)

    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            # External and virtual specs have no binaries to package.
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            # Only installed specs can be packaged.
            lookup = spack.store.db.query_one(match)

            if not add_spec:
                tty.debug('skipping matching root spec %s' % match.format())
            elif lookup is None:
                tty.debug('skipping uninstalled matching spec %s' %
                          match.format())
            else:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)

            if not add_deps:
                continue

            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                # skip root, since it's handled above
                if d == 0:
                    continue

                lookup = spack.store.db.query_one(node)

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                elif lookup is None:
                    tty.debug('skipping uninstalled depenendency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, make_relative, unsigned,
                                  allow_root, signing_key, rebuild_index)
        except bindist.NoOverwriteException as e:
            # An existing tarball without --force is not fatal; warn and go on.
            tty.warn(e)
예제 #22
0
def display_specs(specs, **kwargs):
    """Pretty-print specs grouped by architecture and compiler.

    NOTE(review): this snippet uses Python 2 ``print`` statement syntax.

    Keyword Args:
        mode: 'short' (columns), 'paths' (with install prefix) or
            'deps' (dependency trees).
        long: show 7-character hash prefixes.
        very_long: show full hashes (implies ``long``).
        namespace: include the namespace in spec names.
        show_flags: include compiler flags in the format.
        variants: include variants in the format.
    """
    mode = kwargs.get('mode', 'short')
    hashes = kwargs.get('long', False)
    namespace = kwargs.get('namespace', False)
    flags = kwargs.get('show_flags', False)
    variants = kwargs.get('variants', False)

    hlen = 7
    if kwargs.get('very_long', False):
        hashes = True
        hlen = None

    # Build the spec format string from the requested options.
    nfmt = '.' if namespace else '_'
    ffmt = '$%+' if flags else ''
    vfmt = '$+' if variants else ''
    format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, ('architecture', 'compiler'))

    # Traverse the index and print out each package
    for i, (architecture, compiler) in enumerate(sorted(index)):
        # Blank line between groups.
        if i > 0:
            print

        header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
                                      architecture, spack.spec.compiler_color,
                                      compiler)
        tty.hline(colorize(header), char='-')

        specs = index[(architecture, compiler)]
        specs.sort()

        abbreviated = [s.format(format_string, color=True) for s in specs]
        if mode == 'paths':
            # Print one spec per line along with prefix path
            width = max(len(s) for s in abbreviated)
            width += 2
            format = "    %%-%ds%%s" % width

            for abbrv, spec in zip(abbreviated, specs):
                prefix = gray_hash(spec, hlen) if hashes else ''
                print prefix + (format % (abbrv, spec.prefix))

        elif mode == 'deps':
            for spec in specs:
                print(spec.tree(
                    format=format_string,
                    color=True,
                    indent=4,
                    prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))

        elif mode == 'short':
            # Print columns of output if not printing flags
            if not flags:

                def fmt(s):
                    # One cell: optional hash prefix plus formatted spec.
                    string = ""
                    if hashes:
                        string += gray_hash(s, hlen) + ' '
                    string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)

                    return string

                colify(fmt(s) for s in specs)
            # Print one entry per line if including flags
            else:
                for spec in specs:
                    # Print the hash if necessary
                    hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
                    print(hsh + spec.format(format_string, color=True) + '\n')

        else:
            raise ValueError(
                "Invalid mode for display_specs: %s. Must be one of (paths,"
                "deps, short)." % mode)
예제 #23
0
def display_specs(specs, **kwargs):
    """Pretty-print specs grouped by architecture and compiler.

    NOTE(review): mixes Python 2 ``print`` statements (the bare ``print``
    blank-line) with ``print(...)`` calls.

    Keyword Args:
        mode: 'short' (columns), 'paths' (with install prefix) or
            'deps' (dependency trees).
        long: show 7-character hash prefixes.
        very_long: show full hashes (implies ``long``).
        namespace: include the namespace in spec names.
        show_flags: include compiler flags in the format.
    """
    mode = kwargs.get('mode', 'short')
    hashes = kwargs.get('long', False)
    namespace = kwargs.get('namespace', False)

    hlen = 7
    if kwargs.get('very_long', False):
        hashes = True
        hlen = None

    # Build the spec format string from the requested options.
    nfmt = '.' if namespace else '_'
    format_string = '$%s$@$+' % nfmt
    flags = kwargs.get('show_flags', False)
    if flags:
        format_string = '$%s$@$%%+$+' % nfmt

    # Make a dict with specs keyed by architecture and compiler.
    index = index_by(specs, ('architecture', 'compiler'))

    # Traverse the index and print out each package
    for i, (architecture, compiler) in enumerate(sorted(index)):
        # Blank line between groups.
        if i > 0:
            print

        header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
                                      architecture, spack.spec.compiler_color,
                                      compiler)
        tty.hline(colorize(header), char='-')

        specs = index[(architecture, compiler)]
        specs.sort()

        abbreviated = [s.format(format_string, color=True) for s in specs]
        if mode == 'paths':
            # Print one spec per line along with prefix path
            width = max(len(s) for s in abbreviated)
            width += 2
            format = "    %%-%ds%%s" % width

            for abbrv, spec in zip(abbreviated, specs):
                if hashes:
                    print(gray_hash(spec, hlen), )
                print(format % (abbrv, spec.prefix))

        elif mode == 'deps':
            for spec in specs:
                print(
                    spec.tree(format=format_string,
                              color=True,
                              indent=4,
                              prefix=(lambda s: gray_hash(s, hlen))
                              if hashes else None))

        elif mode == 'short':
            # Print columns of output if not printing flags
            if not flags:

                def fmt(s):
                    # One cell: optional hash prefix plus formatted spec.
                    string = ""
                    if hashes:
                        string += gray_hash(s, hlen) + ' '
                    string += s.format('$-%s$@$+' % nfmt, color=True)

                    return string

                colify(fmt(s) for s in specs)
            # Print one entry per line if including flags
            else:
                for spec in specs:
                    # Print the hash if necessary
                    hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
                    print(hsh + spec.format(format_string, color=True) + '\n')

        else:
            raise ValueError(
                "Invalid mode for display_specs: %s. Must be one of (paths,"
                "deps, short)." % mode)  # NOQA: ignore=E501
예제 #24
0
    def relative_path_for_spec(self, spec):
        """Return spec's path relative to the install root, using this
        layout's path scheme."""
        _check_concrete(spec)

        return spec.format(self.path_scheme)