Example #1
def versions(parser, args):
    pkg = spack.repo.get(args.package)

    tty.msg('Safe versions (already checksummed):')

    safe_versions = pkg.versions

    if not safe_versions:
        print('  Found no versions for {0}'.format(pkg.name))
        tty.debug('Manually add versions to the package.')
    else:
        colify(sorted(safe_versions, reverse=True), indent=2)

    tty.msg('Remote versions (not yet checksummed):')

    fetched_versions = pkg.fetch_remote_versions()
    remote_versions = set(fetched_versions).difference(safe_versions)

    if not remote_versions:
        if not fetched_versions:
            print('  Found no versions for {0}'.format(pkg.name))
            tty.debug('Check the list_url and list_depth attributes of the '
                      'package to help Spack find versions.')
        else:
            print('  Found no unchecksummed versions for {0}'.format(pkg.name))
    else:
        colify(sorted(remote_versions, reverse=True), indent=2)
Example #2
def get_name(args):
    """Get the name of the package based on the supplied arguments.

    If a name was provided, always use that. Otherwise, if a URL was
    provided, extract the name from that. Otherwise, use a default.

    :param argparse.Namespace args: The arguments given to ``spack create``

    :returns: The name of the package
    :rtype: str
    """

    # Default package name
    name = 'example'

    if args.name:
        # Use a user-supplied name if one is present
        name = args.name
        tty.msg("Using specified package name: '{0}'".format(name))
    elif args.url:
        # Try to guess the package name based on the URL
        try:
            name = spack.url.parse_name(args.url)
            tty.msg("This looks like a URL for {0}".format(name))
        except spack.url.UndetectableNameError:
            tty.die("Couldn't guess a name for this package.",
                    "  Please report this bug. In the meantime, try running:",
                    "  `spack create --name <name> <url>`")

    if not valid_fully_qualified_module_name(name):
        tty.die("Package name can only contain a-z, 0-9, and '-'")

    return name
Example #3
def rm(mtype, specs, args):
    """Deletes module files associated with items in specs"""
    module_cls = module_types[mtype]
    specs_with_modules = [
        spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
    modules = [module_cls(spec) for spec in specs_with_modules]

    if not modules:
        tty.msg('No module file matches your query')
        raise SystemExit(1)

    # Ask for confirmation
    if not args.yes_to_all:
        tty.msg(
            'You are about to remove {0} module files for the following specs:\n'
            .format(mtype))
        spack.cmd.display_specs(specs_with_modules, long=True)
        print('')
        answer = tty.get_yes_or_no('Do you want to proceed?')
        if not answer:
            tty.die('Will not remove any module files')

    # Remove the module files
    for s in modules:
        s.remove()
Example #4
def find(parser, args):
    # Filter out specs that don't exist.
    query_specs = spack.cmd.parse_specs(args.query_specs)
    query_specs, nonexisting = partition_list(
        query_specs, lambda s: spack.db.exists(s.name))

    if nonexisting:
        msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
        msg += ", ".join(s.name for s in nonexisting)
        tty.msg(msg)

        if not query_specs:
            return

    # Get all the specs the user asked for
    if not query_specs:
        specs = set(spack.db.installed_package_specs())
    else:
        results = [set(spack.db.get_installed(qs)) for qs in query_specs]
        specs = set.union(*results)

    if not args.mode:
        args.mode = 'short'

    if sys.stdout.isatty():
        tty.msg("%d installed packages." % len(specs))
    display_specs(specs, mode=args.mode)
Example #5
    def child_execution(child_connection, input_stream):
        try:
            setup_package(pkg, dirty=dirty)
            function(input_stream)
            child_connection.send(None)
        except StopIteration as e:
            # StopIteration is used to stop installations
            # before the final stage, mainly for debug purposes
            tty.msg(str(e))
            child_connection.send(None)
        except:
            # catch ANYTHING that goes wrong in the child process
            exc_type, exc, tb = sys.exc_info()

            # Need to unwind the traceback in the child because traceback
            # objects can't be sent to the parent.
            tb_string = traceback.format_exc()

            # build up some context from the offending package so we can
            # show that, too.
            package_context = get_package_context(tb)

            build_log = None
            if hasattr(pkg, 'log_path'):
                build_log = pkg.log_path

            # make a pickleable exception to send to parent.
            msg = "%s: %s" % (str(exc_type.__name__), str(exc))

            ce = ChildError(msg, tb_string, build_log, package_context)
            child_connection.send(ce)

        finally:
            child_connection.close()
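
The parent-side counterpart is not shown; here is a minimal hedged sketch of how a caller might drive this over a multiprocessing Pipe (the name forked_build and the wiring are assumptions, not Spack's actual code):

import multiprocessing

def forked_build(input_stream):
    # child_execution (defined in the same scope, as above) sends None on
    # success or a pickleable ChildError on failure, then closes its end.
    parent_conn, child_conn = multiprocessing.Pipe()
    proc = multiprocessing.Process(
        target=child_execution, args=(child_conn, input_stream))
    proc.start()
    error = parent_conn.recv()  # blocks until the child reports back
    proc.join()
    if error is not None:
        raise error  # surface the child's failure in the parent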
Example #6
def create(parser, args):
    # Gather information about the package to be created
    name = get_name(args)
    url = get_url(args)
    versions, guesser = get_versions(args, name)
    build_system = get_build_system(args, guesser)

    # Create the package template object
    PackageClass = templates[build_system]
    package = PackageClass(name, url, versions)
    tty.msg("Created template for {0} package".format(package.name))

    # Create a directory for the new package
    repo = get_repository(args, name)
    pkg_path = repo.filename_for_package_name(package.name)
    if os.path.exists(pkg_path) and not args.force:
        tty.die('{0} already exists.'.format(pkg_path),
                '  Try running `spack create --force` to overwrite it.')
    else:
        mkdirp(os.path.dirname(pkg_path))

    # Write the new package file
    package.write(pkg_path)
    tty.msg("Created package file: {0}".format(pkg_path))

    # Open up the new package file in your $EDITOR
    spack.editor(pkg_path)
Example #7
    def print_status(self, *specs, **kwargs):
        if kwargs.get("with_dependencies", False):
            specs = set(get_dependencies(specs))

        specs = sorted(specs, key=lambda s: s.name)
        in_view = list(map(self.get_spec, specs))

        for s, v in zip(specs, in_view):
            if not v:
                tty.error(self._croot +
                          'Package not linked: %s' % s.name)
            elif s != v:
                self.print_conflict(v, s, level="warn")

        in_view = list(filter(None, in_view))

        if len(specs) > 0:
            tty.msg("Packages linked in %s:" % self._croot[:-1])

            # avoid circular dependency
            import spack.cmd
            spack.cmd.display_specs(in_view, flags=True, variants=True,
                                    long=self.verbose)
        else:
            tty.warn(self._croot + "No packages found.")
Example #8
    def do_install(self, **kwargs):
        """This class should call this version of the install method.
           Package implementations should override install().
        """
        # whether to keep the prefix on failure.  Default is to destroy it.
        keep_prefix = kwargs.get('keep_prefix', False)
        keep_stage  = kwargs.get('keep_stage', False)
        ignore_deps = kwargs.get('ignore_deps', False)

        if not self.spec.concrete:
            raise ValueError("Can only install concrete packages.")

        if os.path.exists(self.prefix):
            tty.msg("%s is already installed in %s." % (self.name, self.prefix))
            return

        if not ignore_deps:
            self.do_install_dependencies()

        self.do_patch()

        # Fork a child process to do the build.  This allows each
        # package author to have full control over their environment,
        # etc. without affecting other builds that might be executed
        # in the same spack call.
        try:
            pid = os.fork()
        except OSError as e:
            raise InstallError("Unable to fork build process: %s" % e)
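
The example is cut off right after the fork; a hedged sketch of the pattern that typically follows (build_process is a hypothetical stand-in for the child-side work, not Spack's actual code):

        if pid == 0:
            # Child: run the build and exit without unwinding the
            # parent's stack or running its cleanup handlers.
            try:
                build_process()  # hypothetical child-side build steps
                os._exit(0)
            except Exception:
                os._exit(1)
        else:
            # Parent: wait for the child and translate a bad exit status.
            _, status = os.waitpid(pid, 0)
            if status != 0:
                raise InstallError("Install process had nonzero exit code.")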
Example #9
def fetch_tarballs(url, name, version):
    """Try to find versions of the supplied archive by scraping the web.

    Prompts the user to select how many to download if many are found.
    """
    versions = spack.util.web.find_versions_of_archive(url)
    rkeys = sorted(versions.keys(), reverse=True)
    versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))

    archives_to_fetch = 1
    if not versions:
        # If the fetch failed for some reason, revert to what the user provided
        versions = {version: url}
    elif len(versions) > 1:
        tty.msg("Found %s versions of %s:" % (len(versions), name),
                *spack.cmd.elide_list(
                    ["%-10s%s" % (v, u) for v, u in versions.items()]))
        print()
        archives_to_fetch = tty.get_number(
            "Include how many checksums in the package file?",
            default=5, abort='q')

        if not archives_to_fetch:
            tty.die("Aborted.")

    sorted_versions = sorted(versions.keys(), reverse=True)
    sorted_urls = [versions[v] for v in sorted_versions]
    return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch]
Example #10
    def expand(self):
        if not self.archive_file:
            raise NoArchiveFileError(
                "URLFetchStrategy couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        tty.msg("Staging archive: %s" % self.archive_file)
        self.stage.chdir()

        decompress = decompressor_for(self.archive_file)

        # Expand the archive in its own directory so that "exploding"
        # tarballs (those that dump files straight into the current
        # directory) stay contained.
        tarball_container = os.path.join(self.stage.path, "spack-expanded-archive")
        mkdirp(tarball_container)
        os.chdir(tarball_container)
        decompress(self.archive_file)

        # If the tarball *didn't* explode, move
        # the expanded directory up & remove the protector directory.
        files = os.listdir(tarball_container)
        if len(files) == 1:
            expanded_dir = os.path.join(tarball_container, files[0])
            if os.path.isdir(expanded_dir):
                shutil.move(expanded_dir, self.stage.path)
                os.rmdir(tarball_container)

        # Set the wd back to the stage when done.
        self.stage.chdir()
Example #11
def repo_remove(args):
    """Remove a repository from Spack's configuration."""
    repos = spack.config.get_config('repos', args.scope)
    path_or_namespace = args.path_or_namespace

    # If the argument is a path, remove that repository from config.
    canon_path = canonicalize_path(path_or_namespace)
    for repo_path in repos:
        repo_canon_path = canonicalize_path(repo_path)
        if canon_path == repo_canon_path:
            repos.remove(repo_path)
            spack.config.update_config('repos', repos, args.scope)
            tty.msg("Removed repository %s" % repo_path)
            return

    # If it is a namespace, remove corresponding repo
    for path in repos:
        try:
            repo = Repo(path)
            if repo.namespace == path_or_namespace:
                repos.remove(path)
                spack.config.update_config('repos', repos, args.scope)
                tty.msg("Removed repository %s with namespace '%s'."
                        % (repo.root, repo.namespace))
                return
        except RepoError:
            continue

    tty.die("No repository with path or namespace: %s"
            % path_or_namespace)
Example #12
File: env.py Project: LLNL/spack
def env_deactivate(args):
    if not args.shell:
        msg = [
            "This command works best with Spack's shell support",
            ""
        ] + spack.cmd.common.shell_init_instructions + [
            'Or, if you want to use `spack env deactivate` without initializing',
            'shell support, you can run one of these:',
            '',
            '    eval `spack env deactivate --sh`   # for bash/sh',
            '    eval `spack env deactivate --csh`  # for csh/tcsh',
        ]
        tty.msg(*msg)
        return 1

    if 'SPACK_ENV' not in os.environ:
        tty.die('No environment is currently active.')

    if args.shell == 'csh':
        sys.stdout.write('unsetenv SPACK_ENV;\n')
        sys.stdout.write('if ( $?SPACK_OLD_PROMPT ) '
                         'set prompt="$SPACK_OLD_PROMPT" && '
                         'unsetenv SPACK_OLD_PROMPT;\n')
        sys.stdout.write('unalias despacktivate;\n')

    else:
        sys.stdout.write('unset SPACK_ENV; export SPACK_ENV;\n')
        sys.stdout.write('unalias despacktivate;\n')
        sys.stdout.write('if [ -n "$SPACK_OLD_PS1" ]; then\n')
        sys.stdout.write('export PS1="$SPACK_OLD_PS1";\n')
        sys.stdout.write('unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n')
        sys.stdout.write('fi;\n')
Example #13
def get_matching_versions(specs, **kwargs):
    """Get a spec for EACH known version matching any spec in the list."""
    matching = []
    for spec in specs:
        pkg = spec.package

        # Skip any package that has no known versions.
        if not pkg.versions:
            tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
            continue

        num_versions = kwargs.get('num_versions', 0)
        matching_spec = []
        for i, v in enumerate(reversed(sorted(pkg.versions))):
            # Generate no more than num_versions versions for each spec.
            if num_versions and i >= num_versions:
                break

            # Generate only versions that satisfy the spec.
            if v.satisfies(spec.versions):
                s = Spec(pkg.name)
                s.versions = VersionList([v])
                s.variants = spec.variants.copy()
                # This is needed to avoid hanging references during the
                # concretization phase
                s.variants.spec = s
                matching_spec.append(s)

        if not matching_spec:
            tty.warn("No known version matches spec: %s" % spec)
        matching.extend(matching_spec)

    return matching
Example #14
    def fetch(self):
        """Downloads an archive or checks out code from a repository."""
        self.chdir()

        fetchers = [self.fetcher]

        # TODO: move mirror logic out of here and clean it up!
        if self.mirror_path:
            urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]

            digest = None
            if isinstance(self.fetcher, fs.URLFetchStrategy):
                digest = self.fetcher.digest
            fetchers = [fs.URLFetchStrategy(url, digest)
                        for url in urls] + fetchers
            for f in fetchers:
                f.set_stage(self)

        for fetcher in fetchers:
            try:
                fetcher.fetch()
                break
            except spack.error.SpackError as e:
                tty.msg("Fetching from %s failed." % fetcher)
                tty.debug(e)
                continue
Example #15
def relocate_package(workdir, allow_root):
    """
    Relocate the given package
    """
    buildinfo = read_buildinfo_file(workdir)
    new_path = spack.store.layout.root
    old_path = buildinfo['buildpath']
    rel = buildinfo.get('relative_rpaths', False)
    if rel:
        return

    tty.msg("Relocating package from",
            "%s to %s." % (old_path, new_path))
    path_names = set()
    for filename in buildinfo['relocate_textfiles']:
        path_name = os.path.join(workdir, filename)
        # Don't add backup files generated by filter_file during install step.
        if not path_name.endswith('~'):
            path_names.add(path_name)
    relocate.relocate_text(path_names, old_path, new_path)
    # The early return above guarantees the binaries were not built with
    # relative RPATHs, so their RPATHs need to be relocated here.
    path_names = set()
    for filename in buildinfo['relocate_binaries']:
        path_name = os.path.join(workdir, filename)
        path_names.add(path_name)
    relocate.relocate_binary(path_names, old_path, new_path, allow_root)
Example #16
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers and
       add them to Spack's configuration.

    """
    paths = args.add_paths
    if not paths:
        paths = get_path('PATH')

    # Don't initialize the compilers config via compilers.get_compiler_config:
    # just let compiler_find do the entire process and return an empty config
    # from all_compilers. (Any other process defaults to init_config=True.)
    compilers = spack.compilers.find_compilers(*paths)
    new_compilers = []
    for c in compilers:
        arch_spec = ArchSpec(None, c.operating_system, c.target)
        same_specs = spack.compilers.compilers_for_spec(c.spec,
                                                        arch_spec,
                                                        args.scope)

        if not same_specs:
            new_compilers.append(c)

    if new_compilers:
        spack.compilers.add_compilers_to_config(new_compilers,
                                                scope=args.scope,
                                                init_config=False)
        n = len(new_compilers)
        s = 's' if n > 1 else ''
        filename = spack.config.get_config_filename(args.scope, 'compilers')
        tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
        colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
    else:
        tty.msg("Found no new compilers")
Example #17
def _stop_at_phase_during_install(args, calling_fn, phase_mapping):
    if not args.package:
        tty.die("configure requires at least one package argument")

    # TODO: to be refactored with code in install
    specs = spack.cmd.parse_specs(args.package, concretize=True)
    if len(specs) != 1:
        tty.error('only one spec can be installed at a time.')
    spec = specs.pop()
    pkg = spec.package
    try:
        key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
        phase = phase_mapping[key]
        # Install package dependencies if needed
        parser = argparse.ArgumentParser()
        inst.setup_parser(parser)
        tty.msg('Checking dependencies for {0}'.format(args.package))
        cli_args = ['-v'] if args.verbose else []
        install_args = parser.parse_args(cli_args + ['--only=dependencies'])
        install_args.package = args.package
        inst.install(parser, install_args)
        # Install package and stop at the given phase
        cli_args = ['-v'] if args.verbose else []
        install_args = parser.parse_args(cli_args + ['--only=package'])
        install_args.package = args.package
        inst.install(parser, install_args, stop_at=phase)
    except IndexError:
        tty.error(
            'Package {0} has no {1} phase, or its {1} phase is not separated from install'.format(  # NOQA: ignore=E501
                spec.name, calling_fn.__name__)
        )
Example #18
 def autoreconf(self, spec, prefix):
     """Not usually needed; the configure script should already be there."""
     # If configure exists nothing needs to be done
     if os.path.exists(self.configure_abs_path):
         return
     # Else try to regenerate it
     autotools = ['m4', 'autoconf', 'automake', 'libtool']
     missing = [x for x in autotools if x not in spec]
     if missing:
         msg = 'Cannot generate configure: missing dependencies {0}'
         raise RuntimeError(msg.format(missing))
     tty.msg('Configure script not found: trying to generate it')
     tty.warn('*********************************************************')
     tty.warn('* If the default procedure fails, consider implementing *')
     tty.warn('*        a custom AUTORECONF phase in the package       *')
     tty.warn('*********************************************************')
     with working_dir(self.configure_directory):
         m = inspect.getmodule(self)
         # This part should be redundant in principle, but
         # won't hurt
         m.libtoolize()
         m.aclocal()
         # This line is what is needed most of the time
         # --install, --verbose, --force
         autoreconf_args = ['-ivf']
         if 'pkg-config' in spec:
             autoreconf_args += [
                 '-I',
                 join_path(spec['pkg-config'].prefix, 'share', 'aclocal'),
             ]
         autoreconf_args += self.autoreconf_extra_args
         m.autoreconf(*autoreconf_args)
Example #19
File: env.py Project: LLNL/spack
def env_remove(args):
    """Remove a *named* environment.

    This removes an environment managed by Spack. Directory environments
    and `spack.yaml` files embedded in repositories should be removed
    manually.
    """
    read_envs = []
    for env_name in args.rm_env:
        env = ev.read(env_name)
        read_envs.append(env)

    if not args.yes_to_all:
        answer = tty.get_yes_or_no(
            'Really remove %s %s?' % (
                string.plural(len(args.rm_env), 'environment', show_n=False),
                string.comma_and(args.rm_env)),
            default=False)
        if not answer:
            tty.die("Will not remove any environments")

    for env in read_envs:
        if env.active:
            tty.die("Environment %s can't be removed while activated."
                    % env.name)

        env.destroy()
        tty.msg("Successfully removed environment '%s'" % env.name)
Example #20
    def fetch(self):
        path = re.sub('^file://', '', self.url)

        # check whether the cache file exists.
        if not os.path.isfile(path):
            raise NoCacheError('No cache of %s' % path)

        self.stage.chdir()

        # remove old symlink if one is there.
        filename = self.stage.save_filename
        if os.path.exists(filename):
            os.remove(filename)

        # Symlink to local cached archive.
        os.symlink(path, filename)

        # Remove link if checksum fails, or subsequent fetchers
        # will assume they don't need to download.
        if self.digest:
            try:
                self.check()
            except ChecksumError:
                os.remove(self.archive_file)
                raise

        # Notify the user how we fetched.
        tty.msg('Using cached archive: %s' % path)
Example #21
def rm(module_type, specs, args):
    """Deletes the module files associated with every spec in specs, for every
    module type in module types.
    """

    module_cls = spack.modules.module_types[module_type]
    module_exist = lambda x: os.path.exists(module_cls(x).layout.filename)

    specs_with_modules = [spec for spec in specs if module_exist(spec)]

    modules = [module_cls(spec) for spec in specs_with_modules]

    if not modules:
        tty.die('No module file matches your query')

    # Ask for confirmation
    if not args.yes_to_all:
        msg = 'You are about to remove {0} module files for:\n'
        tty.msg(msg.format(module_type))
        spack.cmd.display_specs(specs_with_modules, long=True)
        print('')
        answer = tty.get_yes_or_no('Do you want to proceed?')
        if not answer:
            tty.die('Will not remove any module files')

    # Remove the module files
    for s in modules:
        s.remove()
Example #22
def dependents(parser, args):
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependents takes only one spec.")

    if args.installed:
        spec = spack.cmd.disambiguate_spec(specs[0])

        tty.msg("Dependents of %s" % spec.cformat('$_$@$%@$/'))
        deps = spack.store.db.installed_relatives(
            spec, 'parents', args.transitive)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
            print("No dependents")

    else:
        spec = specs[0]
        ideps = inverted_dependencies()

        dependents = get_dependents(spec.name, ideps, args.transitive)
        dependents.remove(spec.name)
        if dependents:
            colify(sorted(dependents))
        else:
            print("No dependents")
Example #23
def from_list_url(pkg):
    """If a package provides a URL which lists URLs for resources by
       version, this can create a fetcher for a URL discovered for
       the specified package's version."""
    if pkg.list_url:
        try:
            versions = pkg.fetch_remote_versions()
            try:
                # get a URL, and a checksum if we have it
                url_from_list = versions[pkg.version]
                checksum = None

                # try to find a known checksum for version, from the package
                version = pkg.version
                if version in pkg.versions:
                    args = pkg.versions[version]
                    checksum = next(
                        (v for k, v in args.items() if k in crypto.hashes),
                        args.get('checksum'))

                # construct a fetcher
                return URLFetchStrategy(url_from_list, checksum)
            except KeyError:
                tty.msg("Cannot find version %s in url_list" % pkg.version)

        except BaseException:
            # TODO: Don't catch BaseException here! Be more specific.
            tty.msg("Could not determine url from list_url.")
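
For reference, a hedged illustration of the data the checksum lookup above traverses: pkg.versions maps each version to the keyword arguments of its version() directive, keyed by hash name.

# In a package.py, a directive like
#     version('1.2.3', sha256='abc123...')
# makes pkg.versions[Version('1.2.3')] == {'sha256': 'abc123...'},
# which is the dict that the `args.items()` scan above searches
# for a known hash from crypto.hashes.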
Example #24
def repo_add(args):
    """Add a package source to Spack's configuration."""
    path = args.path

    # canon_path is an absolute path with config variables substituted.
    canon_path = canonicalize_path(path)

    # check if the path exists
    if not os.path.exists(canon_path):
        tty.die("No such file or directory: %s" % path)

    # Make sure the path is a directory.
    if not os.path.isdir(canon_path):
        tty.die("Not a Spack repository: %s" % path)

    # Make sure it's actually a spack repository by constructing it.
    repo = Repo(canon_path)

    # If that succeeds, finally add it to the configuration.
    repos = spack.config.get_config('repos', args.scope)
    if not repos:
        repos = []

    if repo.root in repos or path in repos:
        tty.die("Repository is already registered with Spack: %s" % path)

    repos.insert(0, canon_path)
    spack.config.update_config('repos', repos, args.scope)
    tty.msg("Created repo with namespace '%s'." % repo.namespace)
Example #25
def bootstrap(parser, args, **kwargs):
    kwargs.update({
        'keep_prefix': args.keep_prefix,
        'keep_stage': args.keep_stage,
        'install_deps': 'dependencies',
        'make_jobs': args.jobs,
        'verbose': args.verbose,
        'dirty': args.dirty
    })

    # Define requirement dictionary defining general specs which need
    # to be satisfied, and the specs to install when the general spec
    # isn't satisfied.
    requirement_dict = {'environment-modules': 'environment-modules~X'}

    for requirement in requirement_dict:
        installed_specs = spack.store.db.query(requirement)
        if installed_specs:
            tty.msg("Requirement %s is satisfied with installed "
                    "package %s" % (requirement, installed_specs[0]))
        else:
            # Install requirement
            spec_to_install = spack.spec.Spec(requirement_dict[requirement])
            spec_to_install.concretize()
            tty.msg("Installing %s to satisfy requirement for %s" %
                    (spec_to_install, requirement))
            kwargs['explicit'] = True
            package = spack.repo.get(spec_to_install)
            package.do_install(**kwargs)
Example #26
def get_build_system(args, guesser):
    """Determine the build system template.

    If a template is specified, always use that. Otherwise, if a URL
    is provided, download the tarball and peek inside to guess what
    build system it uses. Otherwise, use a generic template by default.

    Args:
        args (argparse.Namespace): The arguments given to ``spack create``
        guesser (BuildSystemGuesser): The first_stage_function given to
            ``spack checksum`` which records the build system it detects

    Returns:
        str: The name of the build system template to use
    """

    # Default template
    template = 'generic'

    if args.template:
        # Use a user-supplied template if one is present
        template = args.template
        tty.msg("Using specified package template: '{0}'".format(template))
    elif args.url:
        # Use whatever build system the guesser detected
        template = guesser.build_system
        if template == 'generic':
            tty.warn("Unable to detect a build system. "
                     "Using a generic package template.")
        else:
            msg = "This package looks like it uses the {0} build system"
            tty.msg(msg.format(template))

    return template
Example #27
def deactivate(parser, args):
    # TODO: shouldn't have to concretize here.  Fix DAG issues.
    specs = spack.cmd.parse_specs(args.spec, concretize=True)
    if len(specs) != 1:
        tty.die("deactivate requires one spec.  %d given." % len(specs))

    # TODO: remove this hack when DAG info is stored properly.
    # This ensures the ext spec is always normalized properly.
    spack.db.get(specs[0])

    spec = spack.cmd.disambiguate_spec(specs[0])
    pkg = spec.package

    if args.all:
        if pkg.extendable:
            tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
            ext_pkgs = spack.db.installed_extensions_for(spec)

            for ext_pkg in ext_pkgs:
                ext_pkg.spec.normalize()
                if ext_pkg.activated:
                    ext_pkg.do_deactivate(force=True)

        elif pkg.is_extension:
            # TODO: store DAG info properly (see above)
            spec.normalize()

            if not args.force and not spec.package.activated:
                tty.die("%s is not activated." % pkg.spec.short_spec)

            tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)

            topo_order = topological_sort(spec)
            index = spec.index()

            for name in topo_order:
                espec = index[name]
                epkg = espec.package

                # TODO: store DAG info properly (see above)
                epkg.spec.normalize()

                if epkg.extends(pkg.extendee_spec):
                    if epkg.activated or args.force:

                        epkg.do_deactivate(force=args.force)

        else:
            tty.die("spack deactivate --all requires an extendable package or an extension.")

    else:
        if not pkg.is_extension:
            tty.die("spack deactivate requires an extension.",
                    "Did you mean 'spack deactivate --all'?")

        if not args.force and not spec.package.activated:
            tty.die("Package %s is not activated." % specs[0].short_spec)

        spec.package.do_deactivate(force=args.force)
Example #28
def mirror_create(args):
    """Create a directory to be used as a spack mirror, and fill it with
       package archives."""
    # try to parse specs from the command line first.
    with spack.concretize.concretizer.disable_compiler_existence_check():
        specs = spack.cmd.parse_specs(args.specs, concretize=True)

        # If there is a file, parse each line as a spec and add it to the list.
        if args.file:
            if specs:
                tty.die("Cannot pass specs on the command line with --file.")
            specs = _read_specs_from_file(args.file)

        # If nothing is passed, use all packages.
        if not specs:
            specs = [Spec(n) for n in spack.repo.all_package_names()]
            specs.sort(key=lambda s: s.format("$_$@").lower())

        # If the user asked for dependencies, traverse spec DAG get them.
        if args.dependencies:
            new_specs = set()
            for spec in specs:
                spec.concretize()
                for s in spec.traverse():
                    new_specs.add(s)
            specs = list(new_specs)

        # Skip external specs, as they are already installed
        external_specs = [s for s in specs if s.external]
        specs = [s for s in specs if not s.external]

        for spec in external_specs:
            msg = 'Skipping {0} as it is an external spec.'
            tty.msg(msg.format(spec.cshort_spec))

        # Default name for directory is spack-mirror-<DATESTAMP>
        directory = args.directory
        if not directory:
            timestamp = datetime.now().strftime("%Y-%m-%d")
            directory = 'spack-mirror-' + timestamp

        # Make sure nothing is in the way.
        existed = os.path.isdir(directory)

        # Actually do the work to create the mirror
        present, mirrored, error = spack.mirror.create(
            directory, specs, num_versions=args.one_version_per_spec)
        p, m, e = len(present), len(mirrored), len(error)

        verb = "updated" if existed else "created"
        tty.msg(
            "Successfully %s mirror in %s" % (verb, directory),
            "Archive stats:",
            "  %-4d already present"  % p,
            "  %-4d added"            % m,
            "  %-4d failed to fetch." % e)
        if error:
            tty.error("Failed downloads:")
            colify(s.cformat("$_$@") for s in error)
Example #29
def setup(self, args):
    if not args.spec:
        tty.die("spack setup requires a package spec argument.")

    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) > 1:
        tty.die("spack setup only takes one spec.")

    # Take a write lock before checking for existence.
    with spack.store.db.write_transaction():
        spec = specs[0]
        if not spack.repo.exists(spec.name):
            tty.die("No package for '{0}' was found.".format(spec.name),
                    "  Use `spack create` to create a new package")
        if not spec.versions.concrete:
            tty.die(
                "spack setup spec must have a single, concrete version. "
                "Did you forget a package version number?")

        spec.concretize()
        package = spack.repo.get(spec)
        if not isinstance(package, spack.CMakePackage):
            tty.die(
                'Support for {0} derived packages not yet implemented'.format(
                    package.build_system_class
                )
            )

        # It's OK if the package is already installed.

        # Forces the build to run out of the current directory.
        package.stage = DIYStage(os.getcwd())

        # TODO: make this an argument, not a global.
        spack.do_checksum = False

        # Build the install parser up front; it is needed again below for
        # the fake install, even when dependency installation is skipped.
        parser = argparse.ArgumentParser()
        install.setup_parser(parser)

        # Install dependencies if requested to do so
        if not args.ignore_deps:
            inst_args = copy.deepcopy(args)
            inst_args = parser.parse_args(
                ['--only=dependencies'] + args.spec,
                namespace=inst_args
            )
            install.install(parser, inst_args)
        # Generate spconfig.py
        tty.msg(
            'Generating spconfig.py [{0}]'.format(package.spec.cshort_spec)
        )
        write_spconfig(package)
        # Install this package to register it in the DB and permit
        # module file regeneration
        inst_args = copy.deepcopy(args)
        inst_args = parser.parse_args(
            ['--only=package', '--fake'] + args.spec,
            namespace=inst_args
        )
        install.install(parser, inst_args)
Example #30
    def __init__(self, name, *args):
        # If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
        if not name.startswith('perl-'):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to perl-{0}".format(name))
            name = 'perl-{0}'.format(name)

        super(PerlmakePackageTemplate, self).__init__(name, *args)
Example #31
def get_checksums_for_versions(
        url_dict, name, first_stage_function=None, keep_stage=False,
        fetch_options=None, batch=False):
    """Fetches and checksums archives from URLs.

    This function is called by both ``spack checksum`` and ``spack
    create``.  The ``first_stage_function`` argument allows the caller to
    inspect the first downloaded archive, e.g., to determine the build
    system.

    Args:
        url_dict (dict): A dictionary of the form: version -> URL
        name (str): The name of the package
        first_stage_function (typing.Callable): function that takes a Stage and a URL;
            this is run on the stage of the first URL downloaded
        keep_stage (bool): whether to keep staging area when command completes
        batch (bool): whether to ask user how many versions to fetch (false)
            or fetch all versions (true)
        fetch_options (dict): Options used for the fetcher (such as timeout
            or cookies)

    Returns:
        (str): A multi-line string containing versions and corresponding hashes

    """
    sorted_versions = sorted(url_dict.keys(), reverse=True)

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v in sorted_versions)
    num_ver = len(sorted_versions)

    tty.msg('Found {0} version{1} of {2}:'.format(
            num_ver, '' if num_ver == 1 else 's', name),
            '',
            *llnl.util.lang.elide_list(
                ['{0:{1}}  {2}'.format(str(v), max_len, url_dict[v])
                 for v in sorted_versions]))
    print()

    if batch:
        archives_to_fetch = len(sorted_versions)
    else:
        archives_to_fetch = tty.get_number(
            "How many would you like to checksum?", default=1, abort='q')

    if not archives_to_fetch:
        tty.die("Aborted.")

    versions = sorted_versions[:archives_to_fetch]
    urls = [url_dict[v] for v in versions]

    tty.debug('Downloading...')
    version_hashes = []
    i = 0
    errors = []
    for url, version in zip(urls, versions):
        try:
            if fetch_options:
                url_or_fs = fs.URLFetchStrategy(
                    url, fetch_options=fetch_options)
            else:
                url_or_fs = url
            with Stage(url_or_fs, keep=keep_stage) as stage:
                # Fetch the archive
                stage.fetch()
                if i == 0 and first_stage_function:
                    # Only run first_stage_function the first time,
                    # no need to run it every time
                    first_stage_function(stage, url)

                # Checksum the archive and add it to the list
                version_hashes.append((version, spack.util.crypto.checksum(
                    hashlib.sha256, stage.archive_file)))
                i += 1
        except FailedDownloadError:
            errors.append('Failed to fetch {0}'.format(url))
        except Exception as e:
            tty.msg('Something failed on {0}, skipping.  ({1})'.format(url, e))

    for msg in errors:
        tty.debug(msg)

    if not version_hashes:
        tty.die("Could not fetch any versions for {0}".format(name))

    # Find length of longest string in the list for padding
    max_len = max(len(str(v)) for v, h in version_hashes)

    # Generate the version directives to put in a package.py
    version_lines = "\n".join([
        "    version('{0}', {1}sha256='{2}')".format(
            v, ' ' * (max_len - len(str(v))), h) for v, h in version_hashes
    ])

    num_hash = len(version_hashes)
    tty.debug('Checksummed {0} version{1} of {2}:'.format(
              num_hash, '' if num_hash == 1 else 's', name))

    return version_lines
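
Judging from the format string above, the returned version_lines block is ready to paste into a package.py and looks roughly like this (hashes shown as placeholders, not real digests):

    version('2.1.0', sha256='<sha256 of the 2.1.0 archive>')
    version('2.0.0', sha256='<sha256 of the 2.0.0 archive>')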
Example #32
def url_parse(args):
    url = args.url

    tty.msg('Parsing URL: {0}'.format(url))
    print()

    ver,  vs, vl, vi, vregex = parse_version_offset(url)
    tty.msg('Matched version regex {0:>2}: r{1!r}'.format(vi, vregex))

    name, ns, nl, ni, nregex = parse_name_offset(url, ver)
    tty.msg('Matched  name   regex {0:>2}: r{1!r}'.format(ni, nregex))

    print()
    tty.msg('Detected:')
    try:
        print_name_and_version(url)
    except UrlParseError as e:
        tty.error(str(e))

    print('    name:    {0}'.format(name))
    print('    version: {0}'.format(ver))
    print()

    tty.msg('Substituting version 9.9.9b:')
    newurl = substitute_version(url, '9.9.9b')
    print_name_and_version(newurl)

    if args.spider:
        print()
        tty.msg('Spidering for versions:')
        versions = find_versions_of_archive(url)

        if not versions:
            print('  Found no versions for {0}'.format(name))
            return

        max_len = max(len(str(v)) for v in versions)

        for v in sorted(versions):
            print('{0:{1}}  {2}'.format(v, max_len, versions[v]))
Example #33
def url_stats(args):
    stats = {}  # stats about fetchers in packages.
    nvers = 0   # total number of versions
    npkgs = 0   # total number of packages

    def inc(fstype, category, attr=None):
        """Increment statistics in the stats dict."""
        categories = stats.setdefault(fstype, {})
        if attr:
            cat_stats = categories.setdefault(category, {})
            val = cat_stats.setdefault(attr, 0)
            stats[fstype][category][attr] = val + 1
        else:
            val = categories.setdefault(category, 0)
            stats[fstype][category] = val + 1

    # over all packages
    for pkg in spack.repo.path.all_packages():
        npkgs += 1

        # look at each version (note: `args` here shadows the function's
        # parameter, which is not used after this point)
        for v, args in pkg.versions.items():
            # figure out what type of fetcher it is
            fetcher = fs.for_package_version(pkg, v)
            nvers += 1

            fstype = fetcher.url_attr
            inc(fstype, 'total')

            # put some special stats in for particular types of fetchers.
            if fstype == 'git':
                if 'commit' in args:
                    inc('git', 'security', 'commit')
                else:
                    inc('git', 'security', 'no commit')
            elif fstype == 'url':
                for h in crypto.hashes:
                    if h in args:
                        inc('url', 'checksums', h)
                        break
                else:
                    if 'checksum' in args:
                        h = crypto.hash_algo_for_digest(args['checksum'])
                        inc('url', 'checksums', h)
                    else:
                        inc('url', 'checksums', 'no checksum')

                # parse out the URL scheme (https/http/ftp/etc.)
                urlinfo = urlparse(fetcher.url)
                inc('url', 'schemes', urlinfo.scheme)

    # print a nice summary table
    tty.msg("%d total versions for %d packages:" % (nvers, npkgs))
    line_width = 36
    print("-" * line_width)
    for fetcher, fetcher_stats in sorted(stats.items(), reverse=True):
        fs_total = fetcher_stats['total']
        fs_pct = float(fs_total) / nvers * 100
        print("%-22s%5d%8.1f%%" % (fetcher, fs_total, fs_pct))

        for category, cat_stats in sorted(fetcher_stats.items(), reverse=True):
            if category == 'total':
                continue
            print("  %s" % category)

            for name, number in sorted(cat_stats.items(), reverse=True):
                pct = float(number) / fs_total * 100
                print("    %-18s%5d%8.1f%%" % (name, number, pct))
        print("-" * line_width)
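
A hedged illustration of the nested dict that inc() builds up (counts shown as <n> placeholders, for shape only):

# stats = {
#     'url': {'total': <n>,
#             'checksums': {'sha256': <n>, 'no checksum': <n>, ...},
#             'schemes': {'https': <n>, 'http': <n>, ...}},
#     'git': {'total': <n>,
#             'security': {'commit': <n>, 'no commit': <n>}},
# }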
Example #34
def install(parser, args, **kwargs):
    if not args.package and not args.specfiles:
        tty.die("install requires at least one package argument or yaml file")

    if args.jobs is not None:
        if args.jobs <= 0:
            tty.die("The -j option must be a positive integer!")

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    # Parse cli arguments and construct a dictionary
    # that will be passed to Package.do_install API
    kwargs.update({
        'keep_prefix': args.keep_prefix,
        'keep_stage': args.keep_stage,
        'restage': not args.dont_restage,
        'install_source': args.install_source,
        'install_deps': 'dependencies' in args.things_to_install,
        'make_jobs': args.jobs,
        'verbose': args.verbose,
        'fake': args.fake,
        'dirty': args.dirty,
        'use_cache': args.use_cache
    })

    if args.run_tests:
        tty.warn("Deprecated option: --run-tests: use --test=all instead")

    # 1. Abstract specs from cli
    reporter = spack.report.collect_info(args.log_format,
                                         ' '.join(args.package),
                                         args.cdash_upload_url)
    if args.log_file:
        reporter.filename = args.log_file

    specs = spack.cmd.parse_specs(args.package)
    tests = False
    if args.test == 'all' or args.run_tests:
        tests = True
    elif args.test == 'root':
        tests = [spec.name for spec in specs]
    kwargs['tests'] = tests

    try:
        specs = spack.cmd.parse_specs(args.package,
                                      concretize=True,
                                      tests=tests)
    except SpackError as e:
        reporter.concretization_report(e.message)
        raise

    # 2. Concrete specs from yaml files
    for file in args.specfiles:
        with open(file, 'r') as f:
            s = spack.spec.Spec.from_yaml(f)

        if s.concretized().dag_hash() != s.dag_hash():
            msg = 'skipped invalid file "{0}". '
            msg += 'The file does not contain a concrete spec.'
            tty.warn(msg.format(file))
            continue

        specs.append(s.concretized())

    if len(specs) == 0:
        tty.die('The `spack install` command requires a spec to install.')

    if not args.log_file:
        reporter.filename = default_log_file(specs[0])
    reporter.specs = specs
    with reporter:
        if args.overwrite:
            # If we asked to overwrite an existing spec we must ensure that:
            # 1. We have only one spec
            # 2. The spec is already installed
            assert len(specs) == 1, \
                "only one spec is allowed when overwriting an installation"

            spec = specs[0]
            t = spack.store.db.query(spec)
            assert len(t) == 1, "to overwrite a spec you must install it first"

            # Give the user a last chance to think about overwriting an already
            # existing installation
            if not args.yes_to_all:
                tty.msg('The following package will be reinstalled:\n')

                display_args = {
                    'long': True,
                    'show_flags': True,
                    'variants': True
                }

                spack.cmd.display_specs(t, **display_args)
                answer = tty.get_yes_or_no('Do you want to proceed?',
                                           default=False)
                if not answer:
                    tty.die('Reinstallation aborted.')

            with fs.replace_directory_transaction(specs[0].prefix):
                install_spec(args, kwargs, specs[0])

        else:
            for spec in specs:
                install_spec(args, kwargs, spec)
Example #35
 def _not_locked(installer, lock_type, pkg):
     tty.msg('{0} locked {1}'.format(lock_type, pkg.spec.name))
     return lock_type, None
Example #36
 def _prep(installer, task, keep_prefix, keep_stage, restage):
     tty.msg('preparing {0}'.format(task.pkg.spec.name))
     assert task.pkg.spec.name not in installer.installed
Example #37
 def _prep(installer, task):
     tty.msg('preparing {0}'.format(task.pkg.spec.name))
     assert task.pkg.spec.name not in installer.installed
Example #38
 def _read(installer, lock_type, pkg):
     tty.msg('{0}->read locked {1}'.format(lock_type, pkg.spec.name))
     return orig_fn(installer, 'read', pkg)
Example #39
 def _requeued(installer, task):
     tty.msg('requeued {0}'.format(inst.package_id(task.pkg)))
Example #40
def buildcache_copy(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-yaml' argument."""
    # TODO: This sub-command should go away once #11117 is merged

    if not args.spec_yaml:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_yaml, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_yaml))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(build_cache_dir,
                                    bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(build_cache_dir,
                                     bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    cdashidfile_rel_path = os.path.join(build_cache_dir,
                                        bindist.tarball_name(spec, '.cdashid'))
    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

    # Make sure directory structure exists before attempting to copy
    if not os.path.exists(os.path.dirname(tarball_dest_path)):
        os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # Copy the cdashid file (if exists) to the destination mirror
    if os.path.exists(cdashid_src_path):
        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
        shutil.copyfile(cdashid_src_path, cdashid_dest_path)
Example #41
def _determine_specs_to_mirror(args):
    if args.specs and args.all:
        raise SpackError("Cannot specify specs on command line if you"
                         " chose to mirror all specs with '--all'")
    elif args.file and args.all:
        raise SpackError("Cannot specify specs with a file ('-f') if you"
                         " chose to mirror all specs with '--all'")

    if not args.versions_per_spec:
        num_versions = 1
    elif args.versions_per_spec == 'all':
        num_versions = 'all'
    else:
        try:
            num_versions = int(args.versions_per_spec)
        except ValueError:
            raise SpackError("'--versions-per-spec' must be a number or 'all',"
                             " got '{0}'".format(args.versions_per_spec))

    # try to parse specs from the command line first.
    with spack.concretize.disable_compiler_existence_check():
        specs = spack.cmd.parse_specs(args.specs, concretize=True)

        # If there is a file, parse each line as a spec and add it to the list.
        if args.file:
            if specs:
                tty.die("Cannot pass specs on the command line with --file.")
            specs = _read_specs_from_file(args.file)

        env_specs = None
        if not specs:
            # If nothing is passed, use environment or all if no active env
            if not args.all:
                tty.die(
                    "No packages were specified.",
                    "To mirror all packages, use the '--all' option"
                    " (this will require significant time and space).")

            env = ev.get_env(args, 'mirror')
            if env:
                env_specs = env.all_specs()
            else:
                specs = [Spec(n) for n in spack.repo.all_package_names()]
        else:
            # If the user asked for dependencies, traverse spec DAG get them.
            if args.dependencies:
                new_specs = set()
                for spec in specs:
                    spec.concretize()
                    for s in spec.traverse():
                        new_specs.add(s)
                specs = list(new_specs)

            # Skip external specs, as they are already installed
            external_specs = [s for s in specs if s.external]
            specs = [s for s in specs if not s.external]

            for spec in external_specs:
                msg = 'Skipping {0} as it is an external spec.'
                tty.msg(msg.format(spec.cshort_spec))

        if env_specs:
            if args.versions_per_spec:
                tty.warn("Ignoring '--versions-per-spec' for mirroring specs"
                         " in environment.")
            mirror_specs = env_specs
        else:
            if num_versions == 'all':
                mirror_specs = spack.mirror.get_all_versions(specs)
            else:
                mirror_specs = spack.mirror.get_matching_versions(
                    specs, num_versions=num_versions)
            mirror_specs.sort(key=lambda s: (s.name, s.version))

    exclude_specs = []
    if args.exclude_file:
        exclude_specs.extend(_read_specs_from_file(args.exclude_file))
    if args.exclude_specs:
        exclude_specs.extend(
            spack.cmd.parse_specs(str(args.exclude_specs).split()))
    if exclude_specs:
        mirror_specs = list(
            x for x in mirror_specs
            if not any(x.satisfies(y, strict=True) for y in exclude_specs))

    return mirror_specs
Example #42
 def edit(self, spec, prefix):
     """Edits the Makefile before calling make. This phase cannot
     be defaulted.
     """
     tty.msg('Using default implementation: skipping edit phase.')
Example #43
 def check(self):
     tty.msg("No checksum needed when fetching with %s" % self.url_attr)
Example #44
 def _requeued(installer, task):
     tty.msg('requeued {0}'.format(task.pkg.spec.name))
Example #45
    # Try to guess a name.  If it doesn't work, allow the user to override.
    if args.alternate_name:
        name = args.alternate_name
    else:
        try:
            name = spack.url.parse_name(url, version)
        except spack.url.UndetectableNameError:
            # Name could not be parsed from the URL; ask the user for one
            tty.die("Couldn't guess a name for this package. Try running:", "",
                    "spack create --name <name> <url>")

    if not valid_module_name(name):
        tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")

    tty.msg("This looks like a URL for %s version %s." % (name, version))
    tty.msg("Creating template for package %s" % name)

    # Create a directory for the new package.
    pkg_path = spack.db.filename_for_package_name(name)
    if os.path.exists(pkg_path) and not args.force:
        tty.die("%s already exists." % pkg_path)
    else:
        mkdirp(os.path.dirname(pkg_path))

    versions = spack.package.find_versions_of_archive(url)
    rkeys = sorted(versions.keys(), reverse=True)
    versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))

    archives_to_fetch = 1
    if not versions:
Example #46
def _not_locked(installer, lock_type, pkg):
    """Generic monkeypatch function for _ensure_locked to return no lock"""
    tty.msg('{0} locked {1}'.format(lock_type, pkg.spec.name))
    return lock_type, None
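
Stubs like this one (and the _prep/_read/_install/_requeued one-liners above) are test doubles; a hedged sketch of how such a stub might be wired in with pytest's monkeypatch fixture (the test name, fixtures, and patch target here are assumptions):

def test_install_without_lock(installer, monkeypatch, capsys):
    # Swap the real locking method for the stub, for this test only.
    monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked)
    installer.install()
    assert 'locked' in capsys.readouterr()[0]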
Example #47
 def _install(installer, task, **kwargs):
     tty.msg('{0} installing'.format(task.pkg.spec.name))
Example #48
 def _add(_compilers):
     tty.msg(config_msg)
Example #49
def install(parser, args, **kwargs):
    if not args.package:
        tty.die("install requires at least one package argument")

    if args.jobs is not None:
        if args.jobs <= 0:
            tty.die("The -j option must be a positive integer!")

    if args.no_checksum:
        spack.do_checksum = False  # TODO: remove this global.

    # Parse cli arguments and construct a dictionary
    # that will be passed to Package.do_install API
    kwargs.update({
        'keep_prefix': args.keep_prefix,
        'keep_stage': args.keep_stage,
        'restage': args.restage,
        'install_source': args.install_source,
        'install_deps': 'dependencies' in args.things_to_install,
        'make_jobs': args.jobs,
        'verbose': args.verbose,
        'fake': args.fake,
        'dirty': args.dirty,
        'use_cache': args.use_cache
    })

    if args.run_tests:
        tty.warn("Deprecated option: --run-tests: use --test=all instead")

    specs = spack.cmd.parse_specs(args.package)
    if args.test == 'all' or args.run_tests:
        spack.package_testing.test_all()
    elif args.test == 'root':
        for spec in specs:
            spack.package_testing.test(spec.name)

    # Specs from the command line or from spec files
    specs = []
    if args.file:
        for file in args.package:
            with open(file, 'r') as f:
                specs.append(spack.spec.Spec.from_yaml(f))
    else:
        specs = spack.cmd.parse_specs(args.package, concretize=True)
    if len(specs) == 0:
        tty.error('The `spack install` command requires a spec to install.')

    if args.overwrite:
        # If we asked to overwrite an existing spec we must ensure that:
        # 1. We have only one spec
        # 2. The spec is already installed
        assert len(specs) == 1, \
            "only one spec is allowed when overwriting an installation"

        spec = specs[0]
        t = spack.store.db.query(spec)
        assert len(t) == 1, "to overwrite a spec you must install it first"

        # Give the user a last chance to think about overwriting an already
        # existing installation
        if not args.yes_to_all:
            tty.msg('The following package will be reinstalled:\n')

            display_args = {'long': True, 'show_flags': True, 'variants': True}

            spack.cmd.display_specs(t, **display_args)
            answer = tty.get_yes_or_no('Do you want to proceed?',
                                       default=False)
            if not answer:
                tty.die('Reinstallation aborted.')

        with fs.replace_directory_transaction(specs[0].prefix):
            install_spec(args, kwargs, specs[0])

    else:

        for spec in specs:
            install_spec(args, kwargs, spec)
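
For illustration, the dictionary this handler forwards to Package.do_install might look as follows for a plain 'spack install -j 4 <spec>' run; the values below are assumed argparse defaults, not taken from the source:

kwargs = {
    'keep_prefix': False,    # --keep-prefix not given
    'keep_stage': False,     # --keep-stage not given
    'restage': False,
    'install_source': False,
    'install_deps': True,    # 'dependencies' in args.things_to_install
    'make_jobs': 4,          # from -j/--jobs
    'verbose': False,
    'fake': False,
    'dirty': False,
    'use_cache': True,
}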
Example #50
def _chgrp(path, group, follow_symlinks=True):
    tty.msg(mock_chgrp_msg.format(path, group))
Example #51
def _env_create(name_or_path,
                init_file=None,
                dir=False,
                with_view=None,
                keep_relative=False):
    """Create a new environment, with an optional yaml description.

    Arguments:
        name_or_path (str): name of the environment to create, or path to it
        init_file (str or file): optional initialization file -- can be
            spack.yaml or spack.lock
        dir (bool): if True, create an environment in a directory instead
            of a named environment
        with_view (str or bool): whether a view should be maintained for
            the environment, or the path to a view for the environment
        keep_relative (bool): if True, develop paths are copied verbatim into
            the new environment file, otherwise they may be made absolute if the
            new environment is in a different location
    """
    if dir:
        env = ev.Environment(name_or_path, init_file, with_view, keep_relative)
        env.write()
        tty.msg("Created environment in %s" % env.path)
        tty.msg("You can activate this environment with:")
        tty.msg("  spack env activate %s" % env.path)
    else:
        env = ev.create(name_or_path, init_file, with_view, keep_relative)
        env.write()
        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
        tty.msg("You can activate this environment with:")
        tty.msg("  spack env activate %s" % (name_or_path))
    return env
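
A minimal usage sketch under the docstring's assumptions; the environment name and paths below are illustrative:

# Named environment initialized from a spack.yaml description:
env = _env_create('myenv', init_file='spack.yaml')

# Anonymous environment rooted in a directory instead:
env = _env_create('/path/to/envdir', dir=True)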
Example #52
    def _fetch_from_url(self, url):
        save_file = None
        partial_file = None
        if self.stage.save_filename:
            save_file = self.stage.save_filename
            partial_file = self.stage.save_filename + '.part'
        tty.msg("Fetching %s" % url)
        if partial_file:
            save_args = [
                '-C',
                '-',  # continue partial downloads
                '-o',
                partial_file
            ]  # use a .part file
        else:
            save_args = ['-O']

        curl_args = save_args + [
            '-f',  # fail on >400 errors
            '-D',
                '-',  # print out HTTP headers
            '-L',  # resolve 3xx redirects
            # Timeout if can't establish a connection after 10 sec.
            '--connect-timeout',
            '10',
            url,
        ]

        if not spack.config.get('config:verify_ssl'):
            curl_args.append('-k')

        if sys.stdout.isatty() and tty.msg_enabled():
            curl_args.append('-#')  # status bar when using a tty
        else:
            curl_args.append('-sS')  # just errors when not.

        curl_args += self.extra_curl_options

        # Run curl but grab the mime type from the http headers
        curl = self.curl
        with working_dir(self.stage.path):
            headers = curl(*curl_args, output=str, fail_on_error=False)

        if curl.returncode != 0:
            # clean up archive on failure.
            if self.archive_file:
                os.remove(self.archive_file)

            if partial_file and os.path.exists(partial_file):
                os.remove(partial_file)

            if curl.returncode == 22:
                # Exit code 22 means an HTTP error >= 400 (e.g. a 404).
                # Curl will print the error itself.
                raise FailedDownloadError(self.url,
                                          "URL %s was not found!" % self.url)

            elif curl.returncode == 60:
                # This is a certificate error.  Suggest spack -k
                raise FailedDownloadError(
                    self.url,
                    "Curl was unable to fetch due to invalid certificate. "
                    "This is either an attack, or your cluster's SSL "
                    "configuration is bad.  If you believe your SSL "
                    "configuration is bad, you can try running spack -k, "
                    "which will not check SSL certificates."
                    "Use this at your own risk.")

            else:
                # This is some other curl error.  Curl will print the
                # error, but print a spack message too
                raise FailedDownloadError(
                    self.url, "Curl failed with error %d" % curl.returncode)

        # Check if we somehow got an HTML file rather than the archive we
        # asked for.  We only look at the last content type, to handle
        # redirects properly.
        content_types = re.findall(r'Content-Type:[^\r\n]+',
                                   headers,
                                   flags=re.IGNORECASE)
        if content_types and 'text/html' in content_types[-1]:
            warn_content_type_mismatch(self.archive_file or "the archive")
        return partial_file, save_file
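
The trailing Content-Type check can be exercised on its own; a short sketch with fabricated curl header output covering a redirect followed by the final response (the headers are illustrative):

import re

headers = ("HTTP/1.1 302 Found\r\n"
           "Content-Type: text/html\r\n"
           "\r\n"
           "HTTP/1.1 200 OK\r\n"
           "Content-Type: application/x-gzip\r\n")

# Only the last Content-Type counts, so a redirect page does not
# trigger a false mismatch warning.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers,
                           flags=re.IGNORECASE)
assert 'text/html' not in content_types[-1]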
Example #53
def url_summary(args):
    # Collect statistics on how many URLs were correctly parsed
    total_urls       = 0
    correct_names    = 0
    correct_versions = 0

    # Collect statistics on which regexes were matched and how often
    name_regex_dict    = dict()
    name_count_dict    = defaultdict(int)
    version_regex_dict = dict()
    version_count_dict = defaultdict(int)

    tty.msg('Generating a summary of URL parsing in Spack...')

    # Loop through all packages
    for pkg in spack.repo.path.all_packages():
        urls = set()

        url = getattr(pkg.__class__, 'url', None)
        if url:
            urls.add(url)

        for params in pkg.versions.values():
            url = params.get('url', None)
            if url:
                urls.add(url)

        # Calculate statistics
        for url in urls:
            total_urls += 1

            # Parse versions
            version = None
            try:
                version, vs, vl, vi, vregex = parse_version_offset(url)
                version_regex_dict[vi] = vregex
                version_count_dict[vi] += 1
                if version_parsed_correctly(pkg, version):
                    correct_versions += 1
            except UndetectableVersionError:
                pass

            # Parse names
            try:
                name, ns, nl, ni, nregex = parse_name_offset(url, version)
                name_regex_dict[ni] = nregex
                name_count_dict[ni] += 1
                if name_parsed_correctly(pkg, name):
                    correct_names += 1
            except UndetectableNameError:
                pass

    print()
    print('    Total URLs found:          {0}'.format(total_urls))
    print('    Names correctly parsed:    {0:>4}/{1:>4} ({2:>6.2%})'.format(
        correct_names, total_urls, correct_names / total_urls))
    print('    Versions correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})'.format(
        correct_versions, total_urls, correct_versions / total_urls))
    print()

    tty.msg('Statistics on name regular expressions:')

    print()
    print('    Index  Count  Regular Expression')
    for ni in sorted(name_regex_dict.keys()):
        print('    {0:>3}: {1:>6}   r{2!r}'.format(
            ni, name_count_dict[ni], name_regex_dict[ni]))
    print()

    tty.msg('Statistics on version regular expressions:')

    print()
    print('    Index  Count  Regular Expression')
    for vi in sorted(version_regex_dict.keys()):
        print('    {0:>3}: {1:>6}   r{2!r}'.format(
            vi, version_count_dict[vi], version_regex_dict[vi]))
    print()

    # Return statistics, only for testing purposes
    return (total_urls, correct_names, correct_versions,
            name_count_dict, version_count_dict)
Example #54
    def expand(self):
        if not self.expand_archive:
            tty.msg("Staging unexpanded archive %s in %s" %
                    (self.archive_file, self.stage.source_path))
            if not self.stage.expanded:
                mkdirp(self.stage.source_path)
            dest = os.path.join(self.stage.source_path,
                                os.path.basename(self.archive_file))
            shutil.move(self.archive_file, dest)
            return

        tty.msg("Staging archive: %s" % self.archive_file)

        if not self.archive_file:
            raise NoArchiveFileError(
                "Couldn't find archive file",
                "Failed on expand() for URL %s" % self.url)

        if not self.extension:
            self.extension = extension(self.archive_file)

        if self.stage.expanded:
            tty.debug('Source already staged to %s' % self.stage.source_path)
            return

        decompress = decompressor_for(self.archive_file, self.extension)

        # Expand all tarballs in their own directory to contain
        # exploding tarballs.
        tarball_container = os.path.join(self.stage.path,
                                         "spack-expanded-archive")

        mkdirp(tarball_container)
        with working_dir(tarball_container):
            decompress(self.archive_file)

        # Check for an exploding tarball, i.e. one that doesn't expand to
        # a single directory.  If the tarball *didn't* explode, move its
        # contents to the staging source directory & remove the container
        # directory.  If the tarball did explode, just rename the tarball
        # directory to the staging source directory.
        #
        # NOTE: The tar program on Mac OS X will encode HFS metadata in
        # hidden files, which can end up *alongside* a single top-level
        # directory.  We initially ignore presence of hidden files to
        # accommodate these "semi-exploding" tarballs but ensure the files
        # are copied to the source directory.
        files = os.listdir(tarball_container)
        non_hidden = [f for f in files if not f.startswith('.')]
        if len(non_hidden) == 1:
            src = os.path.join(tarball_container, non_hidden[0])
            if os.path.isdir(src):
                self.stage.srcdir = non_hidden[0]
                shutil.move(src, self.stage.source_path)
                if len(files) > 1:
                    files.remove(non_hidden[0])
                    for f in files:
                        src = os.path.join(tarball_container, f)
                        dest = os.path.join(self.stage.path, f)
                        shutil.move(src, dest)
                os.rmdir(tarball_container)
            else:
                # This is a non-directory entry (e.g., a patch file) so simply
                # rename the tarball container to be the source path.
                shutil.move(tarball_container, self.stage.source_path)

        else:
            shutil.move(tarball_container, self.stage.source_path)
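
The exploding-tarball decision reduces to a predicate on the expanded directory listing; a self-contained sketch, independent of the staging machinery above:

import os

def tarball_exploded(container):
    """Return True if the archive did NOT expand to a single top-level
    directory. Hidden files (e.g. macOS ._* metadata) are ignored for
    the decision, mirroring expand() above."""
    non_hidden = [f for f in os.listdir(container)
                  if not f.startswith('.')]
    return not (len(non_hidden) == 1 and
                os.path.isdir(os.path.join(container, non_hidden[0])))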
Example #55
def config_prefer_upstream(args):
    """Generate a packages config based on the configuration of all upstream
    installs."""

    scope = args.scope
    if scope is None:
        scope = spack.config.default_modify_scope('packages')

    all_specs = set(spack.store.db.query(installed=True))
    local_specs = set(spack.store.db.query_local(installed=True))
    pref_specs = local_specs if args.local else all_specs - local_specs

    conflicting_variants = set()

    pkgs = {}
    for spec in pref_specs:
        # Collect all the upstream compilers and versions for this package.
        pkg = pkgs.get(spec.name, {
            'version': [],
            'compiler': [],
        })
        pkgs[spec.name] = pkg

        # We have no existing variant if this is our first added version.
        existing_variants = pkg.get('variants',
                                    None if not pkg['version'] else '')

        version = spec.version.string
        if version not in pkg['version']:
            pkg['version'].append(version)

        compiler = str(spec.compiler)
        if compiler not in pkg['compiler']:
            pkg['compiler'].append(compiler)

        # Get and list all the variants that differ from the default.
        variants = []
        for var_name, variant in spec.variants.items():
            if (var_name in ['patches']
                    or var_name not in spec.package.variants):
                continue

            if variant.value != spec.package.variants[var_name].default:
                variants.append(str(variant))
        variants.sort()
        variants = ' '.join(variants)

        if spec.name not in conflicting_variants:
            # Only specify the variants if there's a single variant
            # set across all versions/compilers.
            if existing_variants is not None and existing_variants != variants:
                conflicting_variants.add(spec.name)
                pkg.pop('variants', None)
            elif variants:
                pkg['variants'] = variants

    if conflicting_variants:
        tty.warn("The following packages have multiple conflicting upstream "
                 "specs. You may have to specify, by "
                 "concretized hash, which spec you want when building "
                 "packages that depend on them:\n - {0}".format("\n - ".join(
                     sorted(conflicting_variants))))

    # Simply write the config to the specified file.
    existing = spack.config.get('packages', scope=scope)
    new = spack.config.merge_yaml(existing, pkgs)
    spack.config.set('packages', new, scope)
    config_file = spack.config.config.get_config_filename(scope, 'packages')

    tty.msg("Updated config at {0}".format(config_file))
Example #56
    def fetch(self):
        if self.stage.expanded:
            tty.msg("Already fetched {0}".format(self.stage.source_path))
            return

        tty.msg("Cloning git repository: {0}".format(self._repo_info()))

        git = self.git
        if self.commit:
            # Need to do a regular clone and check out everything if
            # they asked for a particular commit.
            debug = spack.config.get('config:debug')

            clone_args = ['clone', self.url]
            if not debug:
                clone_args.insert(1, '--quiet')
            with temp_cwd():
                git(*clone_args)
                repo_name = get_single_file('.')
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(self.stage.source_path):
                checkout_args = ['checkout', self.commit]
                if not debug:
                    checkout_args.insert(1, '--quiet')
                git(*checkout_args)

        else:
            # Can be more efficient if not checking out a specific commit.
            args = ['clone']
            if not spack.config.get('config:debug'):
                args.append('--quiet')

            # If we want a particular branch ask for it.
            if self.branch:
                args.extend(['--branch', self.branch])
            elif self.tag and self.git_version >= ver('1.8.5.2'):
                args.extend(['--branch', self.tag])

            # Try to be efficient if we're using a new enough git.
            # This checks out only one branch's history
            if self.git_version >= ver('1.7.10'):
                if self.get_full_repo:
                    args.append('--no-single-branch')
                else:
                    args.append('--single-branch')

            with temp_cwd():
                # Yet more efficiency: only download a 1-commit deep
                # tree, if the in-use git and protocol permit it.
                if (not self.get_full_repo) and \
                   self.git_version >= ver('1.7.1') and \
                   self.protocol_supports_shallow_clone():
                    args.extend(['--depth', '1'])

                args.extend([self.url])
                git(*args)

                repo_name = get_single_file('.')
                self.stage.srcdir = repo_name
                shutil.move(repo_name, self.stage.source_path)

            with working_dir(self.stage.source_path):
                # For tags, be conservative and check them out AFTER
                # cloning.  Later git versions can do this with clone
                # --branch, but older ones fail.
                if self.tag and self.git_version < ver('1.8.5.2'):
                    # pull --tags returns a "special" error code of 1 in
                    # older versions that we have to ignore.
                    # see: https://github.com/git/git/commit/19d122b
                    pull_args = ['pull', '--tags']
                    co_args = ['checkout', self.tag]
                    if not spack.config.get('config:debug'):
                        pull_args.insert(1, '--quiet')
                        co_args.insert(1, '--quiet')

                    git(*pull_args, ignore_errors=1)
                    git(*co_args)

        if self.submodules_delete:
            with working_dir(self.stage.source_path):
                for submodule_to_delete in self.submodules_delete:
                    args = ['rm', submodule_to_delete]
                    if not spack.config.get('config:debug'):
                        args.insert(1, '--quiet')
                    git(*args)

        # Init submodules if the user asked for them.
        if self.submodules:
            with working_dir(self.stage.source_path):
                args = ['submodule', 'update', '--init', '--recursive']
                if not spack.config.get('config:debug'):
                    args.insert(1, '--quiet')
                git(*args)
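
The branch and depth handling above can be summarized as a pure function; a sketch, assuming a git new enough (>= 1.8.5.2) that tags may also be passed to --branch:

def clone_args(url, branch=None, quiet=True, full_repo=False,
               shallow_ok=True):
    """Assemble git-clone arguments the way fetch() above does."""
    args = ['clone']
    if quiet:
        args.append('--quiet')
    if branch:
        args.extend(['--branch', branch])
    args.append('--no-single-branch' if full_repo else '--single-branch')
    if not full_repo and shallow_ok:
        args.extend(['--depth', '1'])
    args.append(url)
    return args

# clone_args('https://example.com/repo.git', branch='v1.0') ->
# ['clone', '--quiet', '--branch', 'v1.0', '--single-branch',
#  '--depth', '1', 'https://example.com/repo.git']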
Example #57
def fetch(self):
    # The "source" is a local directory, so there is nothing to
    # download; just link it into the stage.
    tty.msg("Using source directory, nothing to fetch")
    tty.msg("%s --> %s" % (self.url, self.stage.source_path))
    os.symlink(self.url, self.stage.source_path)
    return True
Example #58
def repo_create(args):
    """Create a new package repository."""
    full_path, namespace = create_repo(args.directory, args.namespace)
    tty.msg("Created repo with namespace '%s'." % namespace)
    tty.msg("To register it with spack, run this command:",
            'spack repo add %s' % full_path)
Example #59
def name_only(pkgs):
    indent = 0
    if sys.stdout.isatty():
        tty.msg("%d packages." % len(pkgs))
    colify(pkgs, indent=indent)
Example #60
def _createtarball(env,
                   spec_yaml=None,
                   packages=None,
                   add_spec=True,
                   add_deps=True,
                   output_location=os.getcwd(),
                   signing_key=None,
                   force=False,
                   make_relative=False,
                   unsigned=False,
                   allow_root=False,
                   rebuild_index=False):
    if spec_yaml:
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            package = '/{0}'.format(s.dag_hash())
            matches = find_matching_specs(package, env=env)

    elif packages:
        matches = find_matching_specs(packages, env=env)

    elif env:
        matches = [env.specs_by_hash[h] for h in env.concretized_order]

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an active environment," +
                " or else a path to a yaml file containing a spec" +
                " to install")
    specs = set()

    mirror = spack.mirror.MirrorCollection().lookup(output_location)
    outdir = url_util.format(mirror.push_url)

    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            lookup = spack.store.db.query_one(match)

            if not add_spec:
                tty.debug('skipping matching root spec %s' % match.format())
            elif lookup is None:
                tty.debug('skipping uninstalled matching spec %s' %
                          match.format())
            else:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)

            if not add_deps:
                continue

            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                # skip root, since it's handled above
                if d == 0:
                    continue

                lookup = spack.store.db.query_one(node)

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                elif lookup is None:
                    tty.debug('skipping uninstalled dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, make_relative, unsigned,
                                  allow_root, signing_key, rebuild_index)
        except bindist.NoOverwriteException as e:
            tty.warn(e)
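
A minimal invocation sketch, assuming an active environment object env and a local mirror URL (both illustrative):

# Push the environment's concretized specs, plus their link/run
# dependencies, to the mirror's build cache without signing.
_createtarball(env,
               add_spec=True,
               add_deps=True,
               output_location='file:///tmp/mirror',
               unsigned=True)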