Example 1
def install_spec(cli_args, kwargs, abstract_spec, spec):
    env = ev.get_env(cli_args, 'install', required=False)
    if env:
        env.install(abstract_spec, spec, **kwargs)
        env.write()
    else:
        spec.package.do_install(**kwargs)
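Every example on this page follows the same shape: the command asks ev.get_env for the active environment and branches on the result. Below is a minimal sketch of that pattern, assuming the usual Spack imports; the frobnicate command and its label are hypothetical.

import llnl.util.tty as tty
import spack.environment as ev


def frobnicate(parser, args):
    # With required=False, get_env returns the active environment or
    # None instead of erroring out when no environment is active.
    env = ev.get_env(args, 'frobnicate', required=False)
    if env:
        tty.msg('Operating on environment %s' % env.name)
    else:
        tty.msg('No active environment')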
Example 2
File: env.py Project: LLNL/spack
def env_status(args):
    env = ev.get_env(args, 'env status', required=False)
    if env:
        if env.path == os.getcwd():
            tty.msg('Using %s in current directory: %s'
                    % (ev.manifest_name, env.path))
        else:
            tty.msg('In environment %s' % env.name)
    else:
        tty.msg('No active environment')
Example 3
File: add.py Project: LLNL/spack
def add(parser, args):
    env = ev.get_env(args, 'add')

    for spec in spack.cmd.parse_specs(args.specs):
        if not env.add(spec):
            tty.msg("Package {0} was already added to {1}"
                    .format(spec.name, env.name))
        else:
            tty.msg('Adding %s to environment %s' % (spec, env.name))
    env.write()
Example 4
File: remove.py Project: LLNL/spack
def remove(parser, args):
    env = ev.get_env(args, 'remove')

    if args.all:
        env.clear()
    else:
        for spec in spack.cmd.parse_specs(args.specs):
            tty.msg('Removing %s from environment %s' % (spec, env.name))
            env.remove(spec, force=args.force)
    env.write()
Example 5
def find(parser, args):
    q_args = query_arguments(args)
    # Query the current store or the internal bootstrap store if required
    if args.bootstrap:
        msg = 'Showing internal bootstrap store at "{0}"'
        tty.msg(msg.format(spack.paths.user_bootstrap_store))
        with spack.store.use_store(spack.paths.user_bootstrap_store):
            results = args.specs(**q_args)
    else:
        results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()
    removed = set()

    env = ev.get_env(args, 'find')
    if env:
        decorator, added, roots, removed = setup_env(env)

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return 1

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    if args.loaded:
        hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
        results = [x for x in results if x.dag_hash() in hashes]

    # Display the result
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
    else:
        if not args.format:
            if env:
                display_env(env, args, decorator)
        if sys.stdout.isatty() and args.groups:
            tty.msg("%s" % plural(len(results), 'installed package'))
        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
Example 6
def undevelop(parser, args):
    env = ev.get_env(args, 'undevelop', required=True)

    if args.all:
        specs = env.dev_specs.keys()
    else:
        specs = spack.cmd.parse_specs(args.specs)

    with env.write_transaction():
        changed = False
        for spec in specs:
            tty.msg('Removing %s from environment %s development specs' %
                    (spec, env.name))
            changed |= env.undevelop(spec)
        if changed:
            env.write()
Example 7
def find(parser, args):
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()
    removed = set()

    env = ev.get_env(args, 'find', required=False)
    if env:
        decorator, added, roots, removed = setup_env(env)

    # Exit early if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    # Display the result
    if env:
        tty.msg('In environment %s' % env.name)

        if not env.user_specs:
            tty.msg('No root specs')
        else:
            tty.msg('Root specs')
            display_specs(env.user_specs,
                          args,
                          decorator=lambda s, f: color.colorize('@*{%s}' % f))
        print()

        if args.show_concretized:
            tty.msg('Concretized roots')
            display_specs(env.specs_by_hash.values(),
                          args,
                          decorator=decorator)
            print()

    tty.msg("%s" % plural(len(results), 'installed package'))

    display_specs(results, args, decorator=decorator, all_headers=True)
Example 8
File: find.py Project: LLNL/spack
def find(parser, args):
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()
    removed = set()

    env = ev.get_env(args, 'find', required=False)
    if env:
        decorator, added, roots, removed = setup_env(env)

    # Exit early if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    # Display the result
    if env:
        tty.msg('In environment %s' % env.name)

        if not env.user_specs:
            tty.msg('No root specs')
        else:
            tty.msg('Root specs')
            display_specs(
                env.user_specs, args,
                decorator=lambda s, f: color.colorize('@*{%s}' % f))
        print()

        if args.show_concretized:
            tty.msg('Concretized roots')
            display_specs(
                env.specs_by_hash.values(), args, decorator=decorator)
            print()

    tty.msg("%s" % plural(len(results), 'installed package'))

    display_specs(results, args, decorator=decorator, all_headers=True)
Example 9
def createtarball(args):
    """create a binary package from an existing install"""

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    output_location = None
    if args.directory:
        output_location = args.directory

        # User meant to provide a path to a local directory.
        # Ensure that they did not accidentally pass a URL.
        scheme = url_util.parse(output_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--directory" expected a local path; got a URL, instead')

        # User meant to provide a path to a local directory.
        # Ensure that the mirror lookup does not mistake it for a named mirror.
        output_location = 'file://' + output_location

    elif args.mirror_name:
        output_location = args.mirror_name

        # User meant to provide the name of a preconfigured mirror.
        # Ensure that the mirror lookup actually returns a named mirror.
        result = spack.mirror.MirrorCollection().lookup(output_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=output_location))

    elif args.mirror_url:
        output_location = args.mirror_url

        # User meant to provide a URL for an anonymous mirror.
        # Ensure that they actually provided a URL.
        scheme = url_util.parse(output_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=output_location))
    add_spec = ('package' in args.things_to_install)
    add_deps = ('dependencies' in args.things_to_install)

    _createtarball(env, args.spec_yaml, args.specs, add_spec, add_deps,
                   output_location, args.key, args.force, args.rel,
                   args.unsigned, args.allow_root, args.no_rebuild_index)
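The '<missing>' default in the scheme checks above is a sentinel: if parsing leaves it in place, the argument carried no URL scheme and is treated as a local path. The same check expressed with the standard library, as a rough analogue (Spack's own url_util is not used here, and the sample locations are made up):

from urllib.parse import urlparse

# Rough standard-library analogue of the scheme sentinel check above.
for location in ('/tmp/my-mirror', 'https://example.com/mirror'):
    scheme = urlparse(location).scheme or '<missing>'
    print(location, '->', scheme)
# /tmp/my-mirror -> <missing>
# https://example.com/mirror -> https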
Example 10
def dependencies(parser, args):
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        env = ev.get_env(args, 'dependencies')
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = '{name}{@version}{%compiler}{/hash:7}'
        tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
        deps = spack.store.db.installed_relatives(spec,
                                                  'children',
                                                  args.transitive,
                                                  deptype=args.deptype)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
            print("No dependencies")

    else:
        spec = specs[0]

        if not spec.virtual:
            packages = [spec.package]
        else:
            packages = [
                spack.repo.get(s.name)
                for s in spack.repo.path.providers_for(spec)
            ]

        dependencies = set()
        for pkg in packages:
            possible = pkg.possible_dependencies(args.transitive,
                                                 args.expand_virtuals,
                                                 deptype=args.deptype)
            dependencies.update(possible)

        if spec.name in dependencies:
            dependencies.remove(spec.name)

        if dependencies:
            colify(sorted(dependencies))
        else:
            print("No dependencies")
Example 11
def env_view(args):
    env = ev.get_env(args, 'env view')

    if env:
        if args.action == ViewAction.regenerate:
            env.regenerate_views()
        elif args.action == ViewAction.enable:
            if args.view_path:
                view_path = args.view_path
            else:
                view_path = env.view_path_default
            env.update_default_view(view_path)
            env.write()
        elif args.action == ViewAction.disable:
            env.update_default_view(None)
            env.write()
    else:
        tty.msg("No active environment")
Example 12
def fetch(parser, args):
    if args.specs:
        specs = spack.cmd.parse_specs(args.specs, concretize=True)
    else:
        # No specs were given explicitly; check whether we are in an
        # environment. If so and --missing was given, fetch only its
        # uninstalled specs; otherwise fetch all of its specs. If we
        # are not in an environment either, complain to the user that
        # we don't know what to do.
        env = ev.get_env(args, "fetch")
        if env:
            if args.missing:
                specs = env.uninstalled_specs()
            else:
                specs = env.all_specs()
            if specs == []:
                tty.die("No uninstalled specs in environment. Did you "
                        "run `spack concretize` yet?")
        else:
            tty.die("fetch requires at least one spec argument")

    if args.no_checksum:
        spack.config.set("config:checksum", False, scope="command_line")

    if args.deprecated:
        spack.config.set('config:deprecated', True, scope='command_line')

    for spec in specs:
        if args.missing or args.dependencies:
            for s in spec.traverse():
                package = spack.repo.get(s)

                # Skip already-installed packages with --missing
                if args.missing and package.installed:
                    continue

                # Do not attempt to fetch externals (they're local)
                if package.spec.external:
                    continue

                package.do_fetch()

        package = spack.repo.get(spec)
        package.do_fetch()
Example 13
File: config.py Project: LLNL/spack
def _get_scope_and_section(args):
    """Extract config scope and section from arguments."""
    scope = args.scope
    section = args.section

    # w/no args and an active environment, point to env manifest
    if not args.section:
        env = ev.get_env(args, 'config edit', required=False)
        if env:
            scope = env.env_file_config_scope_name()

    # set scope defaults
    elif not args.scope:
        if section == 'compilers':
            scope = spack.config.default_modify_scope()
        else:
            scope = 'user'

    return scope, section
Example 14
def external_status(args):
    """spack external status"""
    env = ev.get_env(args, 'env status')
    if env:
        env.concretize()

        def _tree_to_display(spec):
            return spec.tree(recurse_dependencies=True,
                             status_fn=spack.spec.Spec.install_status,
                             hashlen=8,
                             hashes=True,
                             show_external=True)

        for user_spec, concrete_spec in env.concretized_specs():
            tty.msg('Concretized {0}'.format(user_spec))
            sys.stdout.write(_tree_to_display(concrete_spec))
            print('')
    else:
        tty.msg('No active environment')
Example 15
def _get_scope_and_section(args):
    """Extract config scope and section from arguments."""
    scope = args.scope
    section = args.section

    # w/no args and an active environment, point to env manifest
    if not args.section:
        env = ev.get_env(args, 'config edit')
        if env:
            scope = env.env_file_config_scope_name()

    # set scope defaults
    elif not args.scope:
        if section == 'compilers':
            scope = spack.config.default_modify_scope()
        else:
            scope = 'user'

    return scope, section
Example 16
def stage(parser, args):
    if not args.specs:
        env = ev.get_env(args, 'stage')
        if env:
            tty.msg("Staging specs from environment %s" % env.name)
            for spec in env.specs_by_hash.values():
                for dep in spec.traverse():
                    dep.package.do_stage()
            return
        else:
            tty.die("`spack stage` requires a spec or an active environment")

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    for spec in specs:
        package = spack.repo.get(spec)
        if args.path:
            package.path = args.path
        package.do_stage()
Example 17
File: env.py Project: LLNL/spack
def env_loads(args):
    env = ev.get_env(args, 'env loads')

    # Set the module types that have been selected
    module_type = args.module_type
    if module_type is None:
        # If no selection has been made, default to tcl modules
        module_type = 'tcl'

    recurse_dependencies = args.recurse_dependencies
    args.recurse_dependencies = False

    loads_file = fs.join_path(env.path, 'loads')
    with open(loads_file, 'w') as f:
        specs = env._get_environment_specs(
            recurse_dependencies=recurse_dependencies)

        spack.cmd.modules.loads(module_type, specs, args, f)

    print('To load this environment, type:')
    print('   source %s' % loads_file)
Example 18
def diff(parser, args):
    env = ev.get_env(args, 'diff')

    if len(args.specs) != 2:
        tty.die("You must provide two specs to diff.")

    specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
             for spec in spack.cmd.parse_specs(args.specs)]

    # Calculate the comparison (c)
    color = False if args.dump_json else get_color_when()
    c = compare_specs(specs[0], specs[1], to_string=True, color=color)

    # Default to all attributes
    attributes = args.attribute or ["all"]

    if args.dump_json:
        print(sjson.dump(c))
    else:
        tty.warn("This interface is subject to change.\n")
        print_difference(c, attributes)
Example 19
def env_loads(args):
    env = ev.get_env(args, 'env loads', required=True)

    # Set the module types that have been selected
    module_type = args.module_type
    if module_type is None:
        # If no selection has been made, default to tcl modules
        module_type = 'tcl'

    recurse_dependencies = args.recurse_dependencies
    args.recurse_dependencies = False

    loads_file = fs.join_path(env.path, 'loads')
    with open(loads_file, 'w') as f:
        specs = env._get_environment_specs(
            recurse_dependencies=recurse_dependencies)

        spack.cmd.modules.loads(module_type, specs, args, f)

    print('To load this environment, type:')
    print('   source %s' % loads_file)
Example 20
File: stage.py Project: LLNL/spack
def stage(parser, args):
    if not args.specs:
        env = ev.get_env(args, 'stage', required=False)
        if env:
            tty.msg("Staging specs from environment %s" % env.name)
            for spec in env.specs_by_hash.values():
                for dep in spec.traverse():
                    dep.package.do_stage()
            return
        else:
            tty.die("`spack stage` requires a spec or an active environment")

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    specs = spack.cmd.parse_specs(args.specs, concretize=True)
    for spec in specs:
        package = spack.repo.get(spec)
        if args.path:
            package.path = args.path
        package.do_stage()
Example 21
def ci_generate(args):
    """Generate jobs file from a spack environment file containing CI info.
       Before invoking this command, you can set the environment variable
       SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
       for creating a build group for the generated workload and registering
       all generated jobs under that build group.  If this environment
       variable is not set, no build group will be created on CDash."""
    env = ev.get_env(args, 'ci generate', required=True)

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    run_optimizer = args.optimize
    use_dependencies = args.dependencies
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root

    if not output_file:
        output_file = os.path.abspath(".gitlab-ci.yml")
    else:
        output_file_path = os.path.abspath(output_file)
        gen_ci_dir = os.path.dirname(output_file_path)
        if not os.path.exists(gen_ci_dir):
            os.makedirs(gen_ci_dir)

    # Generate the jobs
    spack_ci.generate_gitlab_ci_yaml(env,
                                     True,
                                     output_file,
                                     prune_dag=prune_dag,
                                     check_index_only=index_only,
                                     run_optimizer=run_optimizer,
                                     use_dependencies=use_dependencies,
                                     artifacts_root=artifacts_root)

    if copy_yaml_to:
        copy_to_dir = os.path.dirname(copy_yaml_to)
        if not os.path.exists(copy_to_dir):
            os.makedirs(copy_to_dir)
        shutil.copyfile(output_file, copy_yaml_to)
Example 22
def stage(parser, args):
    # We temporarily modify the working directory when setting up a stage, so we need to
    # convert this to an absolute path here in order for it to remain valid later.
    custom_path = os.path.abspath(args.path) if args.path else None
    if custom_path:
        spack.stage.create_stage_root(custom_path)

    if not args.specs:
        env = ev.get_env(args, 'stage')
        if env:
            tty.msg("Staging specs from environment %s" % env.name)
            for spec in env.specs_by_hash.values():
                for dep in spec.traverse():
                    dep.package.do_stage()
                    tty.msg("Staged {0} in {1}".format(dep.package.name,
                                                       dep.package.stage.path))
            return
        else:
            tty.die("`spack stage` requires a spec or an active environment")

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    if args.deprecated:
        spack.config.set('config:deprecated', True, scope='command_line')

    specs = spack.cmd.parse_specs(args.specs, concretize=False)

    # prevent multiple specs from extracting in the same folder
    if len(specs) > 1 and custom_path:
        tty.die("`--path` requires a single spec, but multiple were provided")

    for spec in specs:
        spec = spack.cmd.matching_spec_from_env(spec)
        package = spack.repo.get(spec)
        if custom_path:
            package.path = custom_path
        package.do_stage()
        tty.msg("Staged {0} in {1}".format(package.name, package.stage.path))
Example 23
def test_list(args):
    """List installed packages with available tests."""
    if args.list_all:
        all_packages_with_tests = [
            pkg_class.name
            for pkg_class in spack.repo.path.all_package_classes()
            if has_test_method(pkg_class)
        ]
        if sys.stdout.isatty():
            tty.msg("%d packages with tests." % len(all_packages_with_tests))
        colify.colify(all_packages_with_tests)
        return

    # TODO: This can be extended to have all of the output formatting options
    # from `spack find`.
    env = ev.get_env(args, 'test')
    hashes = env.all_hashes() if env else None

    specs = spack.store.db.query(hashes=hashes)
    specs = list(filter(lambda s: has_test_method(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)
Example 24
def install_spec(cli_args, kwargs, abstract_spec, spec):
    """Do the actual installation."""

    try:
        # handle active environment, if any
        env = ev.get_env(cli_args, 'install')
        if env:
            env.install(abstract_spec, spec, **kwargs)
            env.write()
        else:
            spec.package.do_install(**kwargs)

    except spack.build_environment.InstallError as e:
        if cli_args.show_log_on_error:
            e.print_context()
            if not os.path.exists(e.pkg.build_log_path):
                tty.error("'spack install' created no log.")
            else:
                sys.stderr.write('Full build log:\n')
                with open(e.pkg.build_log_path) as log:
                    shutil.copyfileobj(log, sys.stderr)
        raise
Example 25
def env_activate(args):
    env = args.activate_env
    if not args.shell:
        msg = [
            "This command works best with Spack's shell support",
            ""
        ] + spack.cmd.common.shell_init_instructions + [
            'Or, if you want to use `spack env activate` without initializing',
            'shell support, you can run one of these:',
            '',
            '    eval `spack env activate --sh %s`   # for bash/sh' % env,
            '    eval `spack env activate --csh %s`  # for csh/tcsh' % env,
        ]
        tty.msg(*msg)
        return 1

    if ev.exists(env) and not args.dir:
        spack_env = ev.root(env)
        short_name = env
        env_prompt = '[%s]' % env

    elif ev.is_env_dir(env):
        spack_env = os.path.abspath(env)
        short_name = os.path.basename(os.path.abspath(env))
        env_prompt = '[%s]' % short_name

    else:
        tty.die("No such environment: '%s'" % env)

    if spack_env == os.environ.get('SPACK_ENV'):
        tty.die("Environment %s is already active" % args.activate_env)

    active_env = ev.get_env(namedtuple('args', ['env'])(env),
                            'activate')
    cmds = ev.activate(
        active_env, add_view=args.with_view, shell=args.shell,
        prompt=env_prompt if args.prompt else None
    )
    sys.stdout.write(cmds)
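Note the namedtuple near the end: for this call, get_env only reads an env attribute off its first argument, so the command fabricates a minimal stand-in instead of reusing the full argparse namespace. A stripped-down illustration of that trick (the environment name is made up):

from collections import namedtuple

# A one-field namedtuple is enough to stand in for the argparse
# namespace here, since only the `env` attribute is read.
FakeArgs = namedtuple('args', ['env'])
fake = FakeArgs('myenv')  # 'myenv' is a hypothetical environment name
print(fake.env)           # -> myenv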
Example 26
def uninstall_specs(args, specs):
    env = ev.get_env(args, 'uninstall')

    uninstall_list, remove_list = get_uninstall_list(args, specs, env)
    anything_to_do = set(uninstall_list).union(set(remove_list))

    if not anything_to_do:
        tty.warn('There are no packages to uninstall.')
        return

    if not args.yes_to_all:
        tty.msg('The following packages will be uninstalled:\n')
        spack.cmd.display_specs(anything_to_do, **display_args)
        answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
        if not answer:
            tty.die('Will not uninstall any packages.')

    # just force-remove things in the remove list
    for spec in remove_list:
        _remove_from_env(spec, env)

    # Uninstall everything on the list
    do_uninstall(env, uninstall_list, args.force)
Example 27
def uninstall_specs(args, specs):
    env = ev.get_env(args, 'uninstall')

    uninstall_list, remove_list = get_uninstall_list(args, specs, env)
    anything_to_do = set(uninstall_list).union(set(remove_list))

    if not anything_to_do:
        tty.warn('There are no packages to uninstall.')
        return

    if not args.yes_to_all:
        confirm_removal(anything_to_do)

    if env:
        # Remove all the specs that are supposed to be uninstalled or just
        # removed.
        with env.write_transaction():
            for spec in itertools.chain(remove_list, uninstall_list):
                _remove_from_env(spec, env)
            env.write()

    # Uninstall everything on the list
    do_uninstall(env, uninstall_list, args.force)
Example 28
def uninstall_specs(args, specs):
    env = ev.get_env(args, 'uninstall', required=False)

    uninstall_list, remove_list = get_uninstall_list(args, specs, env)
    anything_to_do = set(uninstall_list).union(set(remove_list))

    if not anything_to_do:
        tty.warn('There are no packages to uninstall.')
        return

    if not args.yes_to_all:
        tty.msg('The following packages will be uninstalled:\n')
        spack.cmd.display_specs(anything_to_do, **display_args)
        answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
        if not answer:
            tty.die('Will not uninstall any packages.')

    # just force-remove things in the remove list
    for spec in remove_list:
        _remove_from_env(spec, env)

    # Uninstall everything on the list
    do_uninstall(env, uninstall_list, args.force)
Example 29
def _get_scope_and_section(args):
    """Extract config scope and section from arguments."""
    scope = args.scope
    section = getattr(args, 'section', None)
    path = getattr(args, 'path', None)

    # w/no args and an active environment, point to env manifest
    if not section:
        env = ev.get_env(args, 'config edit')
        if env:
            scope = env.env_file_config_scope_name()

    # set scope defaults
    elif not scope:
        scope = spack.config.default_modify_scope(section)

    # special handling for commands that take value instead of section
    if path:
        section = path[:path.find(':')] if ':' in path else path
        if not scope:
            scope = spack.config.default_modify_scope(section)

    return scope, section
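For commands that take a colon-separated value path instead of a bare section, the helper above derives the section from the text before the first colon. A tiny illustration of that slicing, with made-up paths:

# Same slicing as in the helper above; the sample paths are assumptions.
for path in ('config:install_tree:/opt/spack', 'packages'):
    section = path[:path.find(':')] if ':' in path else path
    print(path, '->', section)
# config:install_tree:/opt/spack -> config
# packages -> packages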
Example 30
def dependencies(parser, args):
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("spack dependencies takes only one spec.")

    if args.installed:
        env = ev.get_env(args, 'dependencies')
        spec = spack.cmd.disambiguate_spec(specs[0], env)

        format_string = '{name}{@version}{%compiler}{/hash:7}'
        if sys.stdout.isatty():
            tty.msg("Dependencies of %s" %
                    spec.format(format_string, color=True))
        deps = spack.store.db.installed_relatives(spec,
                                                  'children',
                                                  args.transitive,
                                                  deptype=args.deptype)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
            print("No dependencies")

    else:
        spec = specs[0]
        dependencies = spack.package.possible_dependencies(
            spec,
            transitive=args.transitive,
            expand_virtuals=args.expand_virtuals,
            deptype=args.deptype)

        if spec.name in dependencies:
            del dependencies[spec.name]

        if dependencies:
            colify(sorted(dependencies))
        else:
            print("No dependencies")
Example 31
def find(parser, args):
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()
    removed = set()

    env = ev.get_env(args, 'find')
    if env:
        decorator, added, roots, removed = setup_env(env)

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        msg = "No package matches the query: {0}"
        msg = msg.format(' '.join(args.constraint))
        tty.msg(msg)
        return 1

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    # Display the result
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
    else:
        if env:
            display_env(env, args, decorator)
        if args.groups:
            tty.msg("%s" % plural(len(results), 'installed package'))
        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
Example 32
def activate(parser, args):
    specs = spack.cmd.parse_specs(args.spec)
    if len(specs) != 1:
        tty.die("activate requires one spec.  %d given." % len(specs))

    env = ev.get_env(args, 'activate')
    spec = spack.cmd.disambiguate_spec(specs[0], env)
    if not spec.package.is_extension:
        tty.die("%s is not an extension." % spec.name)

    if args.view:
        target = args.view
    else:
        target = spec.package.extendee_spec.prefix

    view = YamlFilesystemView(target, spack.store.layout)

    if spec.package.is_activated(view):
        tty.msg("Package %s is already activated." % specs[0].short_spec)
        return

    # TODO: refactor FilesystemView.add_extension and use that here (so there
    # aren't two ways of activating extensions)
    spec.package.do_activate(view, with_dependencies=not args.force)
Example 33
def load(parser, args):
    env = ev.get_env(args, 'load')
    specs = [
        spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
        for spec in spack.cmd.parse_specs(args.specs)
    ]

    if not args.shell:
        specs_string = ' '.join(args.specs)
        msg = [
            "This command works best with Spack's shell support", ""
        ] + spack.cmd.common.shell_init_instructions + [
            'Or, if you want to use `spack load` without initializing',
            'shell support, you can run one of these:',
            '',
            '    eval `spack load --sh %s`   # for bash/sh' % specs_string,
            '    eval `spack load --csh %s`  # for csh/tcsh' % specs_string,
        ]
        tty.msg(*msg)
        return 1

    with spack.store.db.read_transaction():
        if 'dependencies' in args.things_to_load:
            include_roots = 'package' in args.things_to_load
            specs = [
                dep for spec in specs
                for dep in spec.traverse(root=include_roots, order='post')
            ]

        env_mod = spack.util.environment.EnvironmentModifications()
        for spec in specs:
            env_mod.extend(uenv.environment_modifications_for_spec(spec))
            env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
        cmds = env_mod.shell_modifications(args.shell)

        sys.stdout.write(cmds)
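The nested comprehension in the middle flattens every spec's dependency traversal into a single list. The same shape with plain lists standing in for specs (assumed data):

# Flattening shape as in the comprehension above, with lists standing
# in for specs and their traversals.
specs = [['zlib', 'bzip2'], ['cmake']]
flat = [dep for spec in specs for dep in spec]
print(flat)  # ['zlib', 'bzip2', 'cmake']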
Example 34
def view(parser, args):
    'Produce a view of a set of packages.'

    specs = spack.cmd.parse_specs(args.specs)
    path = args.path[0]

    if args.action in actions_link and args.projection_file:
        # argparse confirms file exists
        with open(args.projection_file, 'r') as f:
            projections_data = s_yaml.load(f)
            validate(projections_data, spack.schema.projections.schema)
            ordered_projections = projections_data['projections']
    else:
        ordered_projections = {}

    # What method are we using for this view
    if args.action in ("hardlink", "hard"):
        link_fn = view_hardlink
    elif args.action in ("copy", "relocate"):
        link_fn = view_copy
    else:
        link_fn = view_symlink

    view = YamlFilesystemView(path,
                              spack.store.layout,
                              projections=ordered_projections,
                              ignore_conflicts=getattr(args,
                                                       "ignore_conflicts",
                                                       False),
                              link=link_fn,
                              verbose=args.verbose)

    # Process common args and specs
    if getattr(args, "all", False):
        specs = view.get_all_specs()
        if len(specs) == 0:
            tty.warn("Found no specs in %s" % path)

    elif args.action in actions_link:
        # only link commands need to disambiguate specs
        env = ev.get_env(args, 'view link')
        specs = [spack.cmd.disambiguate_spec(s, env) for s in specs]

    elif args.action in actions_status:
        # no specs implies all
        if len(specs) == 0:
            specs = view.get_all_specs()
        else:
            specs = disambiguate_in_view(specs, view)

    else:
        # status and remove can map a partial spec to packages in view
        specs = disambiguate_in_view(specs, view)

    with_dependencies = args.dependencies.lower() in ['true', 'yes']

    # Map action to corresponding functionality
    if args.action in actions_link:
        try:
            view.add_specs(*specs,
                           with_dependencies=with_dependencies,
                           exclude=args.exclude)
        except MergeConflictError:
            tty.info("Some file blocked the merge, adding the '-i' flag will "
                     "ignore this conflict. For more information see e.g. "
                     "https://github.com/spack/spack/issues/9029")
            raise

    elif args.action in actions_remove:
        view.remove_specs(*specs,
                          with_dependencies=with_dependencies,
                          exclude=args.exclude,
                          with_dependents=not args.no_remove_dependents)

    elif args.action in actions_status:
        view.print_status(*specs, with_dependencies=with_dependencies)

    else:
        tty.error('Unknown action: "%s"' % args.action)
Example 35
def install(parser, args, **kwargs):
    if not args.package and not args.specfiles:
        # if there are no args but an active environment or spack.yaml file
        # then install the packages from it.
        env = ev.get_env(args, 'install', required=False)
        if env:
            if not args.only_concrete:
                env.concretize()
                env.write()
            tty.msg("Installing environment %s" % env.name)
            env.install_all(args)
            return
        else:
            tty.die("install requires a package argument or a spack.yaml file")

    if args.jobs is not None:
        if args.jobs <= 0:
            tty.die("The -j option must be a positive integer!")

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    # Parse cli arguments and construct a dictionary
    # that will be passed to Package.do_install API
    update_kwargs_from_args(args, kwargs)
    kwargs.update({
        'install_dependencies': ('dependencies' in args.things_to_install),
        'install_package': ('package' in args.things_to_install)
    })

    if args.run_tests:
        tty.warn("Deprecated option: --run-tests: use --test=all instead")

    # 1. Abstract specs from cli
    reporter = spack.report.collect_info(args.log_format,
                                         ' '.join(args.package),
                                         args.cdash_upload_url)
    if args.log_file:
        reporter.filename = args.log_file

    abstract_specs = spack.cmd.parse_specs(args.package)
    tests = False
    if args.test == 'all' or args.run_tests:
        tests = True
    elif args.test == 'root':
        tests = [spec.name for spec in abstract_specs]
    kwargs['tests'] = tests

    try:
        specs = spack.cmd.parse_specs(
            args.package, concretize=True, tests=tests)
    except SpackError as e:
        reporter.concretization_report(e.message)
        raise

    # 2. Concrete specs from yaml files
    for file in args.specfiles:
        with open(file, 'r') as f:
            s = spack.spec.Spec.from_yaml(f)

        if s.concretized().dag_hash() != s.dag_hash():
            msg = 'skipped invalid file "{0}". '
            msg += 'The file does not contain a concrete spec.'
            tty.warn(msg.format(file))
            continue

        specs.append(s.concretized())

    if len(specs) == 0:
        tty.die('The `spack install` command requires a spec to install.')

    if not args.log_file and not reporter.filename:
        reporter.filename = default_log_file(specs[0])
    reporter.specs = specs
    with reporter:
        if args.overwrite:
            # If we asked to overwrite an existing spec we must ensure that:
            # 1. We have only one spec
            # 2. The spec is already installed
            assert len(specs) == 1, \
                "only one spec is allowed when overwriting an installation"

            spec = specs[0]
            t = spack.store.db.query(spec)
            assert len(t) == 1, "to overwrite a spec you must install it first"

            # Give the user a last chance to think about overwriting an already
            # existing installation
            if not args.yes_to_all:
                tty.msg('The following package will be reinstalled:\n')

                display_args = {
                    'long': True,
                    'show_flags': True,
                    'variants': True
                }

                spack.cmd.display_specs(t, **display_args)
                answer = tty.get_yes_or_no(
                    'Do you want to proceed?', default=False
                )
                if not answer:
                    tty.die('Reinstallation aborted.')

            with fs.replace_directory_transaction(specs[0].prefix):
                install_spec(args, kwargs, abstract_specs[0], specs[0])

        else:
            for abstract, concrete in zip(abstract_specs, specs):
                install_spec(args, kwargs, abstract, concrete)
Example 36
def set_config(args, section, new, scope):
    if re.match(r'env.*', scope):
        e = ev.get_env(args, 'config add')
        e.set_config(section, new)
    else:
        spack.config.set(section, new, scope=scope)
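The regex branch works because environment config scopes carry an env-prefixed name (what env_file_config_scope_name() returns in the earlier examples); any other scope name falls through to the global config. A sketch of the match, with scope names that are assumptions:

import re

# Scope names are assumptions; only the env-prefixed one matches.
for scope in ('env:myenv', 'user', 'site'):
    print(scope, bool(re.match(r'env.*', scope)))
# env:myenv True
# user False
# site False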
Example 37
def ci_rebuild(args):
    """Check a single spec against the remote mirror, and rebuild it from
       source if the mirror does not contain the full hash match of the spec
       as computed locally. """
    env = ev.get_env(args, 'ci rebuild', required=True)

    # Make sure the environment is "gitlab-enabled", or else there's nothing
    # to do.
    yaml_root = ev.config_dict(env.yaml)
    gitlab_ci = None
    if 'gitlab-ci' in yaml_root:
        gitlab_ci = yaml_root['gitlab-ci']

    if not gitlab_ci:
        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

    # Grab the environment variables we need.  These either come from the
    # pipeline generation step ("spack ci generate"), where they were written
    # out as variables, or else provided by GitLab itself.
    pipeline_artifacts_dir = get_env_var('SPACK_ARTIFACTS_ROOT')
    job_log_dir = get_env_var('SPACK_JOB_LOG_DIR')
    repro_dir = get_env_var('SPACK_JOB_REPRO_DIR')
    local_mirror_dir = get_env_var('SPACK_LOCAL_MIRROR_DIR')
    concrete_env_dir = get_env_var('SPACK_CONCRETE_ENV_DIR')
    ci_pipeline_id = get_env_var('CI_PIPELINE_ID')
    ci_job_name = get_env_var('CI_JOB_NAME')
    signing_key = get_env_var('SPACK_SIGNING_KEY')
    root_spec = get_env_var('SPACK_ROOT_SPEC')
    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
    spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
    pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
    remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

    # Construct absolute paths relative to current $CI_PROJECT_DIR
    ci_project_dir = get_env_var('CI_PROJECT_DIR')
    pipeline_artifacts_dir = os.path.join(ci_project_dir,
                                          pipeline_artifacts_dir)
    job_log_dir = os.path.join(ci_project_dir, job_log_dir)
    repro_dir = os.path.join(ci_project_dir, repro_dir)
    local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
    concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)

    # Debug print some of the key environment variables we should have received
    tty.debug('pipeline_artifacts_dir = {0}'.format(pipeline_artifacts_dir))
    tty.debug('root_spec = {0}'.format(root_spec))
    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
    tty.debug('compiler_action = {0}'.format(compiler_action))

    # Query the environment manifest to find out whether we're reporting to a
    # CDash instance, and if so, gather some information from the manifest to
    # support that task.
    enable_cdash = False
    if 'cdash' in yaml_root:
        enable_cdash = True
        ci_cdash = yaml_root['cdash']
        job_spec_buildgroup = ci_cdash['build-group']
        cdash_base_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']
        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
        tty.debug('cdash_project = {0}'.format(cdash_project))
        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
        tty.debug('cdash_site = {0}'.format(cdash_site))
        tty.debug('related_builds = {0}'.format(related_builds))
        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

    # Is this a pipeline run on a spack PR or a merge to develop?  It might
    # be neither, e.g. a pipeline run on some environment repository.
    spack_is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
    spack_is_develop_pipeline = spack_pipeline_type == 'spack_protected_branch'

    tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
        spack_is_pr_pipeline, spack_is_develop_pipeline))

    # Figure out what is our temporary storage mirror: Is it artifacts
    # buildcache?  Or temporary-storage-url-prefix?  In some cases we need to
    # force something or pipelines might not have a way to propagate build
    # artifacts from upstream to downstream jobs.
    pipeline_mirror_url = None

    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']
        pipeline_mirror_url = url_util.join(temp_storage_url_prefix,
                                            ci_pipeline_id)

    enable_artifacts_mirror = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
        if (enable_artifacts_mirror
                or (spack_is_pr_pipeline and not enable_artifacts_mirror
                    and not temp_storage_url_prefix)):
            # If you explicitly enabled the artifacts buildcache feature, or
            # if this is a PR pipeline but you did not enable either of the
            # per-pipeline temporary storage features, we force the use of
            # artifacts buildcache.  Otherwise jobs will not have binary
            # dependencies from previous stages available since we do not
            # allow pushing binaries to the remote mirror during PR pipelines.
            enable_artifacts_mirror = True
            pipeline_mirror_url = 'file://' + local_mirror_dir
            mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
                pipeline_mirror_url)
            tty.debug(mirror_msg)

    # Whatever form of root_spec we got, use it to get a map giving us concrete
    # specs for this job and all of its dependencies.
    spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name,
                                           related_builds, compiler_action)
    job_spec = spec_map[job_spec_pkg_name]

    job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
    job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)

    # To provide logs, cdash reports, etc for developer download/perusal,
    # these things have to be put into artifacts.  This means downstream
    # jobs that "need" this job will get those artifacts too.  So here we
    # need to clean out the artifacts we may have got from upstream jobs.

    cdash_report_dir = os.path.join(pipeline_artifacts_dir, 'cdash_report')
    if os.path.exists(cdash_report_dir):
        shutil.rmtree(cdash_report_dir)

    if os.path.exists(job_log_dir):
        shutil.rmtree(job_log_dir)

    if os.path.exists(repro_dir):
        shutil.rmtree(repro_dir)

    # Now that we removed them if they existed, create the directories we
    # need for storing artifacts.  The cdash_report directory will be
    # created internally if needed.
    os.makedirs(job_log_dir)
    os.makedirs(repro_dir)

    # Copy the concrete environment files to the repro directory so we can
    # expose them as artifacts and not conflict with the concrete environment
    # files we got as artifacts from the upstream pipeline generation job.
    # Try to cast a slightly wider net too, and hopefully get the generated
    # pipeline yaml.  If we miss it, the user will still be able to go to the
    # pipeline generation job and get it from there.
    target_dirs = [concrete_env_dir, pipeline_artifacts_dir]

    for dir_to_list in target_dirs:
        for file_name in os.listdir(dir_to_list):
            src_file = os.path.join(dir_to_list, file_name)
            if os.path.isfile(src_file):
                dst_file = os.path.join(repro_dir, file_name)
                shutil.copyfile(src_file, dst_file)

    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
    # import it.
    if signing_key:
        spack_ci.import_signing_key(signing_key)

    # Depending on the specifics of this job, we might need to turn on the
    # "config:install_missing compilers" option (to build this job spec
    # with a bootstrapped compiler), or possibly run "spack compiler find"
    # (to build a bootstrap compiler or one of its deps in a
    # compiler-agnostic way), or maybe do nothing at all (to build a spec
    # using a compiler already installed on the target system).
    spack_ci.configure_compilers(compiler_action)

    # Write this job's spec yaml into the reproduction directory, and it will
    # also be used in the generated "spack install" command to install the spec
    tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
    with open(job_spec_yaml_path, 'w') as fd:
        fd.write(job_spec.to_yaml(hash=ht.build_hash))

    # Write the concrete root spec yaml into the reproduction directory
    root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
    with open(root_spec_yaml_path, 'w') as fd:
        fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

    # Write some other details to aid in reproduction into an artifact
    repro_file = os.path.join(repro_dir, 'repro.json')
    repro_details = {
        'job_name': ci_job_name,
        'job_spec_yaml': job_spec_yaml_file,
        'root_spec_yaml': 'root.yaml',
        'ci_project_dir': ci_project_dir
    }
    with open(repro_file, 'w') as fd:
        fd.write(json.dumps(repro_details))

    # Write information about spack into an artifact in the repro dir
    spack_info = spack_ci.get_spack_info()
    spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
    with open(spack_info_file, 'w') as fd:
        fd.write('\n{0}\n'.format(spack_info))

    # If we decided there should be a temporary storage mechanism, add that
    # mirror now so it's used when we check for a full hash match already
    # built for this spec.
    if pipeline_mirror_url:
        spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
                         pipeline_mirror_url, cfg.default_modify_scope())

    cdash_build_id = None
    cdash_build_stamp = None

    # Check configured mirrors for a built spec with a matching full hash
    matches = bindist.get_mirrors_for_spec(job_spec,
                                           full_hash_match=True,
                                           index_only=False)

    if matches:
        # Got a full hash match on at least one configured mirror.  All
        # matches represent the fully up-to-date spec, so should all be
        # equivalent.  If artifacts mirror is enabled, we just pick one
        # of the matches and download the buildcache files from there to
        # the artifacts, so they're available to be used by dependent
        # jobs in subsequent stages.
        tty.msg('No need to rebuild {0}, found full hash match at: '.format(
            job_spec_pkg_name))
        for match in matches:
            tty.msg('    {0}'.format(match['mirror_url']))
        if enable_artifacts_mirror:
            matching_mirror = matches[0]['mirror_url']
            build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')
            tty.debug('Getting {0} buildcache from {1}'.format(
                job_spec_pkg_name, matching_mirror))
            tty.debug('Downloading to {0}'.format(build_cache_dir))
            buildcache.download_buildcache_files(job_spec, build_cache_dir,
                                                 False, matching_mirror)

        # Now we are done and successful
        sys.exit(0)

    # No full hash match anywhere means we need to rebuild spec

    # Start with spack arguments
    install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]

    config = cfg.get('config')
    if not config['verify_ssl']:
        install_args.append('-k')

    install_args.extend([
        'install',
        '--keep-stage',
        '--require-full-hash-match',
    ])

    can_verify = spack_ci.can_verify_binaries()
    verify_binaries = can_verify and spack_is_pr_pipeline is False
    if not verify_binaries:
        install_args.append('--no-check-signature')

    # If CDash reporting is enabled, we first register this build with
    # the specified CDash instance, then relate the build to those of
    # its dependencies.
    if enable_cdash:
        tty.debug('CDash: Registering build')
        (cdash_build_id, cdash_build_stamp) = spack_ci.register_cdash_build(
            cdash_build_name, cdash_base_url, cdash_project, cdash_site,
            job_spec_buildgroup)

        if cdash_build_id is not None:
            cdash_upload_url = '{0}/submit.php?project={1}'.format(
                cdash_base_url, cdash_project_enc)

            install_args.extend([
                '--cdash-upload-url',
                cdash_upload_url,
                '--cdash-build',
                cdash_build_name,
                '--cdash-site',
                cdash_site,
                '--cdash-buildstamp',
                cdash_build_stamp,
            ])

            tty.debug('CDash: Relating build with dependency builds')
            spack_ci.relate_cdash_builds(
                spec_map, cdash_base_url, cdash_build_id, cdash_project,
                [pipeline_mirror_url, pr_mirror_url, remote_mirror_url])

    # A compiler action of 'FIND_ANY' means we are building a bootstrap
    # compiler or one of its deps.
    # TODO: when compilers are dependencies, we should include --no-add
    if compiler_action != 'FIND_ANY':
        install_args.append('--no-add')

    # TODO: once we have the concrete spec registry, use the DAG hash
    # to identify the spec to install, rather than the concrete spec
    # yaml file.
    install_args.extend(['-f', job_spec_yaml_path])

    tty.debug('Installing {0} from source'.format(job_spec.name))
    tty.debug('spack install arguments: {0}'.format(install_args))

    # Write the install command to a shell script
    with open('install.sh', 'w') as fd:
        fd.write('#!/bin/bash\n\n')
        fd.write('\n# spack install command\n')
        fd.write(' '.join(['"{0}"'.format(i) for i in install_args]))
        fd.write('\n')

    st = os.stat('install.sh')
    os.chmod('install.sh', st.st_mode | stat.S_IEXEC)

    install_copy_path = os.path.join(repro_dir, 'install.sh')
    shutil.copyfile('install.sh', install_copy_path)

    # Run the generated install.sh shell script as if it were being run in
    # a login shell.
    try:
        install_process = subprocess.Popen(['bash', '-l', './install.sh'])
        install_process.wait()
        install_exit_code = install_process.returncode
    except (ValueError, subprocess.CalledProcessError, OSError) as inst:
        tty.error('Encountered error running install script')
        tty.error(inst)
        # Without this, install_exit_code would be unbound below when
        # the script fails to launch; treat it as an install failure.
        install_exit_code = INSTALL_FAIL_CODE

    # Now do the post-install tasks
    tty.debug('spack install exited {0}'.format(install_exit_code))

    # If a spec fails to build in a spack develop pipeline, we add it to a
    # list of known broken full hashes.  This allows spack PR pipelines to
    # avoid wasting compute cycles attempting to build those hashes.
    if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
        tty.debug('Install failed on develop')
        if 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
            dev_fail_hash = job_spec.full_hash()
            broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
            tty.msg('Reporting broken develop build as: {0}'.format(
                broken_spec_path))
            tmpdir = tempfile.mkdtemp()
            empty_file_path = os.path.join(tmpdir, 'empty.txt')

            broken_spec_details = {
                'broken-spec': {
                    'job-url': get_env_var('CI_JOB_URL'),
                    'pipeline-url': get_env_var('CI_PIPELINE_URL'),
                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
                }
            }

            try:
                with open(empty_file_path, 'w') as efd:
                    efd.write(syaml.dump(broken_spec_details))
                web_util.push_to_url(empty_file_path,
                                     broken_spec_path,
                                     keep_original=False,
                                     extra_args={'ContentType': 'text/plain'})
            except Exception as err:
                # If we got some kind of S3 (access denied or other connection
                # error), the first non boto-specific class in the exception
                # hierarchy is Exception.  Just print a warning and return
                msg = 'Error writing to broken specs list {0}: {1}'.format(
                    broken_spec_path, err)
                tty.warn(msg)
            finally:
                shutil.rmtree(tmpdir)

    # We generated the "spack install ..." command to "--keep-stage", copy
    # any logs from the staging directory to artifacts now
    spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

    # Pick the mirror to push the buildcache to: either the PR-specific
    # mirror or the main mirror defined in the gitlab-enabled spack
    # environment
    if spack_is_pr_pipeline:
        buildcache_mirror_url = pr_mirror_url
    else:
        buildcache_mirror_url = remote_mirror_url

    # If the install succeeded, create a buildcache entry for this job spec
    # and push it to one or more mirrors.  If the install did not succeed,
    # print out some instructions on how to reproduce this build failure
    # outside of the pipeline environment.
    if install_exit_code == 0:
        can_sign = spack_ci.can_sign_binaries()
        sign_binaries = can_sign and not spack_is_pr_pipeline

        # Create buildcache in either the main remote mirror, or in the
        # per-PR mirror, if this is a PR pipeline
        if buildcache_mirror_url:
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          buildcache_mirror_url, sign_binaries)

            if cdash_build_id:
                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
                    cdash_build_id, buildcache_mirror_url))
                spack_ci.write_cdashid_to_mirror(cdash_build_id, job_spec,
                                                 buildcache_mirror_url)

        # Create another copy of that buildcache in the per-pipeline
        # temporary storage mirror (this is only done if either
        # artifacts buildcache is enabled or a temporary storage url
        # prefix is set)
        if pipeline_mirror_url:
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          pipeline_mirror_url, sign_binaries)

            if cdash_build_id:
                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
                    cdash_build_id, pipeline_mirror_url))
                spack_ci.write_cdashid_to_mirror(cdash_build_id, job_spec,
                                                 pipeline_mirror_url)

        # If this is a develop pipeline, check if the spec that we just built is
        # on the broken-specs list. If so, remove it.
        if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
            just_built_hash = job_spec.full_hash()
            broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
            if web_util.url_exists(broken_spec_path):
                tty.msg('Removing {0} from the list of broken specs'.format(
                    broken_spec_path))
                try:
                    web_util.remove_url(broken_spec_path)
                except Exception as err:
                    # For an S3 error (access denied or other connection
                    # problem), the first non-boto-specific class in the
                    # exception hierarchy is Exception, so catch that
                    # broadly.  Just print a warning and carry on.
                    msg = 'Error removing {0} from broken specs list: {1}'.format(
                        broken_spec_path, err)
                    tty.warn(msg)

    else:
        tty.debug('spack install exited non-zero, will not create buildcache')

        api_root_url = get_env_var('CI_API_V4_URL')
        ci_project_id = get_env_var('CI_PROJECT_ID')
        ci_job_id = get_env_var('CI_JOB_ID')

        repro_job_url = '{0}/projects/{1}/jobs/{2}/artifacts'.format(
            api_root_url, ci_project_id, ci_job_id)

        # ANSI escape codes cause this to be printed in blue so it stands out
        reproduce_msg = """

\033[34mTo reproduce this build locally, run:

    spack ci reproduce-build {0} [--working-dir <dir>]

If this project does not have public pipelines, you will need to first:

    export GITLAB_PRIVATE_TOKEN=<generated_token>

... then follow the printed instructions.\033[0;0m

""".format(repro_job_url)

        print(reproduce_msg)

    # Tie job success/failure to the success/failure of building the spec
    return install_exit_code
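
The example above writes an install.sh script, marks it executable, and runs it under bash -l so it behaves like a login shell. A minimal self-contained sketch of that pattern, with a hypothetical script body standing in for the generated spack install command, and an exit code that defaults to failure so a crashed launch reads as a failed install:

import os
import stat
import subprocess

# Hypothetical stand-in for the generated spack install command
with open('install.sh', 'w') as fd:
    fd.write('#!/bin/bash\n\necho "installing..."\n')

# Add the executable bit on top of the existing permissions
st = os.stat('install.sh')
os.chmod('install.sh', st.st_mode | stat.S_IEXEC)

# Default to failure so an exception while launching the script is
# treated the same way as a failed install
exit_code = 1
try:
    process = subprocess.Popen(['bash', '-l', './install.sh'])
    process.wait()
    exit_code = process.returncode
except OSError as err:
    print('Encountered error running install script: {0}'.format(err))

print('install script exited {0}'.format(exit_code))
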
Example no. 38
    def environment_modifications(self):
        """List of environment modifications to be processed."""
        # Modifications guessed by inspecting the spec prefix
        std_prefix_inspections = spack.config.get('modules:prefix_inspections',
                                                  {})
        set_prefix_inspections = spack.config.get(
            'modules:%s:prefix_inspections' % self.conf.name, {})
        prefix_inspections = spack.config.merge_yaml(std_prefix_inspections,
                                                     set_prefix_inspections)

        use_view = spack.config.get('modules:%s:use_view' % self.conf.name,
                                    False)

        spec = self.spec.copy()  # defensive copy before setting prefix
        if use_view:
            if use_view is True:
                use_view = ev.default_view_name

            env = ev.get_env({}, 'post_env_write_hook', required=True)
            view = env.views[use_view].view()

            spec.prefix = view.get_projection_for_spec(spec)

        env = spack.util.environment.inspect_path(
            spec.prefix,
            prefix_inspections,
            exclude=spack.util.environment.is_system_path)

        # Let the extendee/dependency modify their extensions/dependencies
        # before asking for package-specific modifications
        env.extend(
            build_environment.modifications_from_dependencies(spec,
                                                              context='run'))
        # Package specific modifications
        build_environment.set_module_variables_for_package(spec.package)
        spec.package.setup_run_environment(env)

        # Modifications required from modules.yaml
        env.extend(self.conf.env)

        # List of variables that are blacklisted in modules.yaml
        blacklist = self.conf.environment_blacklist

        # We may have tokens to substitute in environment commands

        # Prepare a suitable transformation dictionary for the names
        # of the environment variables. This means uppercasing the
        # valid tokens.
        transform = {}
        for token in _valid_tokens:
            transform[token] = lambda s, string: str.upper(string)

        for x in env:
            # Ensure all the tokens are valid in this context
            msg = 'some tokens cannot be expanded in an environment variable name'  # noqa: E501
            _check_tokens_are_valid(x.name, message=msg)
            # Transform them
            x.name = spec.format(x.name, transform=transform)
            try:
                # Not every command has a value
                x.value = spec.format(x.value)
            except AttributeError:
                pass
            x.name = str(x.name).replace('-', '_')

        return [(type(x).__name__, x) for x in env if x.name not in blacklist]
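
A side note on the transform loop above: the lambda never uses its token, so every valid token maps to the same uppercasing callable, and the loop is equivalent to a dict comprehension. A small sketch with a hypothetical token list (the real _valid_tokens comes from the surrounding module):

# Hypothetical token list; the real _valid_tokens is defined elsewhere
_valid_tokens = ('name', 'version', 'compiler')

transform = {token: (lambda s, string: string.upper())
             for token in _valid_tokens}

# A formatter would call the transform with the spec and the token text
print(transform['name'](None, 'zlib'))  # -> ZLIB
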
Example no. 39
def location(parser, args):
    if args.module_dir:
        print(spack.paths.module_path)
        return

    if args.spack_root:
        print(spack.paths.prefix)
        return

    if args.location_env:
        path = spack.environment.root(args.location_env)
        if not os.path.isdir(path):
            tty.die("no such environment: '%s'" % args.location_env)
        print(path)
        return

    if args.packages:
        print(spack.repo.path.first_repo().root)
        return

    if args.stages:
        print(spack.stage.get_stage_root())
        return

    specs = spack.cmd.parse_specs(args.spec)

    if not specs:
        tty.die("You must supply a spec.")

    if len(specs) != 1:
        tty.die("Too many specs.  Supply only one.")

    # install_dir command matches against installed specs.
    if args.install_dir:
        env = ev.get_env(args, 'location')
        spec = spack.cmd.disambiguate_spec(specs[0], env)
        print(spec.prefix)
        return

    spec = specs[0]

    # Package dir just needs the spec name
    if args.package_dir:
        print(spack.repo.path.dirname_for_package_name(spec.name))
        return

    # Either concretize or filter from already concretized environment
    spec = spack.cmd.matching_spec_from_env(spec)
    pkg = spec.package

    if args.stage_dir:
        print(pkg.stage.path)
        return

    if args.build_dir:
        # Out of source builds have build_directory defined
        if hasattr(pkg, 'build_directory'):
            # build_directory can be either absolute or relative to the
            # stage path; in either case os.path.join makes it absolute
            print(
                os.path.normpath(
                    os.path.join(pkg.stage.path, pkg.build_directory)))
            return

        # Otherwise assume in-source builds
        print(pkg.stage.source_path)
        return

    # Only the source dir remains, which requires the spec to be staged
    if not pkg.stage.expanded:
        tty.die(
            "Source directory does not exist yet. "
            "Run this to create it:", "spack stage " + " ".join(args.spec))

    # Default to source dir.
    print(pkg.stage.source_path)
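
The build_dir branch above relies on a property of os.path.join worth spelling out: a relative build_directory is resolved under the stage path, while an absolute one simply wins. A quick sketch with made-up paths (POSIX semantics):

import os.path

stage = '/tmp/spack-stage/zlib-1.2.11'

# A relative build directory is resolved under the stage path
print(os.path.normpath(os.path.join(stage, 'spack-build')))
# -> /tmp/spack-stage/zlib-1.2.11/spack-build

# An absolute build directory discards the stage path entirely
print(os.path.normpath(os.path.join(stage, '/scratch/external-build')))
# -> /scratch/external-build
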
Example no. 40
def install(parser, args, **kwargs):
    if args.help_cdash:
        parser = argparse.ArgumentParser(
            formatter_class=argparse.RawDescriptionHelpFormatter,
            epilog=textwrap.dedent('''\
environment variables:
  SPACK_CDASH_AUTH_TOKEN
                        authentication token to present to CDash
                        '''))
        arguments.add_cdash_args(parser, True)
        parser.print_help()
        return

    reporter = spack.report.collect_info(spack.package.PackageInstaller,
                                         '_install_task', args.log_format,
                                         args)
    if args.log_file:
        reporter.filename = args.log_file

    if not args.spec and not args.specfiles:
        # If no specs were given on the command line but there is an
        # active environment, install the packages from it.
        env = ev.get_env(args, 'install')
        if env:
            if not args.only_concrete:
                with env.write_transaction():
                    concretized_specs = env.concretize()
                    ev.display_specs(concretized_specs)

                    # save view regeneration for later, so that we only do it
                    # once, as it can be slow.
                    env.write(regenerate_views=False)

            specs = env.all_specs()
            if not args.log_file and not reporter.filename:
                reporter.filename = default_log_file(specs[0])
            reporter.specs = specs

            tty.msg("Installing environment {0}".format(env.name))
            with reporter:
                env.install_all(args, **kwargs)

            tty.debug("Regenerating environment views for {0}".format(
                env.name))
            with env.write_transaction():
                # It is not strictly required to synchronize view regeneration
                # but doing so can prevent redundant work in the filesystem.
                env.regenerate_views()
            return
        else:
            msg = "install requires a package argument or active environment"
            if 'spack.yaml' in os.listdir(os.getcwd()):
                # There's a spack.yaml file in the working dir, the user may
                # have intended to use that
                msg += "\n\n"
                msg += "Did you mean to install using the `spack.yaml`"
                msg += " in this directory? Try: \n"
                msg += "    spack env activate .\n"
                msg += "    spack install\n"
                msg += "  OR\n"
                msg += "    spack --env . install"
            tty.die(msg)

    if args.no_checksum:
        spack.config.set('config:checksum', False, scope='command_line')

    # Parse cli arguments and construct a dictionary
    # that will be passed to the package installer
    update_kwargs_from_args(args, kwargs)

    if args.run_tests:
        tty.warn("Deprecated option: --run-tests: use --test=all instead")

    # 1. Abstract specs from cli
    abstract_specs = spack.cmd.parse_specs(args.spec)
    tests = False
    if args.test == 'all' or args.run_tests:
        tests = True
    elif args.test == 'root':
        tests = [spec.name for spec in abstract_specs]
    kwargs['tests'] = tests

    try:
        specs = spack.cmd.parse_specs(args.spec, concretize=True, tests=tests)
    except SpackError as e:
        tty.debug(e)
        reporter.concretization_report(e.message)
        raise

    # 2. Concrete specs from yaml files
    for file in args.specfiles:
        with open(file, 'r') as f:
            s = spack.spec.Spec.from_yaml(f)

        concretized = s.concretized()
        if concretized.dag_hash() != s.dag_hash():
            msg = 'skipped invalid file "{0}". '
            msg += 'The file does not contain a concrete spec.'
            tty.warn(msg.format(file))
            continue

        abstract_specs.append(s)
        specs.append(concretized)

    if len(specs) == 0:
        tty.die('The `spack install` command requires a spec to install.')

    if not args.log_file and not reporter.filename:
        reporter.filename = default_log_file(specs[0])
    reporter.specs = specs
    with reporter('build'):
        if args.overwrite:
            installed = list(
                filter(lambda x: x, map(spack.store.db.query_one, specs)))
            if not args.yes_to_all:
                display_args = {
                    'long': True,
                    'show_flags': True,
                    'variants': True
                }

                if installed:
                    tty.msg('The following package specs will be '
                            'reinstalled:\n')
                    spack.cmd.display_specs(installed, **display_args)

                not_installed = list(
                    filter(lambda x: x not in installed, specs))
                if not_installed:
                    tty.msg('The following package specs are not installed'
                            ' and the --overwrite flag was given. They will'
                            ' be newly installed:\n')
                    spack.cmd.display_specs(not_installed, **display_args)

                # We have some specs, so one of the above must have been true
                answer = tty.get_yes_or_no('Do you want to proceed?',
                                           default=False)
                if not answer:
                    tty.die('Reinstallation aborted.')

            # overwrite all concrete explicit specs from this build
            kwargs['overwrite'] = [spec.dag_hash() for spec in specs]

        install_specs(args, kwargs, zip(abstract_specs, specs))
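
The specfile loop above decides whether a file held a concrete spec by checking that concretization leaves its DAG hash unchanged. A minimal sketch of that round-trip check, assuming a spack installation and a hypothetical spec file (e.g. one written earlier with something like `spack spec --yaml zlib > zlib.yaml`):

import spack.spec

# Hypothetical spec file produced ahead of time
with open('zlib.yaml', 'r') as f:
    s = spack.spec.Spec.from_yaml(f)

# A spec that was already concrete round-trips to the same DAG hash;
# anything abstract picks up new decisions during concretization
concretized = s.concretized()
if concretized.dag_hash() == s.dag_hash():
    print('{0} is a concrete spec'.format(s.name))
else:
    print('{0} was not concrete; skipping'.format(s.name))
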