Example #1
def restore_impl(name, the_collection):
    # First unload all loaded modules
    pymod.environ.unset(pymod.names.loaded_collection)
    pymod.mc.purge(load_after_purge=False)

    # Clear the modulepath
    orig_path = pymod.modulepath.path()
    path = pymod.modulepath.Modulepath([])
    pymod.modulepath.set_path(path)

    # Now load the collection, one module at a time
    for (directory, archives) in the_collection:
        pymod.mc.use(directory, append=True)
        for ar in archives:
            try:
                module = pymod.mc.unarchive_module(ar)
                tty.verbose("Loading part of collection: {0}".format(module))
            except pymod.error.ModuleNotFoundError:
                raise pymod.error.CollectionModuleNotFoundError(
                    ar["fullname"], ar["filename"])
            pymod.mc.load_impl(module)
            module.acquired_as = module.fullname
            assert module.is_loaded
    pymod.environ.set(pymod.names.loaded_collection, name)
    for p in orig_path:
        if not pymod.modulepath.contains(p):  # pragma: no cover
            pymod.mc.use(p, append=True)
    return None
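The shape of `the_collection` is implied by the loop above: a sequence of (directory, archives) pairs, where each archive is a dict carrying at least the "fullname" and "filename" keys used in the error path. An illustrative literal with invented paths (real archives likely carry more keys):

the_collection = [
    ("/opt/modules/core", [
        {"fullname": "gcc/8.2.0", "filename": "/opt/modules/core/gcc/8.2.0.py"},
    ]),
    ("/opt/modules/mpi", [
        {"fullname": "openmpi/3.1.2", "filename": "/opt/modules/mpi/openmpi/3.1.2.py"},
    ]),
]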
Example #2
def factory(root, path):
    filename = os.path.join(root, path)
    if not os.path.isfile(filename):  # pragma: no cover
        tty.verbose("{0} does not exist".format(filename))
        return None
    elif filename.endswith(("~", )) or filename.startswith(
        (".#", )):  # pragma: no cover
        # Don't read backup files
        return None

    if filename.endswith(".py"):
        module_type = PyModule
    elif is_tcl_module(filename):
        module_type = TclModule
    else:
        return None

    module = module_type(root, path)
    if pymod.config.get("debug"):  # pragma: no cover
        if module_type == TclModule and "gcc" in filename:
            tty.debug(module.name)
            tty.debug(module.modulepath)
            tty.debug(module.filename, "\n")

    return module
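A minimal sketch of how factory might be driven while scanning a modulepath directory; the os.walk traversal and helper name below are assumptions for illustration, not part of the original:

import os

def collect_modules(root):
    # Hypothetical helper: apply factory to every file under root
    modules = []
    for dirpath, _dirnames, filenames in os.walk(root):
        for fname in filenames:
            # factory joins root and path itself, so pass a relative path
            rel = os.path.relpath(os.path.join(dirpath, fname), root)
            module = factory(root, rel)
            if module is not None:  # None means skipped or unrecognized
                modules.append(module)
    return modules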
Example #3
def on_install_success(spec):
    """On the success of an install (after everything is complete)
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_success for %s" % spec)
    result = spack.monitor.cli.update_build(spec, status="SUCCESS")
    tty.verbose(result.get('message'))
Example #4
def on_install_failure(spec):
    """Triggered on failure of an install
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_failure for %s" % spec)
    result = spack.monitor.cli.fail_task(spec)
    tty.verbose(result.get('message'))
Example #5
def on_phase_success(pkg, phase_name, log_file):
    """Triggered on a phase success
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_phase_success %s, phase %s" % (pkg.name, phase_name))
    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "SUCCESS")
    tty.verbose(result.get('message'))
Example #6
def on_phase_error(pkg, phase_name, log_file):
    """Triggered on a phase error
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_phase_error %s, phase %s" % (pkg.name, phase_name))
    result = spack.monitor.cli.send_phase(pkg, phase_name, log_file, "ERROR")
    tty.verbose(result.get('message'))
Example #7
def on_install_start(spec):
    """On start of an install, we want to ping the server if it exists
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_start for %s" % spec)
    build_id = spack.monitor.cli.new_build(spec)
    tty.verbose("Build created with id %s" % build_id)
Example #8
def pop_marked_default(dirname, versions):
    dirname = os.path.realpath(dirname)
    assert os.path.isdir(dirname)
    linked_default = pop_linked_default(dirname, versions)
    versioned_default = pop_versioned_default(dirname, versions)
    if linked_default and versioned_default:
        tty.verbose("A linked and versioned default exist in {0}, "
                    "choosing the linked".format(dirname))
        return linked_default
    return linked_default or versioned_default
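Both helpers called above mutate the `versions` list in place (each pops its marker entry), and a `default` symlink takes precedence over a `.version` file. A runnable sketch against a temporary directory (layout invented for illustration; assumes pop_linked_default and pop_versioned_default from Examples #14 and #12 are in scope):

import os
import tempfile

root = tempfile.mkdtemp()
pkg = os.path.join(root, "gcc")
os.mkdir(pkg)
for v in ("7.3.0", "8.2.0"):
    open(os.path.join(pkg, v), "w").close()
os.symlink(os.path.join(pkg, "8.2.0"), os.path.join(pkg, "default"))

versions = sorted(os.listdir(pkg))   # ['7.3.0', '8.2.0', 'default']
default = pop_marked_default(pkg, versions)
# The 'default' marker is popped from versions and resolved to its target
assert default == os.path.realpath(os.path.join(pkg, "8.2.0"))
assert versions == ["7.3.0", "8.2.0"]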
Example #9
def refresh():
    """Unload all modules from environment and reload them"""
    loaded_modules = pymod.mc.get_loaded_modules()
    for module in loaded_modules[::-1]:
        tty.verbose("Unloading {0}".format(module))
        if module.is_loaded:
            pymod.mc.unload_impl(module)
    for module in loaded_modules:
        tty.verbose("Loading {0}".format(module))
        if not module.is_loaded:
            pymod.mc.load_impl(module)
Example #10
def append_path(self, dirname):
    dirname = Path.expand_name(dirname)
    if dirname in self:
        return
    path = Path(dirname)
    if not path.modules:
        tty.verbose("No modules found in {0}".format(path.path))
        return
    self.path.append(path)
    self.path_modified()
    return path.modules
Example #11
def mark_explicit_defaults(modules, defaults):
    for (name, filename) in defaults.items():
        mods = modules.get(name)
        if mods is None:
            tty.debug("There is no module named {0}".format(name))
            continue
        for module in mods:
            if os.path.realpath(module.filename) == os.path.realpath(filename):
                module.marked_as_default = True
                break
        else:
            tty.verbose(
                "No matching module to mark default for {0}".format(name))
Example #12
def pop_versioned_default(dirname, files):
    """TCL modules .version scheme"""
    version_file_name = ".version"
    try:
        files.remove(version_file_name)
    except ValueError:
        return None
    version_file = os.path.join(dirname, version_file_name)
    version = read_tcl_default_version(version_file)
    if version is None:
        tty.verbose(
            "Could not determine .version default in {0}".format(dirname))
    else:
        default_file = os.path.join(dirname, version)
        if os.path.exists(default_file):
            return default_file
        tty.verbose(
            "{0!r}: version default does not exist".format(default_file))
Example #13
    def __init__(self, args):
        Reporter.__init__(self, args)
        self.success = True
        # posixpath is used here to support the underlying template engine
        # Jinja2, which expects `/` path separators
        self.template_dir = posixpath.join('reports', 'cdash')
        self.cdash_upload_url = args.cdash_upload_url

        if self.cdash_upload_url:
            self.buildid_regexp = re.compile("<buildId>([0-9]+)</buildId>")
        self.phase_regexp = re.compile(r"Executing phase: '(.*)'")

        self.authtoken = None
        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            self.authtoken = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

        if getattr(args, 'spec', ''):
            packages = args.spec
        elif getattr(args, 'specs', ''):
            packages = args.specs
        else:
            packages = []
            for file in args.specfiles:
                with open(file, 'r') as f:
                    s = spack.spec.Spec.from_yaml(f)
                    packages.append(s.format())
        self.install_command = ' '.join(packages)
        self.base_buildname = args.cdash_build or self.install_command
        self.site = args.cdash_site or socket.gethostname()
        self.osname = platform.system()
        self.endtime = int(time.time())
        if args.cdash_buildstamp:
            self.buildstamp = args.cdash_buildstamp
        else:
            buildstamp_format = "%Y%m%d-%H%M-{0}".format(args.cdash_track)
            self.buildstamp = time.strftime(buildstamp_format,
                                            time.localtime(self.endtime))
        self.buildIds = collections.OrderedDict()
        self.revision = ''
        git = which('git')
        with working_dir(spack.paths.spack_root):
            self.revision = git('rev-parse', 'HEAD', output=str).strip()
        self.multiple_packages = False
Example #14
def pop_linked_default(dirname, files):
    """Look for a file named `default` that is a symlink to a module file"""
    linked_default_name = "default"
    try:
        files.remove(linked_default_name)
    except ValueError:
        return None

    linked_default_file = os.path.join(dirname, linked_default_name)
    if not os.path.islink(linked_default_file):
        tty.verbose("Modulepath: expected file named `default` in {0} "
                    "to be a link to a modulefile".format(dirname))
        return None

    linked_default_source = os.path.realpath(linked_default_file)
    if not os.path.dirname(linked_default_source) == dirname:
        tty.verbose(
            "Modulepath: expected file named `default` in {0} to be "
            "a link to a modulefile in the same directory".format(dirname))
        return None

    return linked_default_source
Example #15
def find_modules(directory):

    directory = os.path.expanduser(directory)

    if directory == "/":
        tty.verbose("Requesting to find modules in root directory")
        return None

    if not os.access(directory, os.R_OK):
        tty.verbose("{0!r} is not an accessible directory".format(directory))
        return None

    if not os.path.isdir(directory):  # pragma: no cover
        # This should be redundant because of the previous check
        tty.verbose("{0!r} is not a directory".format(directory))
        return None

    return _find(directory)
Example #16
def load(name, opts=None, insert_at=None, caller="command_line"):
    """Load the module given by `name`

    This is a higher level interface to `load_impl` that gets the actual module
    object from `name`

    Parameters
    ----------
    name : string_like
        Module name, full name, or file path
    opts : dict
        (Optional) options to send to module
    insert_at : int
        Load the module as the `insert_at`th module.
    caller : str
        Who is calling. If modulefile, the reference count will be incremented
        if the module is already loaded.

    Returns
    -------
    module : Module
        If the `name` was loaded (or is already loaded), return its module.

    Raises
    ------
    ModuleNotFoundError

    """
    tty.verbose("Loading {0}".format(name))

    # Execute the module
    module = pymod.modulepath.get(name, use_file_modulepath=True)  # caller=="command_line")
    if module is None:
        if caller == "command_line":
            collection = pymod.collection.get(name)
            if collection is not None:
                return pymod.mc.collection.restore_impl(name, collection)
        raise ModuleNotFoundError(name, mp=pymod.modulepath)

    # Set the command line options
    if opts:
        module.opts = opts

    if module.is_loaded:
        if caller == "modulefile":
            pymod.mc.increment_refcount(module)
        else:
            tty.warn(
                "{0} is already loaded, use 'module reload' to reload".format(
                    module.fullname
                )
            )
        return module

    if pymod.environ.get(pymod.names.loaded_collection):  # pragma: no cover
        collection = pymod.environ.get(pymod.names.loaded_collection)
        tty.debug(
            "Loading {0} on top of loaded collection {1}. "
            "Removing the collection name from the environment".format(
                module.fullname, collection
            )
        )
        pymod.environ.unset(pymod.names.loaded_collection)

    if insert_at is not None:
        load_inserted_impl(module, insert_at)
    else:
        load_impl(module)

    return module
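A hedged usage sketch based on the docstring above; the module names and option dict are illustrative:

# Load by name, passing options and a position in the load order
module = load("gcc/8.2.0", opts={"+debug": True}, insert_at=1)

# From within a modulefile, loading an already-loaded module bumps its
# reference count instead of emitting the 'already loaded' warning
module = load("openmpi", caller="modulefile")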
Example #17
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            custom_spack_repo=None,
                            custom_spack_ref=None):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']
    ci_mappings = gitlab_ci['mappings']

    final_job_config = None
    if 'final-stage-rebuild-index' in gitlab_ci:
        final_job_config = gitlab_ci['final-stage-rebuild-index']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    # Make sure we use a custom spack if necessary
    before_script = None
    after_script = None
    if custom_spack_repo:
        if not custom_spack_ref:
            custom_spack_ref = 'master'
        before_script = [
            ('git clone "{0}" --branch "{1}" --depth 1 '
             '--single-branch'.format(custom_spack_repo, custom_spack_ref)),
            # Next line just shows spack version in pipeline output
            'pushd ./spack && git rev-parse HEAD && popd',
            '. "./spack/share/spack/setup-env.sh"',
        ]
        after_script = ['rm -rf "./spack"']

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        with spack.concretize.disable_compiler_existence_check():
            staged_phases[phase_name] = stage_spec_jobs(
                env.spec_lists[phase_name])

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                root_spec = spec_labels[spec_label]['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec,
                                                      ci_mappings)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                debug_flag = ''
                if 'enable-debug-messages' in gitlab_ci:
                    debug_flag = '-d '

                job_scripts = ['spack {0}ci rebuild'.format(debug_flag)]

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                }

                job_dependencies = []
                if spec_label in dependencies:
                    for dep_label in dependencies[spec_label]:
                        dep_pkg = pkg_name_from_spec_label(dep_label)
                        dep_spec = spec_labels[dep_label]['rootSpec'][dep_pkg]
                        dep_job_name = get_job_name(phase_name,
                                                    strip_compilers, dep_spec,
                                                    osname, build_group)
                        job_dependencies.append(dep_job_name)

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add one more dependency to
                # "job_dependencies" (that compiler).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            c_job_name = get_job_name(bs['phase-name'],
                                                      bs['strip-compilers'],
                                                      bs['spec'], str(bs_arch),
                                                      build_group)
                            job_dependencies.append(c_job_name)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        related_builds)

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    artifact_paths.append('local_mirror/build_cache')

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_scripts,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'dependencies': job_dependencies,
                }

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    if final_job_config:
        # Add an extra, final job to regenerate the index
        final_stage = 'stage-rebuild-index'
        final_job = {
            'stage': final_stage,
            'script':
            'spack buildcache update-index -d {0}'.format(mirror_urls[0]),
            'tags': final_job_config['tags']
        }
        if 'image' in final_job_config:
            final_job['image'] = final_job_config['image']
        if before_script:
            final_job['before_script'] = before_script
        if after_script:
            final_job['after_script'] = after_script
        output_object['rebuild-index'] = final_job
        stage_names.append(final_stage)

    output_object['stages'] = stage_names

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(output_object, default_flow_style=True))
Example #18
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    final_job_config = None
    if 'final-stage-rebuild-index' in gitlab_ci:
        final_job_config = gitlab_ci['final-stage-rebuild-index']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        with spack.concretize.disable_compiler_existence_check():
            staged_phases[phase_name] = stage_spec_jobs(
                env.spec_lists[phase_name])

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                root_spec = spec_labels[spec_label]['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group,
                                         enable_artifacts_buildcache))

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transitive deps
                            # of the compiler as well.
                            dep_jobs = [bs['spec']]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in bs['spec'].traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group,
                                                 enable_artifacts_buildcache))

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    }
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    tty.debug('The max_needs_job is {0}, with {1} needs'.format(
        max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    if final_job_config and not is_pr_pipeline:
        # Add an extra, final job to regenerate the index
        final_stage = 'stage-rebuild-index'
        final_job = {
            'stage':
            final_stage,
            'script':
            'spack buildcache update-index --keys -d {0}'.format(
                mirror_urls[0]),
            'tags':
            final_job_config['tags'],
            'when':
            'always'
        }
        if 'image' in final_job_config:
            final_job['image'] = final_job_config['image']
        if before_script:
            final_job['before_script'] = before_script
        if after_script:
            final_job['after_script'] = after_script
        output_object['rebuild-index'] = final_job
        stage_names.append(final_stage)

    output_object['stages'] = stage_names

    # Capture the version of spack used to generate the pipeline, transform it
    # into a value that can be passed to "git checkout", and save it in a
    # global yaml variable
    spack_version = spack.main.get_version()
    version_to_clone = None
    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
    if v_match:
        version_to_clone = 'v{0}'.format(v_match.group(0))
    else:
        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
        if v_match:
            version_to_clone = v_match.group(1)
        else:
            version_to_clone = spack_version

    output_object['variables'] = {
        'SPACK_VERSION': spack_version,
        'SPACK_CHECKOUT_VERSION': version_to_clone,
    }

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    # TODO(opadron): remove this or refactor
    if run_optimizer:
        import spack.ci_optimization as ci_opt
        sorted_output = ci_opt.optimizer(sorted_output)

    # TODO(opadron): remove this or refactor
    if use_dependencies:
        import spack.ci_needs_workaround as cinw
        sorted_output = cinw.needs_to_dependencies(sorted_output)

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
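The version_to_clone logic above maps a plain release version to its git tag and a dev version string to its trailing commit hash. A small, self-contained demonstration of those two regexes (the sample version strings are invented):

import re

def checkout_version(spack_version):
    if re.match(r"^\d+\.\d+\.\d+$", spack_version):
        return 'v{0}'.format(spack_version)   # release -> tag name
    v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
    if v_match:
        return v_match.group(1)               # dev build -> commit hash
    return spack_version                      # otherwise pass through

assert checkout_version("0.16.1") == "v0.16.1"
assert checkout_version("0.16.1-123-abc123de") == "abc123de"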
Example #19
def _log_command_output(self, out):
    if tty.is_verbose():
        fmt = self.command_name + ': {0}'
        for ln in out.getvalue().split('\n'):
            if len(ln) > 0:
                tty.verbose(fmt.format(ln.replace('==> ', '')))
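A hedged usage sketch: `out` is expected to be a StringIO-like buffer holding captured command output; the reporter instance and command name below are illustrative, since the surrounding class is not shown:

import io

out = io.StringIO()
out.write("==> Installing zlib\n==> Successfully installed zlib\n")
reporter._log_command_output(out)   # hypothetical instance of the class above
# With verbose output enabled, each non-empty line is re-emitted with the
# '==> ' prefix stripped, e.g.:  install: Installing zlib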
Example #20
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            prune_dag=False,
                            check_index_only=False,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
    pr_mirror_url = None
    if spack_pr_branch:
        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                      spack_pr_branch)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
        tty.die('spack ci generate requires an env containing a mirror')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    rebuild_index_enabled = True
    if 'rebuild-index' in gitlab_ci and gitlab_ci['rebuild-index'] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    # Add this mirror if it's enabled, as some specs might be up to date
    # here and thus not need to be rebuilt.
    if pr_mirror_url:
        add_pr_mirror(pr_mirror_url)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the per-PR mirror we may have just added above).
    bindist.binary_index.update()

    staged_phases = {}
    try:
        for phase in phases:
            phase_name = phase['name']
            with spack.concretize.disable_compiler_existence_check():
                staged_phases[phase_name] = stage_spec_jobs(
                    env.spec_lists[phase_name],
                    check_index_only=check_index_only)
    finally:
        # Clean up PR mirror if enabled
        if pr_mirror_url:
            remove_pr_mirror()

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    before_script, after_script = None, None
    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                root_spec = spec_record['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        # Get dependencies transitively, so they're all
                        # available in the artifacts buildcache.
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        # In this case, "needs" is only used for scheduling
                        # purposes, so we only get the direct dependencies.
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group, prune_dag,
                                         spec_labels,
                                         enable_artifacts_buildcache))

                rebuild_spec = spec_record['needs_rebuild']

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    spec_arch_family = (release_spec.architecture.target.
                                        microarchitecture.family)
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        c_spec = bs['spec']
                        bs_arch = c_spec.architecture
                        bs_arch_family = (
                            bs_arch.target.microarchitecture.family)
                        if (c_spec.satisfies(compiler_pkg_spec)
                                and bs_arch_family == spec_arch_family):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transitive deps
                            # of the compiler as well.

                            # Here we check whether the bootstrapped compiler
                            # needs to be rebuilt.  Until compilers are proper
                            # dependencies, we artificially force the spec to
                            # be rebuilt if the compiler targeted to build it
                            # needs to be rebuilt.
                            bs_specs, _, _ = staged_phases[bs['phase-name']]
                            c_spec_key = spec_deps_key(c_spec)
                            rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
                            rebuild_spec = rebuild_spec or rbld_comp
                            # Also update record so dependents do not fail to
                            # add this spec to their "needs"
                            spec_record['needs_rebuild'] = rebuild_spec

                            dep_jobs = [c_spec]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in c_spec.traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group, prune_dag,
                                                 bs_specs,
                                                 enable_artifacts_buildcache))
                        else:
                            debug_msg = ''.join([
                                'Considered compiler {0} for spec ',
                                '{1}, but rejected it either because it was ',
                                'not the compiler required by the spec, or ',
                                'because the target arch families of the ',
                                'spec and the compiler did not match'
                            ]).format(c_spec, release_spec)
                            tty.debug(debug_msg)

                if prune_dag and not rebuild_spec:
                    continue

                job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    },
                    'interruptible': True
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    if job_id > 0:
        tty.debug('The max_needs_job is {0}, with {1} needs'.format(
            max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    service_job_config = None
    if 'service-job-attributes' in gitlab_ci:
        service_job_config = gitlab_ci['service-job-attributes']

    default_attrs = [
        'image',
        'tags',
        'variables',
        'before_script',
        # 'script',
        'after_script',
    ]

    if job_id > 0:
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append('cleanup-temp-storage')
            cleanup_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, cleanup_job)

            cleanup_job['stage'] = 'cleanup-temp-storage'
            cleanup_job['script'] = [
                'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.
                format(temp_storage_url_prefix)
            ]
            cleanup_job['when'] = 'always'

            output_object['cleanup'] = cleanup_job

        if rebuild_index_enabled:
            # Add a final job to regenerate the index
            stage_names.append('stage-rebuild-index')
            final_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, final_job)

            index_target_mirror = mirror_urls[0]
            if is_pr_pipeline:
                index_target_mirror = pr_mirror_url

            final_job['stage'] = 'stage-rebuild-index'
            final_job['script'] = [
                'spack buildcache update-index --keys -d {0}'.format(
                    index_target_mirror)
            ]
            final_job['when'] = 'always'

            output_object['rebuild-index'] = final_job

        output_object['stages'] = stage_names

        # Capture the version of spack used to generate the pipeline, transform it
        # into a value that can be passed to "git checkout", and save it in a
        # global yaml variable
        spack_version = spack.main.get_version()
        version_to_clone = None
        v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
        if v_match:
            version_to_clone = 'v{0}'.format(v_match.group(0))
        else:
            v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
            if v_match:
                version_to_clone = v_match.group(1)
            else:
                version_to_clone = spack_version

        output_object['variables'] = {
            'SPACK_VERSION': spack_version,
            'SPACK_CHECKOUT_VERSION': version_to_clone,
        }

        if pr_mirror_url:
            output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url

        sorted_output = {}
        for output_key, output_value in sorted(output_object.items()):
            sorted_output[output_key] = output_value

        # TODO(opadron): remove this or refactor
        if run_optimizer:
            import spack.ci_optimization as ci_opt
            sorted_output = ci_opt.optimizer(sorted_output)

        # TODO(opadron): remove this or refactor
        if use_dependencies:
            import spack.ci_needs_workaround as cinw
            sorted_output = cinw.needs_to_dependencies(sorted_output)
    else:
        # No jobs were generated
        tty.debug('No specs to rebuild, generating no-op job')
        noop_job = {}

        if service_job_config:
            copy_attributes(default_attrs, service_job_config, noop_job)

        if 'script' not in noop_job:
            noop_job['script'] = [
                'echo "All specs already up to date, nothing to rebuild."',
            ]

        sorted_output = {'no-specs-to-rebuild': noop_job}

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))