Example #1
    def print_section(self, section, blame=False):
        """Print a configuration to stdout."""
        try:
            data = syaml.syaml_dict()
            data[section] = self.get_config(section)
            syaml.dump_config(data,
                              stream=sys.stdout,
                              default_flow_style=False,
                              blame=blame)
        except (yaml.YAMLError, IOError):
            raise ConfigError("Error reading configuration: %s" % section)
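The examples on this page all share the same basic call pattern. A minimal, self-contained sketch (assuming spack.util.spack_yaml is importable as syaml, as it is in the Spack sources; the dict contents are illustrative):

import sys

import spack.util.spack_yaml as syaml

# Build an ordered mapping and dump it as YAML; stream and
# default_flow_style are the keyword arguments used most often in these
# examples, and Example #1 above also shows the blame flag.
data = syaml.syaml_dict()
data['config'] = {'build_stage': ['$tempdir/$user/spack-stage']}
syaml.dump_config(data, stream=sys.stdout, default_flow_style=False)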
Example #2
File: config.py Project: cgmb/rocm-spack
def test_add_config_filename(mock_low_high_config, tmpdir):

    config_yaml = tmpdir.join('config-filename.yaml')
    config_yaml.ensure()
    with config_yaml.open('w') as f:
        syaml.dump_config(config_low, f)

    spack.config.add_from_file(str(config_yaml), scope="low")
    assert "build_stage" in spack.config.get('config')
    build_stages = spack.config.get('config')['build_stage']
    for stage in config_low['config']['build_stage']:
        assert stage in build_stages
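A quick way to check what such a test writes is to round-trip the data through load_config (used in Example #6 below). A small hypothetical sketch with invented data:

import spack.util.spack_yaml as syaml

config_low = {'config': {'build_stage': ['/tmp/stage']}}  # invented test data

with open('config-filename.yaml', 'w') as f:
    syaml.dump_config(config_low, f)

# load_config parses the file back into a mapping.
with open('config-filename.yaml') as f:
    assert syaml.load_config(f)['config']['build_stage'] == ['/tmp/stage']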
Example #3
    def write_section(self, section):
        filename = self.get_section_filename(section)
        data = self.get_section(section)
        validate(data, section_schemas[section])

        try:
            mkdirp(self.path)
            with open(filename, 'w') as f:
                validate(data, section_schemas[section])
                syaml.dump_config(data, stream=f, default_flow_style=False)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
Example #4
    def write_section(self, section):
        validate(self.sections, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

            # Write to a temporary file in the same directory, then rename it
            # into place so the update is effectively atomic.
            tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
                syaml.dump_config(self.sections, stream=f,
                                  default_flow_style=False)
            os.rename(tmp, self.path)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
Example #5
    def _write_section(self, section):
        filename = self.get_section_filename(section)
        data = self.get_section(section)

        # We copy data here to avoid adding defaults at write time
        validate_data = copy.deepcopy(data)
        validate(validate_data, section_schemas[section])

        try:
            mkdirp(self.path)
            with open(filename, 'w') as f:
                syaml.dump_config(data, stream=f, default_flow_style=False)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
Example #6
def config_updater(cfg_type, cfg_file):
    """
    Courtesy of Greg Becker
    """
    # Get a function to update the format
    update_fn = spack.config.ensure_latest_format_fn(cfg_type)
    with open(cfg_file) as f:
        raw_data = syaml.load_config(f) or {}
        data = raw_data.pop(cfg_type, {})
    update_fn(data)
    # Make a backup copy and rewrite the file
    bkp_file = cfg_file + '.bkp'
    shutil.copy(cfg_file, bkp_file)
    write_data = {cfg_type: data}
    with open(cfg_file, 'w') as f:
        syaml.dump_config(write_data, stream=f, default_flow_style=False)
    msg = 'File "{0}" updated [backup={1}]'
    tty.msg(msg.format(cfg_file, bkp_file))
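A hedged usage sketch of the helper above; the section name and file path are assumptions for illustration, not taken from the examples:

# Upgrade the 'config' section of a user-level file in place, leaving a
# '.bkp' copy of the original next to it.
config_updater('config', '/home/user/.spack/config.yaml')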
Example #7
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
    """Try applying an optimization pass and return information about the
    result

    "name" is a string describing the nature of the pass. If it is a non-empty
    string, summary statistics are also printed to stdout.

    "yaml" is the object to apply the pass to.

    "optimization_pass" is the function implementing the pass to be applied.

    "args" and "kwargs" are the additional arguments to pass to optimization
    pass. The pass is applied as

    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)

    The pass's results are greedily rejected if it does not modify the original
    yaml document, or if it produces a yaml document that serializes to a
    larger string.

    Returns (new_yaml, yaml, applied, other_results) if applied, or
    (yaml, new_yaml, applied, other_results) otherwise.
    """
    result = optimization_pass(yaml, *args, **kwargs)
    new_yaml, other_results = result[0], result[1:]

    if new_yaml is yaml:
        # pass was not applied
        return (yaml, new_yaml, False, other_results)

    pre_size = len(
        syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
    post_size = len(
        syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))

    # only apply the pass if it does not make the serialized size worse
    applied = (post_size <= pre_size)
    if applied:
        yaml, new_yaml = new_yaml, yaml

    if name:
        print_delta(name, pre_size, post_size, applied)

    return (yaml, new_yaml, applied, other_results)
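Example #17 below drives this helper with common_subobject as the pass. A minimal sketch of that pattern; the job dictionary here is invented for illustration:

# Two jobs share the same 'tags' list, so the pass can try to factor it out
# into a common prototype object.
ci_yaml = {
    'job-a': {'tags': ['tag-0'], 'script': ['spack ci rebuild']},
    'job-b': {'tags': ['tag-0'], 'script': ['spack ci rebuild']},
}

yaml_out, other, applied, rest = try_optimization_pass(
    'tag factorization', ci_yaml, common_subobject, {'tags': ['tag-0']})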
Example #8
    def _write_section(self, section):
        data_to_write = self._raw_data

        # If there is no existing data, this SingleFileScope has never
        # been written to disk. We need to construct the portion of the data
        # from the root of self._raw_data to the level at which the config
        # sections are defined. That requires creating keys for every entry in
        # self.yaml_path
        if not data_to_write:
            data_to_write = {}
            # reverse because we construct it from the inside out
            for key in reversed(self.yaml_path):
                data_to_write = {key: data_to_write}

        # data_update_pointer is a pointer to the part of data_to_write
        # that we are currently updating.
        # We start by traversing into the data to the point at which the
        # config sections are defined. This means popping the keys from
        # self.yaml_path
        data_update_pointer = data_to_write
        for key in self.yaml_path:
            data_update_pointer = data_update_pointer[key]

        # For each section, update the data at the level of our pointer
        # with the data from the section
        for key, data in self.sections.items():
            data_update_pointer[key] = data[key]

        validate(data_to_write, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

            tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
                syaml.dump_config(data_to_write,
                                  stream=f,
                                  default_flow_style=False)
            rename(tmp, self.path)

        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError("Error writing to config file: '%s'" %
                                  str(e))
Example #9
    def write_includes(self, path):
        abspath = os.path.abspath(path)
        sections = list(spack.config.section_schemas.keys())
        data = syaml.syaml_dict()
        try:
            for s in sections:
                # we have to check that there is data in each scope
                # or else ill-formatted output can occur
                has_data = False
                for scope in self.config.scopes.values():
                    if scope.get_section(s) is not None:
                        has_data = True
                if has_data:
                    temp = self.config.get_config(s)
                    data[s] = temp
        except (yaml.YAMLError, IOError):
            raise spack.config.ConfigError("Error reading configuration: %s" % s)

        with open(abspath, 'w') as fout:
            syaml.dump_config(data,
                              stream=fout, default_flow_style=False, blame=False)
Example #10
def use_latest_git_hashes(env, top_specs, blacklist=blacklist):
    with open(env.manifest_path, 'r') as f:
        yaml = syaml.load(f)

    roots = list(env.roots())

    for i in range(len(roots)):
        if roots[i].name not in blacklist:
            hash_dict = {}
            hash_dict[roots[i].name] = find_latest_git_hash(roots[i])

            for dep in roots[i].dependencies():
                if dep.name not in blacklist:
                    hash_dict[dep.name] = find_latest_git_hash(dep)

            # yaml['spack']['specs'][i] = replace_versions_with_hashes(
            #    roots[i].build_spec, hash_dict)
            yaml['spack']['specs'][i] = str(roots[i].build_spec)

    with open(env.manifest_path, 'w') as fout:
        syaml.dump_config(yaml, stream=fout, default_flow_style=False)
    env._re_read()
Example #11
File: packages.py Project: w8jcik/spack
def deprecate_paths_and_modules(instance, deprecated_properties):
    """Function to produce warning/error messages if "paths" and "modules" are
    found in "packages.yaml"

    Args:
        instance: instance of the configuration file
        deprecated_properties: deprecated properties in instance

    Returns:
        Warning/Error message to be printed
    """
    import copy
    import os.path

    import llnl.util.tty

    import spack.util.spack_yaml as syaml

    # Copy the instance to remove default attributes that are not related
    # to the part that needs to be reported
    instance_copy = copy.copy(instance)

    # Check if this configuration comes from an environment or not
    absolute_path = instance_copy._end_mark.name
    command_to_suggest = '$ spack config update packages'
    if os.path.basename(absolute_path) == 'spack.yaml':
        command_to_suggest = '$ spack env update <environment>'

    # Retrieve the relevant part of the configuration as YAML
    keys_to_be_removed = [
        x for x in instance_copy if x not in deprecated_properties
    ]
    for key in keys_to_be_removed:
        instance_copy.pop(key)
    yaml_as_str = syaml.dump_config(instance_copy, blame=True)

    if llnl.util.tty.is_debug():
        msg = 'OUTDATED CONFIGURATION FILE [file={0}]\n{1}'
        llnl.util.tty.debug(msg.format(absolute_path, yaml_as_str))

    msg = ('detected deprecated properties in {0}\nActivate the debug '
           'flag to have more information on the deprecated parts or '
           'run:\n\n\t{2}\n\nto update the file to the new format\n')
    return msg.format(
        absolute_path, yaml_as_str, command_to_suggest
    )
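The blame=True call above returns the dump as a string rather than writing to a stream (compare Example #13). A hedged sketch of the same pattern with a hypothetical file; in Spack, blame annotates each dumped line with the file and line it was loaded from, which is what makes it useful for deprecation reports like this one:

import spack.util.spack_yaml as syaml

# 'packages.yaml' is a hypothetical path; load_config tracks where each
# entry came from, and blame=True reports that provenance in the dump.
with open('packages.yaml') as f:
    data = syaml.load_config(f)
print(syaml.dump_config(data, blame=True))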
Example #12
def build_histogram(iterator, key):
    """Builds a histogram of values given an iterable of mappings and a key.

    For each mapping "m" with key "key" in iterator, the value m[key] is
    considered.

    Returns a list of tuples (hash, count, proportion, value), where

      - "hash" is a sha1sum hash of the value.
      - "count" is the number of occurences of values that hash to "hash".
      - "proportion" is the proportion of all values considered above that
        hash to "hash".
      - "value" is one of the values considered above that hash to "hash".
        Which value is chosen when multiple values hash to the same "hash" is
        undefined.

    The list is sorted in descending order by count, yielding the most
    frequently occurring hashes first.
    """
    buckets = defaultdict(int)
    values = {}

    num_objects = 0
    for obj in iterator:
        num_objects += 1

        try:
            val = obj[key]
        except (KeyError, TypeError):
            continue

        value_hash = hashlib.sha1()
        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
        value_hash = value_hash.hexdigest()

        buckets[value_hash] += 1
        values[value_hash] = val

    return [(h, buckets[h], float(buckets[h]) / num_objects, values[h])
            for h in sorted(buckets.keys(), key=lambda k: -buckets[k])]
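Example #17 below consumes this with next(iter(build_histogram(yaml.values(), 'tags')), (None,) * 4). A small illustrative call with an invented job mapping:

jobs = {
    'job-a': {'tags': ['tag-0', 'tag-1']},
    'job-b': {'tags': ['tag-0', 'tag-1']},
    'job-c': {'tags': ['tag-2']},
}

# The most frequent value comes first; here it is ['tag-0', 'tag-1'] with a
# count of 2 and a proportion of 2/3.
value_hash, count, proportion, value = build_histogram(jobs.values(), 'tags')[0]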
Example #13
    def write_projections(self):
        if self.projections:
            mkdirp(os.path.dirname(self.projections_path))
            with open(self.projections_path, 'w') as f:
                f.write(s_yaml.dump_config({'projections': self.projections}))
Example #14
def external(parser, args):
    extern_dir = get_external_dir()
    if args.list:
        snaps = get_all_snapshots()
        dated = get_ordered_dated_snapshots()
        # compute unconditionally so 'non_dated' is always defined below
        non_dated = list(set(snaps) - set(dated))

        def print_snapshots(snaps):
            for s in snaps:
                env_dir = os.path.join(extern_dir, s)
                print(' - {path}'.format(path=env_dir))

        print('-' * 54)
        print('Available snapshot directories are:')
        print('-' * 54)
        if dated:
            print('\nDated Snapshots (ordered)')
            print('-' * 54)
            print_snapshots(dated)
        if non_dated:
            print('\nAdditional Snapshots (unordered)')
            print('-' * 54)
            print_snapshots(non_dated)
        return
    env = ev.active_environment()
    if not env:
        tty.die('spack manager external requires an active environment')
    if args.latest:
        snaps = get_ordered_dated_snapshots()
        if not snaps:
            print('WARNING: No \'externals.yaml\' created because no valid '
                  'snapshots were found. \n'
                  '  If you are trying to use a system level snapshot make '
                  'sure you have SPACK_MANAGER_EXTERNAL pointing to '
                  'spack-manager directory for the system.\n')
            return
        else:
            snap_path = os.path.join(extern_dir, snaps[0])
    else:
        snap_path = args.path

    # check that directory of ext view exists
    if not snap_path or not ev.is_env_dir(snap_path):
        tty.die('External path must point to a spack environment with a view. '
                'Auto detection of the latest dated snapshot can be achieved'
                ' with the \'--latest\' flag.')

    snap_env = ev.Environment(snap_path)
    snap_env.check_views()

    if not snap_env.views:
        tty.die('Environments used to create externals must have at least 1'
                ' associated view')
    # copy the file and overwrite any that may exist (or merge?)
    inc_name_abs = os.path.abspath(os.path.join(env.path, args.name))

    try:
        detected = assemble_dict_of_detected_externals(snap_env,
                                                       args.blacklist,
                                                       args.whitelist)
        src = create_yaml_from_detected_externals(detected)
    except ev.SpackEnvironmentError as e:
        tty.die(e.long_message)

    if include_entry_exists(env, args.name):
        if args.merge:
            # merge the existing includes with the new one
            # giving precedence to the new data coming in
            dest = spack.config.read_config_file(
                inc_name_abs, spack.config.section_schemas['packages'])
            combined = spack.config.merge_yaml(src, dest)
            final = combined
        else:
            final = src
    else:
        add_include_entry(env, args.name)
        final = src

    with open(inc_name_abs, 'w') as fout:
        syaml.dump_config(final, stream=fout, default_flow_style=False)

    env.write()
Example #15
File: config.py Project: hhirtz/spack
    def _write(config, data, scope):
        config_yaml = tmpdir.join(scope, config + '.yaml')
        config_yaml.ensure()
        with config_yaml.open('w') as f:
            syaml.dump_config(data, f)
Example #16
File: ci.py Project: rexcsn/spack
def test_ci_workarounds():
    fake_root_spec = 'x' * 544
    fake_spack_ref = 'x' * 40

    common_variables = {
        'SPACK_COMPILER_ACTION': 'NONE',
        'SPACK_IS_PR_PIPELINE': 'False',
    }

    common_before_script = [
        'git clone "https://github.com/spack/spack"', ' && '.join(
            ('pushd ./spack',
             'git checkout "{ref}"'.format(ref=fake_spack_ref), 'popd')),
        '. "./spack/share/spack/setup-env.sh"'
    ]

    def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
                       use_dependencies):
        variables = common_variables.copy()
        variables['SPACK_JOB_SPEC_PKG_NAME'] = name

        result = {
            'stage': stage,
            'tags': ['tag-0', 'tag-1'],
            'artifacts': {
                'paths': [
                    'jobs_scratch_dir', 'cdash_report', name + '.spec.yaml',
                    name + '.cdashid', name
                ],
                'when':
                'always'
            },
            'retry': {
                'max': 2,
                'when': ['always']
            },
            'after_script': ['rm -rf "./spack"'],
            'script': ['spack ci rebuild'],
            'image': {
                'name': 'spack/centos7',
                'entrypoint': ['']
            }
        }

        if optimize:
            result['extends'] = ['.c0', '.c1']
        else:
            variables['SPACK_ROOT_SPEC'] = fake_root_spec
            result['before_script'] = common_before_script

        result['variables'] = variables

        if use_dependencies:
            result['dependencies'] = (list(deps)
                                      if use_artifact_buildcache else [])
        else:
            result['needs'] = [{
                'job': dep,
                'artifacts': use_artifact_buildcache
            } for dep in deps]

        return {name: result}

    def make_rebuild_index_job(use_artifact_buildcache, optimize,
                               use_dependencies):

        result = {
            'stage': 'stage-rebuild-index',
            'script': 'spack buildcache update-index -d s3://mirror',
            'tags': ['tag-0', 'tag-1'],
            'image': {
                'name': 'spack/centos7',
                'entrypoint': ['']
            },
            'after_script': ['rm -rf "./spack"'],
        }

        if optimize:
            result['extends'] = '.c0'
        else:
            result['before_script'] = common_before_script

        return {'rebuild-index': result}

    def make_factored_jobs(optimize):
        return {
            '.c0': {
                'before_script': common_before_script
            },
            '.c1': {
                'variables': {
                    'SPACK_ROOT_SPEC': fake_root_spec
                }
            }
        } if optimize else {}

    def make_stage_list(num_build_stages):
        return {
            'stages':
            (['-'.join(('stage', str(i)))
              for i in range(num_build_stages)] + ['stage-rebuild-index'])
        }

    def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
        result = {}

        result.update(
            make_build_job('pkg-a', [], 'stage-0', use_artifact_buildcache,
                           optimize, use_dependencies))

        result.update(
            make_build_job('pkg-b', ['pkg-a'], 'stage-1',
                           use_artifact_buildcache, optimize,
                           use_dependencies))

        result.update(
            make_build_job('pkg-c', ['pkg-a', 'pkg-b'], 'stage-2',
                           use_artifact_buildcache, optimize,
                           use_dependencies))

        result.update(
            make_rebuild_index_job(use_artifact_buildcache, optimize,
                                   use_dependencies))

        result.update(make_factored_jobs(optimize))

        result.update(make_stage_list(3))

        return result

    # test every combination of:
    #     use artifact buildcache: true or false
    #     run optimization pass: true or false
    #     convert needs to dependencies: true or false
    for use_ab in (False, True):
        original = make_yaml_obj(use_artifact_buildcache=use_ab,
                                 optimize=False,
                                 use_dependencies=False)

        for opt, deps in it.product(*(((False, True), ) * 2)):
            # neither optimizing nor converting needs->dependencies
            if not (opt or deps):
                # therefore, nothing to test
                continue

            predicted = make_yaml_obj(use_artifact_buildcache=use_ab,
                                      optimize=opt,
                                      use_dependencies=deps)

            actual = original.copy()
            if opt:
                actual = ci_opt.optimizer(actual)
            if deps:
                actual = cinw.needs_to_dependencies(actual)

            predicted = syaml.dump_config(ci_opt.sort_yaml_obj(predicted),
                                          default_flow_style=True)
            actual = syaml.dump_config(ci_opt.sort_yaml_obj(actual),
                                       default_flow_style=True)

            assert (predicted == actual)
Example #17
def optimizer(yaml):
    original_size = len(
        syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    # try factoring out commonly repeated portions
    common_job = {
        'variables': {
            'SPACK_COMPILER_ACTION': 'NONE',
            'SPACK_RELATED_BUILDS_CDASH': ''
        },
        'after_script': ['rm -rf "./spack"'],
        'artifacts': {
            'paths': ['jobs_scratch_dir', 'cdash_report'],
            'when': 'always'
        },
    }

    # look for a list of tags that appear frequently
    _, count, proportion, tags = next(
        iter(build_histogram(yaml.values(), 'tags')), (None, ) * 4)

    # If a list of tags is found, more than one job uses it, *and* the jobs
    # that use it represent at least 70% of all jobs, then add the list to
    # the prototype object.
    if tags and count > 1 and proportion >= 0.70:
        common_job['tags'] = tags

    # apply common object factorization
    yaml, other, applied, rest = try_optimization_pass(
        'general common object factorization', yaml, common_subobject,
        common_job)

    # look for a common script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), 'script')), (None, ) * 4)

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            'script factorization', yaml, common_subobject, {'script': script})

    # look for a common before_script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), 'before_script')), (None, ) * 4)

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            'before_script factorization', yaml, common_subobject,
            {'before_script': script})

    # Look specifically for the SPACK_ROOT_SPEC environment variables.
    # Try to factor them out.
    h = build_histogram((getattr(val, 'get', lambda *args: {})('variables')
                         for val in yaml.values()), 'SPACK_ROOT_SPEC')

    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
    # environment variable; not just the one that appears with the greatest
    # frequency. We only require that more than 1 job uses a given instance's
    # value, because we expect the value to be very large, and so expect even
    # few-to-one factorizations to yield large space savings.
    counter = 0
    for _, count, proportion, spec in h:
        if count <= 1:
            continue

        counter += 1

        yaml, other, applied, rest = try_optimization_pass(
            'SPACK_ROOT_SPEC factorization ({count})'.format(count=counter),
            yaml, common_subobject, {'variables': {
                'SPACK_ROOT_SPEC': spec
            }})

    new_size = len(
        syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    print('\n')
    print_delta('overall summary', original_size, new_size)
    print('\n')
    return yaml
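Examples #16 and #19 show how this optimizer is driven from the pipeline generator. A minimal hedged sketch, where output_object is a mapping of GitLab job names to job definitions as built in Examples #18-#20:

import spack.ci_optimization as ci_opt

# Returns an equivalent document with repeated sub-objects factored into
# prototype jobs referenced through 'extends' (see Example #16).
optimized = ci_opt.optimizer(output_object)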
Example #18
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            custom_spack_repo=None,
                            custom_spack_ref=None):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']
    ci_mappings = gitlab_ci['mappings']

    final_job_config = None
    if 'final-stage-rebuild-index' in gitlab_ci:
        final_job_config = gitlab_ci['final-stage-rebuild-index']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    # Make sure we use a custom spack if necessary
    before_script = None
    after_script = None
    if custom_spack_repo:
        if not custom_spack_ref:
            custom_spack_ref = 'master'
        before_script = [
            ('git clone "{0}" --branch "{1}" --depth 1 '
             '--single-branch'.format(custom_spack_repo, custom_spack_ref)),
            # Next line just shows spack version in pipeline output
            'pushd ./spack && git rev-parse HEAD && popd',
            '. "./spack/share/spack/setup-env.sh"',
        ]
        after_script = ['rm -rf "./spack"']

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        with spack.concretize.disable_compiler_existence_check():
            staged_phases[phase_name] = stage_spec_jobs(
                env.spec_lists[phase_name])

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                root_spec = spec_labels[spec_label]['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec,
                                                      ci_mappings)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                debug_flag = ''
                if 'enable-debug-messages' in gitlab_ci:
                    debug_flag = '-d '

                job_scripts = ['spack {0}ci rebuild'.format(debug_flag)]

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                }

                job_dependencies = []
                if spec_label in dependencies:
                    for dep_label in dependencies[spec_label]:
                        dep_pkg = pkg_name_from_spec_label(dep_label)
                        dep_spec = spec_labels[dep_label]['rootSpec'][dep_pkg]
                        dep_job_name = get_job_name(phase_name,
                                                    strip_compilers, dep_spec,
                                                    osname, build_group)
                        job_dependencies.append(dep_job_name)

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add one more dependency to
                # "job_dependencies" (that compiler).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            c_job_name = get_job_name(bs['phase-name'],
                                                      bs['strip-compilers'],
                                                      bs['spec'], str(bs_arch),
                                                      build_group)
                            job_dependencies.append(c_job_name)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        related_builds)

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    artifact_paths.append('local_mirror/build_cache')

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_scripts,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'dependencies': job_dependencies,
                }

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    if final_job_config:
        # Add an extra, final job to regenerate the index
        final_stage = 'stage-rebuild-index'
        final_job = {
            'stage': final_stage,
            'script':
            'spack buildcache update-index -d {0}'.format(mirror_urls[0]),
            'tags': final_job_config['tags']
        }
        if 'image' in final_job_config:
            final_job['image'] = final_job_config['image']
        if before_script:
            final_job['before_script'] = before_script
        if after_script:
            final_job['after_script'] = after_script
        output_object['rebuild-index'] = final_job
        stage_names.append(final_stage)

    output_object['stages'] = stage_names

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(output_object, default_flow_style=True))
Example #19
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    final_job_config = None
    if 'final-stage-rebuild-index' in gitlab_ci:
        final_job_config = gitlab_ci['final-stage-rebuild-index']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        with spack.concretize.disable_compiler_existence_check():
            staged_phases[phase_name] = stage_spec_jobs(
                env.spec_lists[phase_name])

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                root_spec = spec_labels[spec_label]['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group,
                                         enable_artifacts_buildcache))

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transitive deps
                            # of the compiler as well.
                            dep_jobs = [bs['spec']]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in bs['spec'].traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group,
                                                 enable_artifacts_buildcache))

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    }
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    tty.debug('The max_needs_job is {0}, with {1} needs'.format(
        max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    if final_job_config and not is_pr_pipeline:
        # Add an extra, final job to regenerate the index
        final_stage = 'stage-rebuild-index'
        final_job = {
            'stage':
            final_stage,
            'script':
            'spack buildcache update-index --keys -d {0}'.format(
                mirror_urls[0]),
            'tags':
            final_job_config['tags'],
            'when':
            'always'
        }
        if 'image' in final_job_config:
            final_job['image'] = final_job_config['image']
        if before_script:
            final_job['before_script'] = before_script
        if after_script:
            final_job['after_script'] = after_script
        output_object['rebuild-index'] = final_job
        stage_names.append(final_stage)

    output_object['stages'] = stage_names

    # Capture the version of spack used to generate the pipeline, transform it
    # into a value that can be passed to "git checkout", and save it in a
    # global yaml variable
    spack_version = spack.main.get_version()
    version_to_clone = None
    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
    if v_match:
        version_to_clone = 'v{0}'.format(v_match.group(0))
    else:
        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
        if v_match:
            version_to_clone = v_match.group(1)
        else:
            version_to_clone = spack_version

    output_object['variables'] = {
        'SPACK_VERSION': spack_version,
        'SPACK_CHECKOUT_VERSION': version_to_clone,
    }

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    # TODO(opadron): remove this or refactor
    if run_optimizer:
        import spack.ci_optimization as ci_opt
        sorted_output = ci_opt.optimizer(sorted_output)

    # TODO(opadron): remove this or refactor
    if use_dependencies:
        import spack.ci_needs_workaround as cinw
        sorted_output = cinw.needs_to_dependencies(sorted_output)

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
Example #20
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            prune_dag=False,
                            check_index_only=False,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
    pr_mirror_url = None
    if spack_pr_branch:
        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                      spack_pr_branch)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
        tty.die('spack ci generate requires an env containing a mirror')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    rebuild_index_enabled = True
    if 'rebuild-index' in gitlab_ci and gitlab_ci['rebuild-index'] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    # Add this mirror if it's enabled, as some specs might be up to date
    # here and thus not need to be rebuilt.
    if pr_mirror_url:
        add_pr_mirror(pr_mirror_url)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the per-PR mirror we may have just added above).
    bindist.binary_index.update()

    staged_phases = {}
    try:
        for phase in phases:
            phase_name = phase['name']
            with spack.concretize.disable_compiler_existence_check():
                staged_phases[phase_name] = stage_spec_jobs(
                    env.spec_lists[phase_name],
                    check_index_only=check_index_only)
    finally:
        # Clean up PR mirror if enabled
        if pr_mirror_url:
            remove_pr_mirror()

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    before_script, after_script = None, None
    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                root_spec = spec_record['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        # Get dependencies transitively, so they're all
                        # available in the artifacts buildcache.
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        # In this case, "needs" is only used for scheduling
                        # purposes, so we only get the direct dependencies.
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group, prune_dag,
                                         spec_labels,
                                         enable_artifacts_buildcache))

                rebuild_spec = spec_record['needs_rebuild']

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    spec_arch_family = (release_spec.architecture.target.
                                        microarchitecture.family)
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        c_spec = bs['spec']
                        bs_arch = c_spec.architecture
                        bs_arch_family = (
                            bs_arch.target.microarchitecture.family)
                        if (c_spec.satisfies(compiler_pkg_spec)
                                and bs_arch_family == spec_arch_family):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the job's "needs".  But if the artifacts
                            # buildcache is enabled, we'll have to add all
                            # transitive deps of the compiler as well.
                            # of the compiler as well.

                            # Here we check whether the bootstrapped compiler
                            # needs to be rebuilt.  Until compilers are proper
                            # dependencies, we artificially force the spec to
                            # be rebuilt if the compiler meant to build it
                            # itself needs to be rebuilt.
                            bs_specs, _, _ = staged_phases[bs['phase-name']]
                            c_spec_key = spec_deps_key(c_spec)
                            rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
                            rebuild_spec = rebuild_spec or rbld_comp
                            # Also update record so dependents do not fail to
                            # add this spec to their "needs"
                            spec_record['needs_rebuild'] = rebuild_spec

                            dep_jobs = [c_spec]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in c_spec.traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group, prune_dag,
                                                 bs_specs,
                                                 enable_artifacts_buildcache))
                        else:
                            debug_msg = ''.join([
                                'Considered compiler {0} for spec ',
                                '{1}, but rejected it either because it was ',
                                'not the compiler required by the spec, or ',
                                'because the target arch families of the ',
                                'spec and the compiler did not match'
                            ]).format(c_spec, release_spec)
                            tty.debug(debug_msg)

                if prune_dag and not rebuild_spec:
                    continue

                job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    },
                    'interruptible': True
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    if job_id > 0:
        tty.debug('The max_needs_job is {0}, with {1} needs'.format(
            max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    service_job_config = None
    if 'service-job-attributes' in gitlab_ci:
        service_job_config = gitlab_ci['service-job-attributes']

    default_attrs = [
        'image',
        'tags',
        'variables',
        'before_script',
        # 'script',
        'after_script',
    ]

    if job_id > 0:
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append('cleanup-temp-storage')
            cleanup_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, cleanup_job)

            cleanup_job['stage'] = 'cleanup-temp-storage'
            cleanup_job['script'] = [
                'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.
                format(temp_storage_url_prefix)
            ]
            cleanup_job['when'] = 'always'

            output_object['cleanup'] = cleanup_job

        if rebuild_index_enabled:
            # Add a final job to regenerate the index
            stage_names.append('stage-rebuild-index')
            final_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, final_job)

            index_target_mirror = mirror_urls[0]
            if is_pr_pipeline:
                index_target_mirror = pr_mirror_url

            final_job['stage'] = 'stage-rebuild-index'
            final_job['script'] = [
                'spack buildcache update-index --keys -d {0}'.format(
                    index_target_mirror)
            ]
            final_job['when'] = 'always'

            output_object['rebuild-index'] = final_job

        output_object['stages'] = stage_names

        # Capture the version of spack used to generate the pipeline, transform it
        # into a value that can be passed to "git checkout", and save it in a
        # global yaml variable
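        # For example (illustrative): a plain release version like "0.16.2"
        # matches the first pattern below and becomes the tag "v0.16.2",
        # while a "<base>-<n>-<hash>" style dev version is reduced to just
        # its trailing commit hash so that value can be checked out directly.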
        spack_version = spack.main.get_version()
        version_to_clone = None
        v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
        if v_match:
            version_to_clone = 'v{0}'.format(v_match.group(0))
        else:
            v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
            if v_match:
                version_to_clone = v_match.group(1)
            else:
                version_to_clone = spack_version

        output_object['variables'] = {
            'SPACK_VERSION': spack_version,
            'SPACK_CHECKOUT_VERSION': version_to_clone,
        }

        if pr_mirror_url:
            output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url

        sorted_output = {}
        for output_key, output_value in sorted(output_object.items()):
            sorted_output[output_key] = output_value

        # TODO(opadron): remove this or refactor
        if run_optimizer:
            import spack.ci_optimization as ci_opt
            sorted_output = ci_opt.optimizer(sorted_output)

        # TODO(opadron): remove this or refactor
        if use_dependencies:
            import spack.ci_needs_workaround as cinw
            sorted_output = cinw.needs_to_dependencies(sorted_output)
    else:
        # No jobs were generated
        tty.debug('No specs to rebuild, generating no-op job')
        noop_job = {}

        if service_job_config:
            copy_attributes(default_attrs, service_job_config, noop_job)

        if 'script' not in noop_job:
            noop_job['script'] = [
                'echo "All specs already up to date, nothing to rebuild."',
            ]

        sorted_output = {'no-specs-to-rebuild': noop_job}

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
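
For reference, here is a minimal, hypothetical sketch of just the final serialization step above. It assumes the usual `import spack.util.spack_yaml as syaml` alias used in these examples; the pipeline dict and output file name are made up for illustration, not produced by the generator.

import spack.util.spack_yaml as syaml

# Made-up minimal pipeline object mirroring the structure built above.
pipeline = {
    'stages': ['stage-0'],
    'no-specs-to-rebuild': {
        'script': ['echo "All specs already up to date, nothing to rebuild."'],
    },
}

# With no stream argument, dump_config returns the YAML text (as used by the
# generator above); default_flow_style=True emits inline flow collections.
with open('.gitlab-ci.yml', 'w') as outf:
    outf.write(syaml.dump_config(pipeline, default_flow_style=True))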
Example #21
0
def release_jobs(parser, args):
    env = ev.get_env(args, 'release-jobs', required=True)

    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    yaml_root = env.yaml['spack']

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    ci_mappings = yaml_root['gitlab-ci']['mappings']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']

        if args.cdash_credentials:
            with open(args.cdash_credentials) as fd:
                cdash_auth_token = fd.read()
                cdash_auth_token = cdash_auth_token.strip()

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in yaml_root['gitlab-ci']:
        for phase in yaml_root['gitlab-ci']['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        staged_phases[phase_name] = stage_spec_jobs(env.spec_lists[phase_name])

    if args.print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                release_spec = spec_labels[spec_label]['spec']
                root_spec = spec_labels[spec_label]['rootSpec']

                runner_attribs = find_matching_config(root_spec, ci_mappings)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                job_scripts = ['./bin/rebuild-package.sh']

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_MIRROR_URL':
                    mirror_urls[0],
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                }

                job_dependencies = []
                if spec_label in dependencies:
                    job_dependencies = ([
                        get_job_name(phase_name, strip_compilers,
                                     spec_labels[dep_label]['spec'], osname,
                                     build_group)
                        for dep_label in dependencies[spec_label]
                    ])

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add one more dependency to
                # "job_dependencies" (that compiler).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            c_job_name = get_job_name(bs['phase-name'],
                                                      bs['strip-compilers'],
                                                      bs['spec'], str(bs_arch),
                                                      build_group)
                            job_dependencies.append(c_job_name)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BASE_URL'] = cdash_url
                    job_vars['SPACK_CDASH_PROJECT'] = cdash_project
                    job_vars['SPACK_CDASH_PROJECT_ENC'] = cdash_project_enc
                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_CDASH_SITE'] = cdash_site
                    job_vars['SPACK_RELATED_BUILDS'] = ';'.join(related_builds)
                    job_vars['SPACK_JOB_SPEC_BUILDGROUP'] = build_group

                job_vars['SPACK_ENABLE_CDASH'] = str(enable_cdash_reporting)

                variables.update(job_vars)

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_scripts,
                    'tags': tags,
                    'artifacts': {
                        'paths': [
                            'jobs_scratch_dir',
                            'cdash_report',
                            'local_mirror/build_cache',
                        ],
                        'when':
                        'always',
                    },
                    'dependencies': job_dependencies,
                }

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(job_id, stage_id))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    # Add an extra, final job to regenerate the index
    final_stage = 'stage-rebuild-index'
    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_urls[0],
        },
        'script': './bin/rebuild-index.sh',
        'tags': ['spack-post-ci']  # may want a runner to handle this
    }
    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)

    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump_config(output_object, default_flow_style=True))
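
The two generators above express inter-job relationships differently: the newer one emits GitLab "needs" entries built by format_job_needs, while this older release_jobs emits plain "dependencies" lists of job names. Below is a minimal, hypothetical sketch of the two shapes being dumped with syaml.dump_config; the job names and the 'artifacts' flag are assumptions for illustration, not the exact output of get_job_name or format_job_needs.

import sys
import spack.util.spack_yaml as syaml

# Invented job name, purely for illustration.
DEP_JOB = 'example-dependency-job'

# Older style (release_jobs above): 'dependencies' is a flat list of
# upstream job names.
old_style_job = {
    'stage': 'stage-1',
    'script': ['./bin/rebuild-package.sh'],
    'dependencies': [DEP_JOB],
}

# Newer style (the generator above): 'needs' entries are mappings with at
# least a 'job' key (they are sorted with key=lambda d: d['job']); the
# 'artifacts' flag shown here is an assumption about format_job_needs.
new_style_job = {
    'stage': 'stage-1',
    'script': ['spack env activate --without-view .', 'spack ci rebuild'],
    'needs': [{'job': DEP_JOB, 'artifacts': True}],
}

syaml.dump_config({'old-style': old_style_job, 'new-style': new_style_job},
                  stream=sys.stdout, default_flow_style=False)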