Example #1
def download_buildcache_files(concrete_spec,
                              local_dest,
                              require_cdashid,
                              mirror_url=None):
    tarfile_name = bindist.tarball_name(concrete_spec, '.spack')
    tarball_dir_name = bindist.tarball_directory_name(concrete_spec)
    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
    local_tarball_path = os.path.join(local_dest, tarball_dir_name)

    files_to_fetch = [
        {
            'url': tarball_path_name,
            'path': local_tarball_path,
            'required': True,
        },
        {
            'url': bindist.tarball_name(concrete_spec, '.spec.yaml'),
            'path': local_dest,
            'required': True,
        },
        {
            'url': bindist.tarball_name(concrete_spec, '.cdashid'),
            'path': local_dest,
            'required': require_cdashid,
        },
    ]

    return bindist.download_buildcache_entry(files_to_fetch, mirror_url)
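
A minimal usage sketch for this helper, assuming Spack's standard build-cache mirror layout; the spec name, destination folder, and mirror URL below are placeholders:

# Hypothetical driver code; the spec, paths, and URL are placeholders.
import spack.spec

my_spec = spack.spec.Spec('zlib').concretized()
ok = download_buildcache_files(my_spec,
                               '/tmp/buildcache_downloads',
                               require_cdashid=False,
                               mirror_url='file:///path/to/mirror')
if not ok:
    print('failed to fetch one or more required buildcache files')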
Example #2
def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
    """Ensure spack buildcache update-index properly fixes up spec descriptor
    files on the mirror when updating the buildcache index."""

    # Create a temp mirror directory for buildcache usage
    mirror_dir = tmpdir.join('mirror_dir')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)
    spack.config.set('mirrors', {'test': mirror_url})

    a = Spec('a').concretized()
    b = Spec('b').concretized()
    new_b_full_hash = 'abcdef'

    # Install package a with dep b
    install_cmd('--no-cache', a.name)

    # Create a buildcache for a and its dep b, and update index
    buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name)
    buildcache_cmd('update-index', '-d', mirror_dir.strpath)

    # Simulate an update to b that only affects full hash by simply overwriting
    # the full hash in the spec.json file on the mirror
    b_spec_json_name = bindist.tarball_name(b, '.spec.json')
    b_spec_json_path = os.path.join(mirror_dir.strpath,
                                    bindist.build_cache_relative_path(),
                                    b_spec_json_name)
    fs.filter_file(r'"full_hash":\s"\S+"',
                   '"full_hash": "{0}"'.format(new_b_full_hash),
                   b_spec_json_path)
    # When we update the index, spack should notice that a's notion of the
    # full hash of b doesn't match b's notion of its own full hash, and as
    # a result, spack should fix the spec.json for a
    buildcache_cmd('update-index', '-d', mirror_dir.strpath)

    # Read in the concrete spec json of a
    a_spec_json_name = bindist.tarball_name(a, '.spec.json')
    a_spec_json_path = os.path.join(mirror_dir.strpath,
                                    bindist.build_cache_relative_path(),
                                    a_spec_json_name)

    # Turn concrete spec json into a concrete spec (a)
    with open(a_spec_json_path) as fd:
        a_prime = spec.Spec.from_json(fd.read())

    # Make sure the full hash of b in a's spec json matches the new value
    assert (a_prime[b.name].full_hash() == new_b_full_hash)
Example #3
def get_buildcache_name(args):
    """Get name (prefix) of buildcache entries for this spec"""
    spec = get_concrete_spec(args)
    buildcache_name = bindist.tarball_name(spec, '')

    print('{0}'.format(buildcache_name))

    sys.exit(0)
Example #4
def get_tarball(args):
    """Download buildcache entry from a remote mirror to local folder.  This
    command uses the process exit code to indicate its result, specifically,
    a non-zero exit code indicates that the command failed to download at
    least one of the required buildcache components.  Normally, just the
    tarball and .spec.yaml files are required, but if the --require-cdashid
    argument was provided, then a .cdashid file is also required."""
    if not args.spec and not args.spec_yaml:
        tty.msg('No specs provided, exiting.')
        sys.exit(0)

    if not args.path:
        tty.msg('No download path provided, exiting.')
        sys.exit(0)

    spec = get_concrete_spec(args)

    tarfile_name = bindist.tarball_name(spec, '.spack')
    tarball_dir_name = bindist.tarball_directory_name(spec)
    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
    local_tarball_path = os.path.join(args.path, tarball_dir_name)

    files_to_fetch = [
        {
            'url': tarball_path_name,
            'path': local_tarball_path,
            'required': True,
        },
        {
            'url': bindist.tarball_name(spec, '.spec.yaml'),
            'path': args.path,
            'required': True,
        },
        {
            'url': bindist.tarball_name(spec, '.cdashid'),
            'path': args.path,
            'required': args.require_cdashid,
        },
    ]

    result = bindist.download_buildcache_entry(files_to_fetch)

    if result:
        sys.exit(0)

    sys.exit(1)
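
Since the function reads everything from an argparse namespace, here is a rough sketch of driving it programmatically; the attribute names are inferred from the accesses above and the values are placeholders (normally the spack buildcache download CLI builds this namespace):

import argparse

# Placeholder values; attribute names mirror the accesses in get_tarball().
args = argparse.Namespace(
    spec='zlib',            # abstract spec string to concretize, or...
    spec_yaml=None,         # ...a path to a .spec.yaml file instead
    path='/tmp/downloads',  # local destination folder
    require_cdashid=False,  # also require the .cdashid file?
)
get_tarball(args)  # exits the process: 0 on success, 1 on failure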
Example #5
def read_cdashid_from_mirror(spec, mirror_url):
    if not spec.concrete:
        tty.die('Can only read cdashid for concrete spec from mirror')

    buildcache_name = bindist.tarball_name(spec, '')
    cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
    url = os.path.join(mirror_url, bindist.build_cache_relative_path(),
                       cdashid_file_name)

    resp_url, resp_headers, response = web_util.read_from_url(url)
    contents = response.fp.read()

    return int(contents)
Example #6
def write_cdashid_to_mirror(cdashid, spec, mirror_url):
    if not spec.concrete:
        tty.die('Can only write cdashid for concrete spec to mirror')

    with TemporaryDirectory() as tmpdir:
        local_cdash_path = os.path.join(tmpdir, 'job.cdashid')
        with open(local_cdash_path, 'w') as fd:
            fd.write(cdashid)

        buildcache_name = bindist.tarball_name(spec, '')
        cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
        remote_url = os.path.join(
            mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)

        tty.debug('pushing cdashid to url')
        tty.debug('  local file path: {0}'.format(local_cdash_path))
        tty.debug('  remote url: {0}'.format(remote_url))
        web_util.push_to_url(local_cdash_path, remote_url)
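
Together with Example #5, these two helpers form a simple round trip. A sketch, assuming spec is concrete and the (placeholder) mirror URL is writable:

# Round-trip sketch; the mirror URL is a placeholder.
write_cdashid_to_mirror('12345', spec, 'file:///path/to/mirror')
buildid = read_cdashid_from_mirror(spec, 'file:///path/to/mirror')
assert buildid == 12345  # read_cdashid_from_mirror returns an int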
Example #7
def buildcache_sync(args):
    """ Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror
    source_location = None
    if args.src_directory:
        source_location = args.src_directory
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--src-directory" expected a local path; got a URL, instead')
        # Ensure the mirror lookup does not mistake this for a named mirror
        source_location = 'file://' + source_location
    elif args.src_mirror_name:
        source_location = args.src_mirror_name
        result = spack.mirror.MirrorCollection().lookup(source_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=source_location))
    elif args.src_mirror_url:
        source_location = args.src_mirror_url
        scheme = url_util.parse(source_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=source_location))

    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror
    dest_location = None
    if args.dest_directory:
        dest_location = args.dest_directory
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"--dest-directory" expected a local path; got a URL, instead')
        # Ensure the mirror lookup does not mistake this for a named mirror
        dest_location = 'file://' + dest_location
    elif args.dest_mirror_name:
        dest_location = args.dest_mirror_name
        result = spack.mirror.MirrorCollection().lookup(dest_location)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=dest_location))
    elif args.dest_mirror_url:
        dest_location = args.dest_mirror_url
        scheme = url_util.parse(dest_location, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=dest_location))

    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug('  {0}{1}: {2}'.format('* ' if s in env.roots() else '  ',
                                         s.name, s.dag_hash()))

        buildcache_rel_paths.extend([
            os.path.join(build_cache_dir,
                         bindist.tarball_path_name(s, '.spack')),
            os.path.join(build_cache_dir,
                         bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(build_cache_dir, bindist.tarball_name(s, '.cdashid'))
        ])

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url,
                          name="temporary_file",
                          path=os.path.dirname(local_path),
                          keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)
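
The source and destination are each chosen from three mutually exclusive options. A sketch of the namespace this function inspects, with attribute names inferred from the accesses above and placeholder values; exactly one src_* and one dest_* option should be set, and an active Spack environment is required:

import argparse

# Placeholder values; set exactly one src_* and one dest_* option.
args = argparse.Namespace(
    src_directory=None,
    src_mirror_name='my-source-mirror',  # a mirror from mirrors.yaml
    src_mirror_url=None,
    dest_directory='/path/to/local/mirror',
    dest_mirror_name=None,
    dest_mirror_url=None,
)
buildcache_sync(args)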
Example #8
def buildcache_copy(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-yaml' argument."""
    # TODO: This sub-command should go away once #11117 is merged

    if not args.spec_yaml:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_yaml, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_yaml))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(build_cache_dir,
                                    bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(build_cache_dir,
                                     bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    cdashidfile_rel_path = os.path.join(build_cache_dir,
                                        bindist.tarball_name(spec, '.cdashid'))
    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

    # Make sure directory structure exists before attempting to copy
    os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # Copy the cdashid file (if exists) to the destination mirror
    if os.path.exists(cdashid_src_path):
        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
        shutil.copyfile(cdashid_src_path, cdashid_dest_path)
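
A sketch of the namespace this subcommand expects; the paths are placeholders, and note that the destination must be a local path or a file:// URL:

import argparse

# Placeholder paths for the 'buildcache copy' subcommand.
args = argparse.Namespace(
    spec_yaml='/path/to/pkg.spec.yaml',
    base_dir='/path/to/source/mirror',
    destination_url='file:///path/to/dest/mirror',
)
buildcache_copy(args)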
Example #9
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            prune_dag=False,
                            check_index_only=False,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
    pr_mirror_url = None
    if spack_pr_branch:
        pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
                                      spack_pr_branch)

    if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
        tty.die('spack ci generate requires an env containing a mirror')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    rebuild_index_enabled = True
    if 'rebuild-index' in gitlab_ci and gitlab_ci['rebuild-index'] is False:
        rebuild_index_enabled = False

    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    # Add this mirror if it's enabled, as some specs might be up to date
    # here and thus not need to be rebuilt.
    if pr_mirror_url:
        add_pr_mirror(pr_mirror_url)

    # Speed up staging by first fetching binary indices from all mirrors
    # (including the per-PR mirror we may have just added above).
    bindist.binary_index.update()

    staged_phases = {}
    try:
        for phase in phases:
            phase_name = phase['name']
            with spack.concretize.disable_compiler_existence_check():
                staged_phases[phase_name] = stage_spec_jobs(
                    env.spec_lists[phase_name],
                    check_index_only=check_index_only)
    finally:
        # Clean up PR mirror if enabled
        if pr_mirror_url:
            remove_pr_mirror()

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    before_script, after_script = None, None
    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                spec_record = spec_labels[spec_label]
                root_spec = spec_record['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        # Get dependencies transitively, so they're all
                        # available in the artifacts buildcache.
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        # In this case, "needs" is only used for scheduling
                        # purposes, so we only get the direct dependencies.
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group, prune_dag,
                                         spec_labels,
                                         enable_artifacts_buildcache))

                rebuild_spec = spec_record['needs_rebuild']

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    spec_arch_family = (release_spec.architecture.target.
                                        microarchitecture.family)
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        c_spec = bs['spec']
                        bs_arch = c_spec.architecture
                        bs_arch_family = (
                            bs_arch.target.microarchitecture.family)
                        if (c_spec.satisfies(compiler_pkg_spec)
                                and bs_arch_family == spec_arch_family):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transitive
                            # deps
                            # of the compiler as well.

                            # Here we check whether the bootstrapped compiler
                            # needs to be rebuilt.  Until compilers are proper
                            # dependencies, we artificially force the spec to
                            # be rebuilt if the compiler targeted to build it
                            # needs to be rebuilt.
                            bs_specs, _, _ = staged_phases[bs['phase-name']]
                            c_spec_key = spec_deps_key(c_spec)
                            rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
                            rebuild_spec = rebuild_spec or rbld_comp
                            # Also update record so dependents do not fail to
                            # add this spec to their "needs"
                            spec_record['needs_rebuild'] = rebuild_spec

                            dep_jobs = [c_spec]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in c_spec.traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group, prune_dag,
                                                 bs_specs,
                                                 enable_artifacts_buildcache))
                        else:
                            debug_msg = ''.join([
                                'Considered compiler {0} for spec ',
                                '{1}, but rejected it either because it was ',
                                'not the compiler required by the spec, or ',
                                'because the target arch families of the ',
                                'spec and the compiler did not match'
                            ]).format(c_spec, release_spec)
                            tty.debug(debug_msg)

                if prune_dag and not rebuild_spec:
                    continue

                job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    },
                    'interruptible': True
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    if job_id > 0:
        tty.debug('The max_needs_job is {0}, with {1} needs'.format(
            max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    service_job_config = None
    if 'service-job-attributes' in gitlab_ci:
        service_job_config = gitlab_ci['service-job-attributes']

    default_attrs = [
        'image',
        'tags',
        'variables',
        'before_script',
        # 'script',
        'after_script',
    ]

    if job_id > 0:
        if temp_storage_url_prefix:
            # There were some rebuild jobs scheduled, so we will need to
            # schedule a job to clean up the temporary storage location
            # associated with this pipeline.
            stage_names.append('cleanup-temp-storage')
            cleanup_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, cleanup_job)

            cleanup_job['stage'] = 'cleanup-temp-storage'
            cleanup_job['script'] = [
                'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.
                format(temp_storage_url_prefix)
            ]
            cleanup_job['when'] = 'always'

            output_object['cleanup'] = cleanup_job

        if rebuild_index_enabled:
            # Add a final job to regenerate the index
            stage_names.append('stage-rebuild-index')
            final_job = {}

            if service_job_config:
                copy_attributes(default_attrs, service_job_config, final_job)

            index_target_mirror = mirror_urls[0]
            if is_pr_pipeline:
                index_target_mirror = pr_mirror_url

            final_job['stage'] = 'stage-rebuild-index'
            final_job['script'] = [
                'spack buildcache update-index --keys -d {0}'.format(
                    index_target_mirror)
            ]
            final_job['when'] = 'always'

            output_object['rebuild-index'] = final_job

        output_object['stages'] = stage_names

        # Capture the version of spack used to generate the pipeline, transform it
        # into a value that can be passed to "git checkout", and save it in a
        # global yaml variable
        spack_version = spack.main.get_version()
        version_to_clone = None
        v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
        if v_match:
            version_to_clone = 'v{0}'.format(v_match.group(0))
        else:
            v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
            if v_match:
                version_to_clone = v_match.group(1)
            else:
                version_to_clone = spack_version

        output_object['variables'] = {
            'SPACK_VERSION': spack_version,
            'SPACK_CHECKOUT_VERSION': version_to_clone,
        }

        if pr_mirror_url:
            output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url

        sorted_output = {}
        for output_key, output_value in sorted(output_object.items()):
            sorted_output[output_key] = output_value

        # TODO(opadron): remove this or refactor
        if run_optimizer:
            import spack.ci_optimization as ci_opt
            sorted_output = ci_opt.optimizer(sorted_output)

        # TODO(opadron): remove this or refactor
        if use_dependencies:
            import spack.ci_needs_workaround as cinw
            sorted_output = cinw.needs_to_dependencies(sorted_output)
    else:
        # No jobs were generated
        tty.debug('No specs to rebuild, generating no-op job')
        noop_job = {}

        if service_job_config:
            copy_attributes(default_attrs, service_job_config, noop_job)

        if 'script' not in noop_job:
            noop_job['script'] = [
                'echo "All specs already up to date, nothing to rebuild."',
            ]

        sorted_output = {'no-specs-to-rebuild': noop_job}

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
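
The SPACK_CHECKOUT_VERSION logic near the end deserves a worked example: a plain release version gets a 'v' prefix so it can be passed to "git checkout" as a tag, a dev version of the form <version>-<commits>-<hash> is reduced to its trailing commit hash, and anything else passes through unchanged. A standalone sketch of the same regexes (the sample version strings are placeholders):

import re

def checkout_version(spack_version):
    # Release versions like '0.16.2' become the tag 'v0.16.2'.
    if re.match(r"^\d+\.\d+\.\d+$", spack_version):
        return 'v{0}'.format(spack_version)
    # Dev versions like '0.16.1-123-abc1234' yield the trailing hash.
    m = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
    return m.group(1) if m else spack_version

assert checkout_version('0.16.2') == 'v0.16.2'
assert checkout_version('0.16.1-123-abc1234') == 'abc1234'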
Example #10
def copy_fn(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-file' argument."""
    # TODO: Remove after v0.18.0 release
    msg = ('"spack buildcache copy" is deprecated and will be removed from '
           'Spack starting in v0.19.0')
    warnings.warn(msg)

    if not args.spec_file:
        tty.msg('No spec file provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_file, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_file))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(build_cache_dir,
                                    bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(build_cache_dir,
                                     bindist.tarball_name(spec, '.spec.json'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    specfile_rel_path_yaml = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path_yaml = os.path.join(args.base_dir,
                                          specfile_rel_path_yaml)
    specfile_dest_path_yaml = os.path.join(dest_root_path,
                                           specfile_rel_path_yaml)

    # Make sure directory structure exists before attempting to copy
    os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
    shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
Example #11
def get_buildcache_name_fn(args):
    """Get name (prefix) of buildcache entries for this spec"""
    spec = _concrete_spec_from_args(args)
    buildcache_name = bindist.tarball_name(spec, '')
    print('{0}'.format(buildcache_name))
Example #12
def generate_gitlab_ci_yaml(env,
                            print_summary,
                            output_file,
                            run_optimizer=False,
                            use_dependencies=False):
    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'?  This will only handle the former.
    with spack.concretize.disable_compiler_existence_check():
        env.concretize()

    yaml_root = ev.config_dict(env.yaml)

    if 'gitlab-ci' not in yaml_root:
        tty.die('Environment yaml does not have "gitlab-ci" section')

    gitlab_ci = yaml_root['gitlab-ci']

    final_job_config = None
    if 'final-stage-rebuild-index' in gitlab_ci:
        final_job_config = gitlab_ci['final-stage-rebuild-index']

    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        cdash_site = ci_cdash['site']

        if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
            tty.verbose("Using CDash auth token from environment")
            cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')

    is_pr_pipeline = (os.environ.get('SPACK_IS_PR_PIPELINE',
                                     '').lower() == 'true')

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = [url for url in ci_mirrors.values()]

    enable_artifacts_buildcache = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']

    bootstrap_specs = []
    phases = []
    if 'bootstrap' in gitlab_ci:
        for phase in gitlab_ci['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        with spack.concretize.disable_compiler_existence_check():
            staged_phases[phase_name] = stage_spec_jobs(
                env.spec_lists[phase_name])

    if print_summary:
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_id = 0
    stage_id = 0

    stage_names = []

    max_length_needs = 0
    max_needs_job = ''

    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            for spec_label in stage_jobs:
                root_spec = spec_labels[spec_label]['rootSpec']
                pkg_name = pkg_name_from_spec_label(spec_label)
                release_spec = root_spec[pkg_name]

                runner_attribs = find_matching_config(release_spec, gitlab_ci)

                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

                tags = [tag for tag in runner_attribs['tags']]

                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

                job_script = [
                    'spack env activate --without-view .',
                    'spack ci rebuild',
                ]
                if 'script' in runner_attribs:
                    job_script = [s for s in runner_attribs['script']]

                before_script = None
                if 'before_script' in runner_attribs:
                    before_script = [
                        s for s in runner_attribs['before_script']
                    ]

                after_script = None
                if 'after_script' in runner_attribs:
                    after_script = [s for s in runner_attribs['after_script']]

                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

                job_vars = {
                    'SPACK_ROOT_SPEC':
                    format_root_spec(root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME':
                    release_spec.name,
                    'SPACK_COMPILER_ACTION':
                    compiler_action,
                    'SPACK_IS_PR_PIPELINE':
                    str(is_pr_pipeline),
                }

                job_dependencies = []
                if spec_label in dependencies:
                    if enable_artifacts_buildcache:
                        dep_jobs = [
                            d for d in release_spec.traverse(deptype=all,
                                                             root=False)
                        ]
                    else:
                        dep_jobs = []
                        for dep_label in dependencies[spec_label]:
                            dep_pkg = pkg_name_from_spec_label(dep_label)
                            dep_root = spec_labels[dep_label]['rootSpec']
                            dep_jobs.append(dep_root[dep_pkg])

                    job_dependencies.extend(
                        format_job_needs(phase_name, strip_compilers, dep_jobs,
                                         osname, build_group,
                                         enable_artifacts_buildcache))

                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler.  So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec)
                                and bs_arch == release_spec.architecture):
                            # We found the bootstrap compiler this release spec
                            # should be built with, so for DAG scheduling
                            # purposes, we will at least add the compiler spec
                            # to the jobs "needs".  But if artifact buildcache
                            # is enabled, we'll have to add all transitive
                            # deps
                            # of the compiler as well.
                            dep_jobs = [bs['spec']]
                            if enable_artifacts_buildcache:
                                dep_jobs = [
                                    d for d in bs['spec'].traverse(deptype=all)
                                ]

                            job_dependencies.extend(
                                format_job_needs(bs['phase-name'],
                                                 bs['strip-compilers'],
                                                 dep_jobs, str(bs_arch),
                                                 build_group,
                                                 enable_artifacts_buildcache))

                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = ([
                            spec_labels[d]['spec'].name
                            for d in dependencies[spec_label]
                        ])

                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
                        sorted(related_builds))

                variables.update(job_vars)

                artifact_paths = [
                    'jobs_scratch_dir',
                    'cdash_report',
                ]

                if enable_artifacts_buildcache:
                    bc_root = 'local_mirror/build_cache'
                    artifact_paths.extend([
                        os.path.join(bc_root, p) for p in [
                            bindist.tarball_name(release_spec, '.spec.yaml'),
                            bindist.tarball_name(release_spec, '.cdashid'),
                            bindist.tarball_directory_name(release_spec),
                        ]
                    ])

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_script,
                    'tags': tags,
                    'artifacts': {
                        'paths': artifact_paths,
                        'when': 'always',
                    },
                    'needs': sorted(job_dependencies, key=lambda d: d['job']),
                    'retry': {
                        'max': 2,
                        'when': JOB_RETRY_CONDITIONS,
                    }
                }

                length_needs = len(job_dependencies)
                if length_needs > max_length_needs:
                    max_length_needs = length_needs
                    max_needs_job = job_name

                if before_script:
                    job_object['before_script'] = before_script

                if after_script:
                    job_object['after_script'] = after_script

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.debug('{0} build jobs generated in {1} stages'.format(
        job_id, stage_id))

    tty.debug('The max_needs_job is {0}, with {1} needs'.format(
        max_needs_job, max_length_needs))

    # Use "all_job_names" to populate the build group for this set
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
        except (SpackError, HTTPError, URLError) as err:
            tty.warn('Problem populating buildgroup: {0}'.format(err))
    else:
        tty.warn('Unable to populate buildgroup without CDash credentials')

    if final_job_config and not is_pr_pipeline:
        # Add an extra, final job to regenerate the index
        final_stage = 'stage-rebuild-index'
        final_job = {
            'stage':
            final_stage,
            'script':
            'spack buildcache update-index --keys -d {0}'.format(
                mirror_urls[0]),
            'tags':
            final_job_config['tags'],
            'when':
            'always'
        }
        if 'image' in final_job_config:
            final_job['image'] = final_job_config['image']
        if before_script:
            final_job['before_script'] = before_script
        if after_script:
            final_job['after_script'] = after_script
        output_object['rebuild-index'] = final_job
        stage_names.append(final_stage)

    output_object['stages'] = stage_names

    # Capture the version of spack used to generate the pipeline, transform it
    # into a value that can be passed to "git checkout", and save it in a
    # global yaml variable
    spack_version = spack.main.get_version()
    version_to_clone = None
    v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
    if v_match:
        version_to_clone = 'v{0}'.format(v_match.group(0))
    else:
        v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
        if v_match:
            version_to_clone = v_match.group(1)
        else:
            version_to_clone = spack_version

    output_object['variables'] = {
        'SPACK_VERSION': spack_version,
        'SPACK_CHECKOUT_VERSION': version_to_clone,
    }

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    # TODO(opadron): remove this or refactor
    if run_optimizer:
        import spack.ci_optimization as ci_opt
        sorted_output = ci_opt.optimizer(sorted_output)

    # TODO(opadron): remove this or refactor
    if use_dependencies:
        import spack.ci_needs_workaround as cinw
        sorted_output = cinw.needs_to_dependencies(sorted_output)

    with open(output_file, 'w') as outf:
        outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
Example #13
def upload_spec(args):
    """Upload a spec to s3 bucket"""
    if not args.spec and not args.spec_yaml:
        tty.error('Cannot upload spec without spec arg or path to spec yaml')
        sys.exit(1)

    if not args.base_dir:
        tty.error('No base directory for buildcache specified')
        sys.exit(1)

    if args.spec:
        try:
            spec = Spec(args.spec)
            spec.concretize()
        except Exception as e:
            tty.debug(e)
            tty.error('Unable to concretize spec from string {0}'.format(
                args.spec))
            sys.exit(1)
    else:
        try:
            with open(args.spec_yaml, 'r') as fd:
                spec = Spec.from_yaml(fd.read())
        except Exception as e:
            tty.debug(e)
            tty.error('Unable to concretize spec from yaml {0}'.format(
                args.spec_yaml))
            sys.exit(1)

    s3, bucket_name = get_s3_session(args.endpoint_url)

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_key = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_path = os.path.join(args.base_dir, tarball_key)

    specfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_path = os.path.join(args.base_dir, specfile_key)

    cdashidfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
    cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)

    tty.msg('Uploading {0}'.format(tarball_key))
    s3.meta.client.upload_file(
        tarball_path, bucket_name,
        os.path.join('mirror', tarball_key),
        ExtraArgs={'ACL': 'public-read'})

    tty.msg('Uploading {0}'.format(specfile_key))
    s3.meta.client.upload_file(
        specfile_path, bucket_name,
        os.path.join('mirror', specfile_key),
        ExtraArgs={'ACL': 'public-read'})

    if os.path.exists(cdashidfile_path):
        tty.msg('Uploading {0}'.format(cdashidfile_key))
        s3.meta.client.upload_file(
            cdashidfile_path, bucket_name,
            os.path.join('mirror', cdashidfile_key),
            ExtraArgs={'ACL': 'public-read'})
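
A sketch of driving this uploader directly, assuming boto3 credentials are configured and get_s3_session can resolve the bucket; the attribute names mirror the accesses above and the values are placeholders:

import argparse

# Placeholder values; attribute names mirror the accesses in upload_spec().
args = argparse.Namespace(
    spec='zlib',                       # abstract spec to concretize, or...
    spec_yaml=None,                    # ...a path to a .spec.yaml file
    base_dir='/path/to/local/mirror',  # root containing build_cache/
    endpoint_url=None,                 # custom S3 endpoint, if any
)
upload_spec(args)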