Example 1
import os
import sys

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.config
from spack.paths import etc_path
from spack.spec_set import CombinatorialSpecSet

# Note: get_concrete_spec is a helper defined elsewhere in the surrounding
# Spack module; the imports above mirror that module and may differ across
# Spack versions.


def check_binaries(args):
    """Check specs (either a single spec from --spec, or else the full set
    of release specs) against remote binary mirror(s) to see if any need
    to be rebuilt.  This command uses the process exit code to indicate
    its result: a non-zero exit code means that at least one of the
    indicated specs needs to be rebuilt.
    """
    if args.spec or args.spec_yaml:
        specs = [get_concrete_spec(args)]
    else:
        release_specs_path = os.path.join(etc_path, 'spack', 'defaults',
                                          'release.yaml')
        spec_set = CombinatorialSpecSet.from_file(release_specs_path)
        specs = list(spec_set)

    if not specs:
        tty.msg('No specs provided, exiting.')
        sys.exit(0)

    for spec in specs:
        spec.concretize()

    # Next see if there are any configured binary mirrors
    configured_mirrors = spack.config.get('mirrors', scope=args.scope)

    if args.mirror_url:
        configured_mirrors = {'additionalMirrorUrl': args.mirror_url}

    if not configured_mirrors:
        tty.msg('No mirrors provided, exiting.')
        sys.exit(0)

    sys.exit(
        bindist.check_specs_against_mirrors(configured_mirrors, specs,
                                            args.output_file,
                                            args.rebuild_on_error))
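
A minimal sketch of how the exit-code contract described in the docstring can
be consumed from a wrapper script. The `spack buildcache check` subcommand
name and the exact flags are assumptions inferred from the argument names
above; adjust them for your Spack version.

import subprocess

result = subprocess.run(
    ['spack', 'buildcache', 'check',
     '--spec', 'zlib@1.2.11',                        # hypothetical spec
     '--mirror-url', 'https://mirror.example.com'])  # hypothetical mirror

if result.returncode != 0:
    # Non-zero exit: at least one spec needs to be rebuilt.
    print('rebuild required')
else:
    print('all specs are up to date on the mirror')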
Example 2
import os

from jsonschema import validate, ValidationError

import llnl.util.tty as tty

import spack.util.spack_yaml as syaml
from spack.architecture import sys_type
from spack.error import SpackError
from spack.paths import spack_root
from spack.spec import CompilerSpec, Spec
from spack.spec_set import CombinatorialSpecSet

# Note: stage_spec_jobs, print_staging_summary, compute_spec_deps,
# get_job_name, and mapping_schema are helpers defined elsewhere in the
# surrounding Spack module; the imports above mirror that module and may
# differ across Spack versions.


def release_jobs(parser, args):
    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
    os_container_mapping_path = os.path.join(
        share_path, 'os-container-mapping.yaml')

    with open(os_container_mapping_path, 'r') as fin:
        os_container_mapping = syaml.load(fin)

    try:
        validate(os_container_mapping, mapping_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed os-container-mapping configuration object')
        tty.error(os_container_mapping)
        tty.debug(val_err)
        return

    containers = os_container_mapping['containers']
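
    # For illustration only: a mapping shaped like the following would satisfy
    # the way `containers` is used below. The key is the spec's architecture
    # string; the image name and compiler list are hypothetical.
    #
    #   containers:
    #     linux-ubuntu18.04-x86_64:
    #       image: spack/ubuntu-bionic     # used as the job's build image
    #       setup_script: 'setup-%s.sh'    # optional; '%' receives the compiler
    #       compilers:                     # optional filter; without it the
    #         - name: gcc@7.5.0            #   container accepts any compiler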

    if args.specs:
        # Just print out the spec labels and all dependency edges in
        # JSON format.
        spec_list = [Spec(s) for s in args.specs]
        with open(args.specs_deps_output, 'w') as out:
            compute_spec_deps(spec_list, out)
        return

    current_system = sys_type() if args.resolve_deps_locally else None

    release_specs_path = args.spec_set
    if not release_specs_path:
        raise SpackError('Must provide path to release spec-set')

    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)

    mirror_url = args.mirror_url

    if not mirror_url:
        raise SpackError('Must provide url of target binary mirror')

    cdash_url = args.cdash_url

    spec_labels, dependencies, stages = stage_spec_jobs(
        release_spec_set, containers, current_system)

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)

    output_object = {}
    job_count = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]

    for stage_name, stage_jobs in zip(stage_names, stages):

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']

            pkg_compiler = release_spec.compiler
            pkg_hash = release_spec.dag_hash()

            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname)
            container_info = containers[osname]
            build_image = container_info['image']

            job_scripts = ['./bin/rebuild-package.sh']

            if 'setup_script' in container_info:
                job_scripts.insert(
                    0, container_info['setup_script'] % pkg_compiler)

            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[dep_label]['spec'], osname)
                        for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': {
                    'MIRROR_URL': mirror_url,
                    'CDASH_BASE_URL': cdash_url,
                    'HASH': pkg_hash,
                    'DEPENDENCIES': ';'.join(job_dependencies),
                    'ROOT_SPEC': str(root_spec),
                },
                'script': job_scripts,
                'image': build_image,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
            }
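
            # When serialized to YAML, each entry above becomes a GitLab CI
            # job roughly like this (the job name comes from get_job_name;
            # all values shown are hypothetical):
            #
            #   <job name>:
            #     stage: stage-0
            #     variables:
            #       MIRROR_URL: https://mirror.example.com
            #       CDASH_BASE_URL: https://cdash.example.com
            #       HASH: abc123
            #       DEPENDENCIES: dep-job-1;dep-job-2
            #       ROOT_SPEC: zlib@1.2.11%gcc@7.5.0
            #     script: [./bin/rebuild-package.sh]
            #     image: spack/ubuntu-bionic
            #     artifacts:
            #       paths: [local_mirror/build_cache, jobs_scratch_dir, cdash_report]
            #       when: always
            #     dependencies: [dep-job-1, dep-job-2]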

            # If we see 'compilers' in the container information, it's a
            # filter for the compilers this container can handle; otherwise
            # we assume it can handle any compiler.
            if 'compilers' in container_info:
                do_job = False
                for item in container_info['compilers']:
                    container_compiler_spec = CompilerSpec(item['name'])
                    if pkg_compiler == container_compiler_spec:
                        do_job = True
                        break
            else:
                do_job = True

            if args.shared_runner_tag:
                job_object['tags'] = [args.shared_runner_tag]

            if args.signing_key:
                job_object['variables']['SIGN_KEY_HASH'] = args.signing_key

            if do_job:
                output_object[job_name] = job_object
                job_count += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))

    final_stage = 'stage-rebuild-index'

    # Note: build_image carries over from the last job generated in the loop
    # above, so the rebuild-index job reuses whatever image that job used.
    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_url,
        },
        'image': build_image,
        'script': './bin/rebuild-index.sh',
    }

    if args.shared_runner_tag:
        final_job['tags'] = [args.shared_runner_tag]

    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)
    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump(output_object))
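
A quick way to sanity-check the generated file is to load it back and list
the stages and job names. The sketch below assumes PyYAML is installed and
that args.output_file was 'jobs.yaml'; both are assumptions for illustration.

import yaml

with open('jobs.yaml') as f:            # hypothetical output path
    ci_config = yaml.safe_load(f)

print('stages:', ci_config.pop('stages'))
for job_name in sorted(ci_config):      # remaining top-level keys are jobs
    print('job:', job_name)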