Example #1
    def __init__(self, root, layout, **kwargs):
        super(YamlFilesystemView, self).__init__(root, layout, **kwargs)

        # Super class gets projections from the kwargs
        # YAML specific to get projections from YAML file
        projections_path = os.path.join(self._root, _projections_path)
        if not self.projections:
            if os.path.exists(projections_path):
                # Read projections file from view
                with open(projections_path, 'r') as f:
                    projections_data = s_yaml.load(f)
                    spack.config.validate(projections_data,
                                          spack.schema.projections.schema)
                    self.projections = projections_data['projections']
            else:
                # Write projections file to new view
                # Not strictly necessary as the empty file is the empty
                # projection but it makes sense for consistency
                mkdirp(os.path.dirname(projections_path))
                with open(projections_path, 'w') as f:
                    f.write(s_yaml.dump({'projections': self.projections}))
        elif not os.path.exists(projections_path):
            # Write projections file to new view
            mkdirp(os.path.dirname(projections_path))
            with open(projections_path, 'w') as f:
                f.write(s_yaml.dump({'projections': self.projections}))
        else:
            msg = 'View at %s has projections file' % self._root
            msg += ' and was passed projections manually.'
            raise ConflictingProjectionsError(msg)

        self.extensions_layout = YamlViewExtensionsLayout(self, layout)

        self._croot = colorize_root(self._root) + " "
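For context, a minimal sketch of writing and reading back a projections mapping with the same helper. The file name, import alias, and projection pattern are assumptions; only the top-level 'projections' key mirrors what the constructor above writes.

import spack.util.spack_yaml as s_yaml  # assumed import alias for the helper used above

# Hypothetical projection pattern (same placeholder syntax as Example #3 below)
projections = {'all': '{name}/{version}-{hash:4}'}

with open('projections.yaml', 'w') as f:          # write, as for a new view
    f.write(s_yaml.dump({'projections': projections}))

with open('projections.yaml') as f:               # read back, as for an existing view
    data = s_yaml.load(f)
assert data['projections'] == projections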
Example #2
    def to_yaml(self, stream=None):
        provider_list = self._transform(
            lambda vpkg, pset: [
                vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)

        syaml.dump({'provider_index': {'providers': provider_list}},
                   stream=stream)
Example #3
def add_view(env, extension, link_type):
    view_path = os.path.join(os.environ['SPACK_MANAGER'], 'views', extension,
                             'snapshot')
    view_dict = {
        'snapshot': {
            'root': view_path,
            'projections': {
                'all':
                '{compiler.name}-{compiler.version}/{name}/'
                '{version}-{hash:4}',
                '^cuda':
                '{compiler.name}-{compiler.version}-'
                '{^cuda.name}-{^cuda.version}/{name}/{version}'
                '-{hash:4}',
                '^rocm':
                '{compiler.name}-{compiler.version}-'
                '{^rocm.name}-{^rocm.version}/{name}/{version}'
                '-{hash:4}'
            },
            'link_type': link_type
        }
    }
    with open(env.manifest_path, 'r') as f:
        yaml = syaml.load(f)
    # view yaml entry can also be a bool so first try to add to a dictionary,
    # and if that fails overwrite entry as a dictionary
    try:
        yaml['spack']['view'].update(view_dict)
    except AttributeError:
        yaml['spack']['view'] = view_dict

    with open(env.manifest_path, 'w') as f:
        syaml.dump(yaml, stream=f, default_flow_style=False)
Example #4
    def to_yaml(self, stream=None):
        provider_list = self._transform(
            lambda vpkg, pset: [
                vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)

        syaml.dump({'provider_index': {'providers': provider_list}},
                   stream=stream)
Example #5
def print_section(section):
    """Print a configuration to stdout."""
    try:
        data = syaml.syaml_dict()
        data[section] = get_config(section)
        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigError("Error reading configuration: %s" % section)
Example #6
def print_section(section):
    """Print a configuration to stdout."""
    try:
        data = syaml.syaml_dict()
        data[section] = get_config(section)
        syaml.dump(data, stream=sys.stdout, default_flow_style=False)
    except (yaml.YAMLError, IOError) as e:
        raise ConfigError("Error reading configuration: %s" % section)
Example #7
def generate_module_index(root, modules):
    entries = syaml.syaml_dict()
    for m in modules:
        entry = {'path': m.layout.filename, 'use_name': m.layout.use_name}
        entries[m.spec.dag_hash()] = entry
    index = {'module_index': entries}
    index_path = os.path.join(root, 'module-index.yaml')
    llnl.util.filesystem.mkdirp(root)
    with open(index_path, 'w') as index_file:
        syaml.dump(index, default_flow_style=False, stream=index_file)
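A small sketch of reading that index back with the same helper, mirroring what Example #14 below does when it extends an existing index; `root` is assumed to be the same directory used above.

import os
import spack.util.spack_yaml as syaml  # assumed import alias

index_path = os.path.join(root, 'module-index.yaml')
with open(index_path) as index_file:
    yaml_content = syaml.load(index_file)
entries = yaml_content['module_index']  # maps dag_hash -> {'path': ..., 'use_name': ...}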
Example #8
    def write_section(self, section):
        filename = self.get_section_filename(section)
        data = self.get_section(section)
        try:
            mkdirp(self.path)
            with open(filename, 'w') as f:
                _validate_section(data, section_schemas[section])
                syaml.dump(data, stream=f, default_flow_style=False)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
Example #9
    def write_section(self, section):
        filename = self.get_section_filename(section)
        data = self.get_section(section)
        try:
            mkdirp(self.path)
            with open(filename, 'w') as f:
                _validate_section(data, section_schemas[section])
                syaml.dump(data, stream=f, default_flow_style=False)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError("Error writing to config file: '%s'" %
                                  str(e))
Example #10
    def write_section(self, section):
        _validate(self.sections, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

            tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
                syaml.dump(self.sections, stream=f, default_flow_style=False)
            # atomically replace the config file with the freshly written temp file
            os.rename(tmp, self.path)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError(
                "Error writing to config file: '%s'" % str(e))
Example #11
    def write_section(self, section):
        validate(self.sections, self.schema)
        try:
            parent = os.path.dirname(self.path)
            mkdirp(parent)

            tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
            with open(tmp, 'w') as f:
                syaml.dump(self.sections, stream=f, default_flow_style=False)
            # atomically replace the config file with the freshly written temp file
            os.rename(tmp, self.path)
        except (yaml.YAMLError, IOError) as e:
            raise ConfigFileError("Error writing to config file: '%s'" %
                                  str(e))
Example #12
def create_projection_file(tmpdir, projection):
    if 'projections' not in projection:
        projection = {'projections': projection}

    projection_file = tmpdir.mkdir('projection').join('projection.yaml')
    projection_file.write(s_yaml.dump(projection))
    return projection_file
Example #13
    def _create(scope=None):
        old_data = {
            'packages': {
                'cmake': {
                    'paths': {'[email protected]': '/usr'}
                },
                'gcc': {
                    'modules': {'[email protected]': 'gcc-8'}
                }
            }
        }
        scope = scope or spack.config.default_modify_scope()
        cfg_file = spack.config.config.get_config_filename(scope, 'packages')
        with open(cfg_file, 'w') as f:
            syaml.dump(old_data, stream=f)
        return cfg_file
Example #14
def generate_module_index(root, modules, overwrite=False):
    index_path = os.path.join(root, 'module-index.yaml')
    if overwrite or not os.path.exists(index_path):
        entries = syaml.syaml_dict()
    else:
        with open(index_path) as index_file:
            yaml_content = syaml.load(index_file)
            entries = yaml_content['module_index']

    for m in modules:
        entry = {'path': m.layout.filename, 'use_name': m.layout.use_name}
        entries[m.spec.dag_hash()] = entry
    index = {'module_index': entries}
    llnl.util.filesystem.mkdirp(root)
    with open(index_path, 'w') as index_file:
        syaml.dump(index, default_flow_style=False, stream=index_file)
Example #15
def write_buildinfo_file(spec, workdir, rel=False):
    """
    Create a cache file containing information
    required for the relocation
    """
    prefix = spec.prefix
    text_to_relocate = []
    binary_to_relocate = []
    link_to_relocate = []
    blacklist = (".spack", "man")
    prefix_to_hash = dict()
    prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
    deps = spack.build_environment.get_rpath_deps(spec.package)
    for d in deps:
        prefix_to_hash[str(d.prefix)] = d.dag_hash()
    # Do this during tarball creation to save time when the tarball is unpacked.
    # Used by make_package_relative to determine binaries to change.
    for root, dirs, files in os.walk(prefix, topdown=True):
        dirs[:] = [d for d in dirs if d not in blacklist]
        for filename in files:
            path_name = os.path.join(root, filename)
            m_type, m_subtype = relocate.mime_type(path_name)
            if os.path.islink(path_name):
                link = os.readlink(path_name)
                if os.path.isabs(link):
                    # Relocate absolute links into the spack tree
                    if link.startswith(spack.store.layout.root):
                        rel_path_name = os.path.relpath(path_name, prefix)
                        link_to_relocate.append(rel_path_name)
                    else:
                        msg = 'Absolute link %s to %s ' % (path_name, link)
                        msg += 'outside of prefix %s ' % prefix
                        msg += 'should not be relocated.'
                        tty.warn(msg)

            if relocate.needs_binary_relocation(m_type, m_subtype):
                if not filename.endswith('.o'):
                    rel_path_name = os.path.relpath(path_name, prefix)
                    binary_to_relocate.append(rel_path_name)
            if relocate.needs_text_relocation(m_type, m_subtype):
                rel_path_name = os.path.relpath(path_name, prefix)
                text_to_relocate.append(rel_path_name)

    # Create buildinfo data and write it to disk
    buildinfo = {}
    buildinfo['relative_rpaths'] = rel
    buildinfo['buildpath'] = spack.store.layout.root
    buildinfo['spackprefix'] = spack.paths.prefix
    buildinfo['relative_prefix'] = os.path.relpath(prefix,
                                                   spack.store.layout.root)
    buildinfo['relocate_textfiles'] = text_to_relocate
    buildinfo['relocate_binaries'] = binary_to_relocate
    buildinfo['relocate_links'] = link_to_relocate
    buildinfo['prefix_to_hash'] = prefix_to_hash
    filename = buildinfo_file_name(workdir)
    with open(filename, 'w') as outfile:
        outfile.write(syaml.dump(buildinfo, default_flow_style=True))
Example #16
def write_buildinfo_file(prefix, workdir, rel=False):
    """
    Create a cache file containing information
    required for the relocation
    """
    text_to_relocate = []
    binary_to_relocate = []
    link_to_relocate = []
    blacklist = (".spack", "man")
    # Do this during tarball creation to save time when the tarball is unpacked.
    # Used by make_package_relative to determine binaries to change.
    for root, dirs, files in os.walk(prefix, topdown=True):
        dirs[:] = [d for d in dirs if d not in blacklist]
        for filename in files:
            path_name = os.path.join(root, filename)
            m_type, m_subtype = relocate.mime_type(path_name)
            if os.path.islink(path_name):
                link = os.readlink(path_name)
                if os.path.isabs(link):
                    # Relocate absolute links into the spack tree
                    if link.startswith(spack.store.layout.root):
                        rel_path_name = os.path.relpath(path_name, prefix)
                        link_to_relocate.append(rel_path_name)
                    else:
                        msg = 'Absolute link %s to %s ' % (path_name, link)
                        msg += 'outside of stage %s ' % prefix
                        msg += 'cannot be relocated.'
                        tty.warn(msg)

            #  Check if the file contains a string with the installroot.
            #  This cuts down on the number of files added to the list
            #  of files potentially needing relocation
            elif relocate.strings_contains_installroot(
                    path_name,
                    spack.store.layout.root):
                if relocate.needs_binary_relocation(m_type, m_subtype):
                    rel_path_name = os.path.relpath(path_name, prefix)
                    binary_to_relocate.append(rel_path_name)
                elif relocate.needs_text_relocation(m_type, m_subtype):
                    rel_path_name = os.path.relpath(path_name, prefix)
                    text_to_relocate.append(rel_path_name)

    # Create buildinfo data and write it to disk
    buildinfo = {}
    buildinfo['relative_rpaths'] = rel
    buildinfo['buildpath'] = spack.store.layout.root
    buildinfo['spackprefix'] = spack.paths.prefix
    buildinfo['relative_prefix'] = os.path.relpath(
        prefix, spack.store.layout.root)
    buildinfo['relocate_textfiles'] = text_to_relocate
    buildinfo['relocate_binaries'] = binary_to_relocate
    buildinfo['relocate_links'] = link_to_relocate
    filename = buildinfo_file_name(workdir)
    with open(filename, 'w') as outfile:
        outfile.write(syaml.dump(buildinfo, default_flow_style=True))
Example #17
def add_spec(env, extension, data, create_modules):
    ev.activate(env)
    add(data.spec)
    ev.deactivate()
    excludes = view_excludes(data)

    with open(env.manifest_path, 'r') as f:
        yaml = syaml.load(f)

    if create_modules:
        module_excludes = excludes.copy()
        module_path = os.path.join(os.environ['SPACK_MANAGER'], 'modules')
        module_dict = {
            data.id: {
                'enable': ['tcl'],
                'use_view': data.id,
                'prefix_inspections': {
                    'bin': ['PATH']
                },
                'roots': {
                    'tcl': module_path
                },
                'arch_folder': False,
                'tcl': {
                    'projections': {
                        'all': '%s/{name}-%s' % (extension, data.id)
                    },
                    'hash_length': 0,
                    'blacklist_implicits': True,
                    'blacklist': module_excludes
                }
            }
        }
        try:
            yaml['spack']['modules'].update(module_dict)
        except KeyError:
            yaml['spack']['modules'] = module_dict

    with open(env.manifest_path, 'w') as f:
        syaml.dump(yaml, stream=f, default_flow_style=False)
Example #18
def test_yaml_aliases():
    aliased_list_1 = ['foo']
    aliased_list_2 = []
    dict_with_aliases = {
        'a': aliased_list_1,
        'b': aliased_list_1,
        'c': aliased_list_1,
        'd': aliased_list_2,
        'e': aliased_list_2,
        'f': aliased_list_2,
    }
    string = syaml.dump(dict_with_aliases)

    # ensure no YAML aliases appear in syaml dumps.
    assert '*id' not in string
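For contrast, a minimal sketch of why the test checks for '*id': plain PyYAML (assumed available here) emits anchors and aliases when the same object appears more than once, which is what syaml is expected to avoid.

import yaml  # plain PyYAML, for contrast

shared = ['foo']
print(yaml.dump({'a': shared, 'b': shared}, default_flow_style=False))
# typically prints something like:
#   a: &id001
#   - foo
#   b: *id001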
Example #19
    def test_yaml_aliases(self):
        aliased_list_1 = ['foo']
        aliased_list_2 = []
        dict_with_aliases = {
            'a': aliased_list_1,
            'b': aliased_list_1,
            'c': aliased_list_1,
            'd': aliased_list_2,
            'e': aliased_list_2,
            'f': aliased_list_2,
        }
        string = syaml.dump(dict_with_aliases)

        # ensure no YAML aliases appear in syaml dumps.
        self.assertFalse('*id' in string)
Example #20
    def manifest(self):
        """The spack.yaml file that should be used in the image"""
        import jsonschema
        # Copy in the part of spack.yaml prescribed in the configuration file
        manifest = copy.deepcopy(self.config)
        manifest.pop('container')

        # Ensure that a few paths are where they need to be
        manifest.setdefault('config', syaml.syaml_dict())
        manifest['config']['install_tree'] = self.paths.store
        manifest['view'] = self.paths.view
        manifest = {'spack': manifest}

        # Validate the manifest file
        jsonschema.validate(manifest, schema=spack.schema.env.schema)

        return syaml.dump(manifest, default_flow_style=False).strip()
Example #21
for comp in compilers:
    comp = comp['compiler']
    paths = comp['paths']
    prefixes = []
    for bin, binpath in paths.items():
        binpath = os.path.dirname(binpath)
        if os.path.basename(binpath) == 'bin':
            binpath = os.path.dirname(binpath)
            prefixes.append(binpath)
    prefix = os.path.commonprefix(prefixes)
    if prefix.startswith("/usr") or prefix == "/":
        continue
    ld_paths = os.environ.get("LD_LIBRARY_PATH", "")
    ld_paths = ld_paths.split(os.path.pathsep)
    found_ld_paths = []
    for ld_elm in ld_paths:
        if ld_elm.startswith(prefix) and os.path.isdir(ld_elm):
            found_ld_paths.append(ld_elm)
    if len(found_ld_paths) == 0:
        continue
    found_ld_paths = os.path.pathsep.join(found_ld_paths)
    elm = comp.setdefault('environment', {})
    elm = elm.setdefault('prepend-path', {})
    elm["LD_LIBRARY_PATH"] = found_ld_paths
    print(
        "*** Fixing compiler spec %r with prefix %r to add LD_LIBRARY_PATH %r"
        % (comp['spec'], prefix, found_ld_paths))

with open(fn, "w") as f:
    dump(data, stream=f)
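The loop above assumes `compilers`, `data`, `fn`, and `dump` were set up earlier in the script; a minimal sketch of that setup, with a hypothetical file path, might be:

from spack.util.spack_yaml import load, dump  # assumed import of the helpers used above

fn = 'compilers.yaml'          # hypothetical path to the compilers config being rewritten
with open(fn) as f:
    data = load(f)
compilers = data['compilers']  # list of {'compiler': {...}} entries, as iterated above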
Example #22
def build_tarball(spec,
                  outdir,
                  force=False,
                  rel=False,
                  unsigned=False,
                  allow_root=False,
                  key=None,
                  regenerate_index=False):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    if not spec.concrete:
        raise ValueError('spec must be concrete to build tarball')

    # set up some paths
    tmpdir = tempfile.mkdtemp()
    cache_prefix = build_cache_prefix(tmpdir)

    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    spackfile_path = os.path.join(cache_prefix,
                                  tarball_path_name(spec, '.spack'))

    remote_spackfile_path = url_util.join(
        outdir, os.path.relpath(spackfile_path, tmpdir))

    mkdirp(tarfile_dir)
    if web_util.url_exists(remote_spackfile_path):
        if force:
            web_util.remove_url(remote_spackfile_path)
        else:
            raise NoOverwriteException(url_util.format(remote_spackfile_path))

    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))

    remote_specfile_path = url_util.join(
        outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))

    if web_util.url_exists(remote_specfile_path):
        if force:
            web_util.remove_url(remote_specfile_path)
        else:
            raise NoOverwriteException(url_util.format(remote_specfile_path))

    # make a copy of the install directory to work with
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
    # install_tree copies hardlinks
    # create a temporary tarfile from the prefix and extract it to workdir
    # tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % spec.prefix, arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(workdir)
    os.remove(temp_tarfile_path)

    # create info for later relocation and create tar
    write_buildinfo_file(spec, workdir, rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            shutil.rmtree(tmpdir)
            tty.die(e)
    else:
        try:
            check_package_relocatable(workdir, spec, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            shutil.rmtree(tmpdir)
            tty.die(e)

    # create gzip compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = yaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(spec.prefix,
                                                   spack.store.layout.root)
    buildinfo['relative_rpaths'] = rel
    spec_dict['buildinfo'] = buildinfo
    spec_dict['full_hash'] = spec.full_hash()

    tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
        spec_dict['full_hash'], spec.name,
        url_util.format(remote_specfile_path)))
    tty.debug(spec.tree())

    with open(specfile_path, 'w') as outfile:
        outfile.write(syaml.dump(spec_dict))

    # sign the tarball and spec file with gpg
    if not unsigned:
        key = select_signing_key(key)
        sign_tarball(key, force, specfile_path)

    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name=specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    web_util.push_to_url(spackfile_path,
                         remote_spackfile_path,
                         keep_original=False)
    web_util.push_to_url(specfile_path,
                         remote_specfile_path,
                         keep_original=False)

    tty.debug('Buildcache for "{0}" written to \n {1}'.format(
        spec, remote_spackfile_path))

    try:
        # push the key to the build cache's _pgp directory so it can be
        # imported
        if not unsigned:
            push_keys(outdir,
                      keys=[key],
                      regenerate_index=regenerate_index,
                      tmpdir=tmpdir)

        # create an index.json for the build_cache directory so specs can be
        # found
        if regenerate_index:
            generate_package_index(
                url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
    finally:
        shutil.rmtree(tmpdir)

    return None
Example #23
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
                  allow_root=False, key=None, regenerate_index=False):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    if not spec.concrete:
        raise ValueError('spec must be concrete to build tarball')

    # set up some paths
    build_cache_dir = build_cache_directory(outdir)

    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(build_cache_dir,
                               tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    mkdirp(tarfile_dir)
    spackfile_path = os.path.join(
        build_cache_dir, tarball_path_name(spec, '.spack'))
    if os.path.exists(spackfile_path):
        if force:
            os.remove(spackfile_path)
        else:
            raise NoOverwriteException(str(spackfile_path))
    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(
        os.path.join(build_cache_dir, specfile_name))

    if os.path.exists(specfile_path):
        if force:
            os.remove(specfile_path)
        else:
            raise NoOverwriteException(str(specfile_path))
    # make a copy of the install directory to work with
    workdir = os.path.join(tempfile.mkdtemp(), os.path.basename(spec.prefix))
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    else:
        try:
            make_package_placeholder(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    spec_dict = {}
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(
        spec.prefix, spack.store.layout.root)
    spec_dict['buildinfo'] = buildinfo
    spec_dict['full_hash'] = spec.full_hash()

    tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
        spec_dict['full_hash'], spec.name, specfile_path))
    tty.debug(spec.tree())

    with open(specfile_path, 'w') as outfile:
        outfile.write(syaml.dump(spec_dict))

    # sign the tarball and spec file with gpg
    if not unsigned:
        sign_tarball(key, force, specfile_path)
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # create an index.html for the build_cache directory so specs can be found
    if regenerate_index:
        generate_package_index(build_cache_dir)

    return None
Example #24
    def dumper(configuration):
        content = syaml.dump(configuration, default_flow_style=False)
        config_file = tmpdir / 'spack.yaml'
        config_file.write(content)
        return str(tmpdir)
Example #25
def release_jobs(parser, args):
    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
    os_container_mapping_path = os.path.join(
        share_path, 'os-container-mapping.yaml')

    with open(os_container_mapping_path, 'r') as fin:
        os_container_mapping = syaml.load(fin)

    try:
        validate(os_container_mapping, mapping_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed os-container-mapping configuration object')
        tty.error(os_container_mapping)
        tty.debug(val_err)
        return

    containers = os_container_mapping['containers']

    if args.specs:
        # Just print out the spec labels and all dependency edges in
        # a json format.
        spec_list = [Spec(s) for s in args.specs]
        with open(args.specs_deps_output, 'w') as out:
            compute_spec_deps(spec_list, out)
        return

    current_system = sys_type() if args.resolve_deps_locally else None

    release_specs_path = args.spec_set
    if not release_specs_path:
        raise SpackError('Must provide path to release spec-set')

    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)

    mirror_url = args.mirror_url

    if not mirror_url:
        raise SpackError('Must provide url of target binary mirror')

    cdash_url = args.cdash_url

    spec_labels, dependencies, stages = stage_spec_jobs(
        release_spec_set, containers, current_system)

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)

    output_object = {}
    job_count = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
    stage = 0

    for stage_jobs in stages:
        stage_name = stage_names[stage]

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']

            pkg_compiler = release_spec.compiler
            pkg_hash = release_spec.dag_hash()

            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname)
            container_info = containers[osname]
            build_image = container_info['image']

            job_scripts = ['./bin/rebuild-package.sh']

            if 'setup_script' in container_info:
                job_scripts.insert(
                    0, container_info['setup_script'] % pkg_compiler)

            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[dep_label]['spec'], osname)
                        for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': {
                    'MIRROR_URL': mirror_url,
                    'CDASH_BASE_URL': cdash_url,
                    'HASH': pkg_hash,
                    'DEPENDENCIES': ';'.join(job_dependencies),
                    'ROOT_SPEC': str(root_spec),
                },
                'script': job_scripts,
                'image': build_image,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
            }

            # If we see 'compilers' in the container information, it's a
            # filter for the compilers this container can handle; otherwise we
            # assume it can handle any compiler
            if 'compilers' in container_info:
                do_job = False
                for item in container_info['compilers']:
                    container_compiler_spec = CompilerSpec(item['name'])
                    if pkg_compiler == container_compiler_spec:
                        do_job = True
            else:
                do_job = True

            if args.shared_runner_tag:
                job_object['tags'] = [args.shared_runner_tag]

            if args.signing_key:
                job_object['variables']['SIGN_KEY_HASH'] = args.signing_key

            if do_job:
                output_object[job_name] = job_object
                job_count += 1

        stage += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))

    final_stage = 'stage-rebuild-index'

    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_url,
        },
        'image': build_image,
        'script': './bin/rebuild-index.sh',
    }

    if args.shared_runner_tag:
        final_job['tags'] = [args.shared_runner_tag]

    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)
    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump(output_object))
Example #26
def _create_config(scope=None, data={}, section='packages'):
    scope = scope or spack.config.default_modify_scope()
    cfg_file = spack.config.config.get_config_filename(scope, section)
    with open(cfg_file, 'w') as f:
        syaml.dump(data, stream=f)
    return cfg_file
Example #27
    def to_yaml(self, stream=None):
        return syaml.dump(self.to_dict(), stream)
Example #28
def ci_rebuild(args):
    """Check a single spec against the remote mirror, and rebuild it from
       source if the mirror does not contain the full hash match of the spec
       as computed locally. """
    env = ev.get_env(args, 'ci rebuild', required=True)

    # Make sure the environment is "gitlab-enabled", or else there's nothing
    # to do.
    yaml_root = ev.config_dict(env.yaml)
    gitlab_ci = None
    if 'gitlab-ci' in yaml_root:
        gitlab_ci = yaml_root['gitlab-ci']

    if not gitlab_ci:
        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

    # Grab the environment variables we need.  These either come from the
    # pipeline generation step ("spack ci generate"), where they were written
    # out as variables, or else provided by GitLab itself.
    pipeline_artifacts_dir = get_env_var('SPACK_ARTIFACTS_ROOT')
    job_log_dir = get_env_var('SPACK_JOB_LOG_DIR')
    repro_dir = get_env_var('SPACK_JOB_REPRO_DIR')
    local_mirror_dir = get_env_var('SPACK_LOCAL_MIRROR_DIR')
    concrete_env_dir = get_env_var('SPACK_CONCRETE_ENV_DIR')
    ci_pipeline_id = get_env_var('CI_PIPELINE_ID')
    ci_job_name = get_env_var('CI_JOB_NAME')
    signing_key = get_env_var('SPACK_SIGNING_KEY')
    root_spec = get_env_var('SPACK_ROOT_SPEC')
    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
    spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
    pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
    remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

    # Construct absolute paths relative to current $CI_PROJECT_DIR
    ci_project_dir = get_env_var('CI_PROJECT_DIR')
    pipeline_artifacts_dir = os.path.join(ci_project_dir,
                                          pipeline_artifacts_dir)
    job_log_dir = os.path.join(ci_project_dir, job_log_dir)
    repro_dir = os.path.join(ci_project_dir, repro_dir)
    local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
    concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)

    # Debug print some of the key environment variables we should have received
    tty.debug('pipeline_artifacts_dir = {0}'.format(pipeline_artifacts_dir))
    tty.debug('root_spec = {0}'.format(root_spec))
    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
    tty.debug('compiler_action = {0}'.format(compiler_action))

    # Query the environment manifest to find out whether we're reporting to a
    # CDash instance, and if so, gather some information from the manifest to
    # support that task.
    enable_cdash = False
    if 'cdash' in yaml_root:
        enable_cdash = True
        ci_cdash = yaml_root['cdash']
        job_spec_buildgroup = ci_cdash['build-group']
        cdash_base_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']
        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
        tty.debug('cdash_project = {0}'.format(cdash_project))
        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
        tty.debug('cdash_site = {0}'.format(cdash_site))
        tty.debug('related_builds = {0}'.format(related_builds))
        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

    # Is this a pipeline run on a spack PR or a merge to develop?  It might
    # be neither, e.g. a pipeline run on some environment repository.
    spack_is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
    spack_is_develop_pipeline = spack_pipeline_type == 'spack_protected_branch'

    tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
        spack_is_pr_pipeline, spack_is_develop_pipeline))

    # Figure out what is our temporary storage mirror: Is it artifacts
    # buildcache?  Or temporary-storage-url-prefix?  In some cases we need to
    # force something or pipelines might not have a way to propagate build
    # artifacts from upstream to downstream jobs.
    pipeline_mirror_url = None

    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']
        pipeline_mirror_url = url_util.join(temp_storage_url_prefix,
                                            ci_pipeline_id)

    enable_artifacts_mirror = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
        if (enable_artifacts_mirror
                or (spack_is_pr_pipeline and not enable_artifacts_mirror
                    and not temp_storage_url_prefix)):
            # If you explicitly enabled the artifacts buildcache feature, or
            # if this is a PR pipeline but you did not enable either of the
            # per-pipeline temporary storage features, we force the use of
            # artifacts buildcache.  Otherwise jobs will not have binary
            # dependencies from previous stages available since we do not
            # allow pushing binaries to the remote mirror during PR pipelines.
            enable_artifacts_mirror = True
            pipeline_mirror_url = 'file://' + local_mirror_dir
            mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
                pipeline_mirror_url)
            tty.debug(mirror_msg)

    # Whatever form of root_spec we got, use it to get a map giving us concrete
    # specs for this job and all of its dependencies.
    spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name,
                                           related_builds, compiler_action)
    job_spec = spec_map[job_spec_pkg_name]

    job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
    job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)

    # To provide logs, cdash reports, etc for developer download/perusal,
    # these things have to be put into artifacts.  This means downstream
    # jobs that "need" this job will get those artifacts too.  So here we
    # need to clean out the artifacts we may have got from upstream jobs.

    cdash_report_dir = os.path.join(pipeline_artifacts_dir, 'cdash_report')
    if os.path.exists(cdash_report_dir):
        shutil.rmtree(cdash_report_dir)

    if os.path.exists(job_log_dir):
        shutil.rmtree(job_log_dir)

    if os.path.exists(repro_dir):
        shutil.rmtree(repro_dir)

    # Now that we removed them if they existed, create the directories we
    # need for storing artifacts.  The cdash_report directory will be
    # created internally if needed.
    os.makedirs(job_log_dir)
    os.makedirs(repro_dir)

    # Copy the concrete environment files to the repro directory so we can
    # expose them as artifacts and not conflict with the concrete environment
    # files we got as artifacts from the upstream pipeline generation job.
    # Try to cast a slightly wider net too, and hopefully get the generated
    # pipeline yaml.  If we miss it, the user will still be able to go to the
    # pipeline generation job and get it from there.
    target_dirs = [concrete_env_dir, pipeline_artifacts_dir]

    for dir_to_list in target_dirs:
        for file_name in os.listdir(dir_to_list):
            src_file = os.path.join(dir_to_list, file_name)
            if os.path.isfile(src_file):
                dst_file = os.path.join(repro_dir, file_name)
                shutil.copyfile(src_file, dst_file)

    # If signing key was provided via "SPACK_SIGNING_KEY", then try to
    # import it.
    if signing_key:
        spack_ci.import_signing_key(signing_key)

    # Depending on the specifics of this job, we might need to turn on the
    # "config:install_missing compilers" option (to build this job spec
    # with a bootstrapped compiler), or possibly run "spack compiler find"
    # (to build a bootstrap compiler or one of its deps in a
    # compiler-agnostic way), or maybe do nothing at all (to build a spec
    # using a compiler already installed on the target system).
    spack_ci.configure_compilers(compiler_action)

    # Write this job's spec yaml into the reproduction directory, and it will
    # also be used in the generated "spack install" command to install the spec
    tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
    with open(job_spec_yaml_path, 'w') as fd:
        fd.write(job_spec.to_yaml(hash=ht.build_hash))

    # Write the concrete root spec yaml into the reproduction directory
    root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
    with open(root_spec_yaml_path, 'w') as fd:
        fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

    # Write some other details to aid in reproduction into an artifact
    repro_file = os.path.join(repro_dir, 'repro.json')
    repro_details = {
        'job_name': ci_job_name,
        'job_spec_yaml': job_spec_yaml_file,
        'root_spec_yaml': 'root.yaml',
        'ci_project_dir': ci_project_dir
    }
    with open(repro_file, 'w') as fd:
        fd.write(json.dumps(repro_details))

    # Write information about spack into an artifact in the repro dir
    spack_info = spack_ci.get_spack_info()
    spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
    with open(spack_info_file, 'w') as fd:
        fd.write('\n{0}\n'.format(spack_info))

    # If we decided there should be a temporary storage mechanism, add that
    # mirror now so it's used when we check for a full hash match already
    # built for this spec.
    if pipeline_mirror_url:
        spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
                         pipeline_mirror_url, cfg.default_modify_scope())

    cdash_build_id = None
    cdash_build_stamp = None

    # Check configured mirrors for a built spec with a matching full hash
    matches = bindist.get_mirrors_for_spec(job_spec,
                                           full_hash_match=True,
                                           index_only=False)

    if matches:
        # Got a full hash match on at least one configured mirror.  All
        # matches represent the fully up-to-date spec, so should all be
        # equivalent.  If artifacts mirror is enabled, we just pick one
        # of the matches and download the buildcache files from there to
        # the artifacts, so they're available to be used by dependent
        # jobs in subsequent stages.
        tty.msg('No need to rebuild {0}, found full hash match at: '.format(
            job_spec_pkg_name))
        for match in matches:
            tty.msg('    {0}'.format(match['mirror_url']))
        if enable_artifacts_mirror:
            matching_mirror = matches[0]['mirror_url']
            build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')
            tty.debug('Getting {0} buildcache from {1}'.format(
                job_spec_pkg_name, matching_mirror))
            tty.debug('Downloading to {0}'.format(build_cache_dir))
            buildcache.download_buildcache_files(job_spec, build_cache_dir,
                                                 False, matching_mirror)

        # Now we are done and successful
        sys.exit(0)

    # No full hash match anywhere means we need to rebuild spec

    # Start with spack arguments
    install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]

    config = cfg.get('config')
    if not config['verify_ssl']:
        install_args.append('-k')

    install_args.extend([
        'install',
        '--keep-stage',
        '--require-full-hash-match',
    ])

    can_verify = spack_ci.can_verify_binaries()
    verify_binaries = can_verify and spack_is_pr_pipeline is False
    if not verify_binaries:
        install_args.append('--no-check-signature')

    # If CDash reporting is enabled, we first register this build with
    # the specified CDash instance, then relate the build to those of
    # its dependencies.
    if enable_cdash:
        tty.debug('CDash: Registering build')
        (cdash_build_id, cdash_build_stamp) = spack_ci.register_cdash_build(
            cdash_build_name, cdash_base_url, cdash_project, cdash_site,
            job_spec_buildgroup)

        if cdash_build_id is not None:
            cdash_upload_url = '{0}/submit.php?project={1}'.format(
                cdash_base_url, cdash_project_enc)

            install_args.extend([
                '--cdash-upload-url',
                cdash_upload_url,
                '--cdash-build',
                cdash_build_name,
                '--cdash-site',
                cdash_site,
                '--cdash-buildstamp',
                cdash_build_stamp,
            ])

            tty.debug('CDash: Relating build with dependency builds')
            spack_ci.relate_cdash_builds(
                spec_map, cdash_base_url, cdash_build_id, cdash_project,
                [pipeline_mirror_url, pr_mirror_url, remote_mirror_url])

    # A compiler action of 'FIND_ANY' means we are building a bootstrap
    # compiler or one of its deps.
    # TODO: when compilers are dependencies, we should include --no-add
    if compiler_action != 'FIND_ANY':
        install_args.append('--no-add')

    # TODO: once we have the concrete spec registry, use the DAG hash
    # to identify the spec to install, rather than the concrete spec
    # yaml file.
    install_args.extend(['-f', job_spec_yaml_path])

    tty.debug('Installing {0} from source'.format(job_spec.name))
    tty.debug('spack install arguments: {0}'.format(install_args))

    # Write the install command to a shell script
    with open('install.sh', 'w') as fd:
        fd.write('#!/bin/bash\n\n')
        fd.write('\n# spack install command\n')
        fd.write(' '.join(['"{0}"'.format(i) for i in install_args]))
        fd.write('\n')

    st = os.stat('install.sh')
    os.chmod('install.sh', st.st_mode | stat.S_IEXEC)

    install_copy_path = os.path.join(repro_dir, 'install.sh')
    shutil.copyfile('install.sh', install_copy_path)

    # Run the generated install.sh shell script as if it were being run in
    # a login shell.
    try:
        install_process = subprocess.Popen(['bash', '-l', './install.sh'])
        install_process.wait()
        install_exit_code = install_process.returncode
    except (ValueError, subprocess.CalledProcessError, OSError) as inst:
        tty.error('Encountered error running install script')
        tty.error(inst)

    # Now do the post-install tasks
    tty.debug('spack install exited {0}'.format(install_exit_code))

    # If a spec fails to build in a spack develop pipeline, we add it to a
    # list of known broken full hashes.  This allows spack PR pipelines to
    # avoid wasting compute cycles attempting to build those hashes.
    if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
        tty.debug('Install failed on develop')
        if 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
            dev_fail_hash = job_spec.full_hash()
            broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
            tty.msg('Reporting broken develop build as: {0}'.format(
                broken_spec_path))
            tmpdir = tempfile.mkdtemp()
            empty_file_path = os.path.join(tmpdir, 'empty.txt')

            broken_spec_details = {
                'broken-spec': {
                    'job-url': get_env_var('CI_JOB_URL'),
                    'pipeline-url': get_env_var('CI_PIPELINE_URL'),
                    'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
                }
            }

            try:
                with open(empty_file_path, 'w') as efd:
                    efd.write(syaml.dump(broken_spec_details))
                web_util.push_to_url(empty_file_path,
                                     broken_spec_path,
                                     keep_original=False,
                                     extra_args={'ContentType': 'text/plain'})
            except Exception as err:
                # If we got some kind of S3 error (access denied or other
                # connection problem), the first non-boto-specific class in the
                # exception hierarchy is Exception.  Just print a warning and return
                msg = 'Error writing to broken specs list {0}: {1}'.format(
                    broken_spec_path, err)
                tty.warn(msg)
            finally:
                shutil.rmtree(tmpdir)

    # We generated the "spack install ..." command to "--keep-stage", copy
    # any logs from the staging directory to artifacts now
    spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

    # Create buildcache on remote mirror, either on pr-specific mirror or
    # on the main mirror defined in the gitlab-enabled spack environment
    if spack_is_pr_pipeline:
        buildcache_mirror_url = pr_mirror_url
    else:
        buildcache_mirror_url = remote_mirror_url

    # If the install succeeded, create a buildcache entry for this job spec
    # and push it to one or more mirrors.  If the install did not succeed,
    # print out some instructions on how to reproduce this build failure
    # outside of the pipeline environment.
    if install_exit_code == 0:
        can_sign = spack_ci.can_sign_binaries()
        sign_binaries = can_sign and spack_is_pr_pipeline is False

        # Create buildcache in either the main remote mirror, or in the
        # per-PR mirror, if this is a PR pipeline
        if buildcache_mirror_url:
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          buildcache_mirror_url, sign_binaries)

            if cdash_build_id:
                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
                    cdash_build_id, buildcache_mirror_url))
                spack_ci.write_cdashid_to_mirror(cdash_build_id, job_spec,
                                                 buildcache_mirror_url)

        # Create another copy of that buildcache in the per-pipeline
        # temporary storage mirror (this is only done if either
        # artifacts buildcache is enabled or a temporary storage url
        # prefix is set)
        if pipeline_mirror_url:
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          pipeline_mirror_url, sign_binaries)

            if cdash_build_id:
                tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
                    cdash_build_id, pipeline_mirror_url))
                spack_ci.write_cdashid_to_mirror(cdash_build_id, job_spec,
                                                 pipeline_mirror_url)

        # If this is a develop pipeline, check if the spec that we just built is
        # on the broken-specs list. If so, remove it.
        if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
            broken_specs_url = gitlab_ci['broken-specs-url']
            just_built_hash = job_spec.full_hash()
            broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
            if web_util.url_exists(broken_spec_path):
                tty.msg('Removing {0} from the list of broken specs'.format(
                    broken_spec_path))
                try:
                    web_util.remove_url(broken_spec_path)
                except Exception as err:
                    # If we got some kind of S3 error (access denied or other
                    # connection problem), the first non-boto-specific class in
                    # the exception hierarchy is Exception.  Just print a warning and return
                    msg = 'Error removing {0} from broken specs list: {1}'.format(
                        broken_spec_path, err)
                    tty.warn(msg)

    else:
        tty.debug('spack install exited non-zero, will not create buildcache')

        api_root_url = get_env_var('CI_API_V4_URL')
        ci_project_id = get_env_var('CI_PROJECT_ID')
        ci_job_id = get_env_var('CI_JOB_ID')

        repro_job_url = '{0}/projects/{1}/jobs/{2}/artifacts'.format(
            api_root_url, ci_project_id, ci_job_id)

        # Control characters cause this to be printed in blue so it stands out
        reproduce_msg = """

\033[34mTo reproduce this build locally, run:

    spack ci reproduce-build {0} [--working-dir <dir>]

If this project does not have public pipelines, you will need to first:

    export GITLAB_PRIVATE_TOKEN=<generated_token>

... then follow the printed instructions.\033[0;0m

""".format(repro_job_url)

        print(reproduce_msg)

    # Tie job success/failure to the success/failure of building the spec
    return install_exit_code
Example #29
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, mock_packages):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string)

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
        # full_hashes are equal
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.full_hash() == round_trip_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_json_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
Example #30
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, builtin_mock
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
        round_trip_reversed_yaml_spec = Spec.from_yaml(
            reversed_yaml_string
        )
        round_trip_reversed_json_spec = Spec.from_yaml(
            reversed_json_string
        )

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()