Example #1
File: config.py Project: d-tk/spack
def _read_config_file(filename, schema):
    """Read a YAML configuration file."""
    # Ignore nonexistent files.
    if not os.path.exists(filename):
        return None

    elif not os.path.isfile(filename):
        raise ConfigFileError(
            "Invalid configuration. %s exists but is not a file." % filename)

    elif not os.access(filename, os.R_OK):
        raise ConfigFileError("Config file is not readable: %s" % filename)

    try:
        tty.debug("Reading config file %s" % filename)
        with open(filename) as f:
            data = syaml.load(f)

        if data:
            validate_section(data, schema)
        return data

    except MarkedYAMLError as e:
        raise ConfigFileError(
            "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))

    except IOError as e:
        raise ConfigFileError(
            "Error reading configuration file %s: %s" % (filename, str(e)))
Example #2
    def test_all_is_not_a_virtual(self):
        """Verify that `all` is allowed in packages.yaml."""
        conf = syaml.load("""\
all:
        variants: [+mpi]
""")
        spack.config.set('packages', conf, scope='concretize')

        # should be no error for 'all':
        spack.package_prefs.PackagePrefs.clear_caches()
        spack.package_prefs.get_packages_config()
Example #3
    def test_all_is_not_a_virtual(self):
        """Verify that `all` is allowed in packages.yaml."""
        conf = syaml.load("""\
all:
        variants: [+mpi]
""")
        spack.config.update_config('packages', conf, 'concretize')

        # should be no error for 'all':
        spack.package_prefs._pkgsort = PreferredPackages()
        spack.package_prefs.get_packages_config()
Example #4
    def test_no_virtuals_in_packages_yaml(self):
        """Verify that virtuals are not allowed in packages.yaml."""

        # set up a packages.yaml file with a vdep as a key.  We use
        # syaml.load here to make sure source lines in the config are
        # attached to parsed strings, as the error message uses them.
        conf = syaml.load("""\
mpi:
    paths:
      [email protected]: /path/to/lapack
""")
        spack.config.set('packages', conf, scope='concretize')

        # now when we get the packages.yaml config, there should be an error
        with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
            spack.package_prefs.get_packages_config()
Example #5
def data():
    """Returns the data loaded from a test file"""
    test_file = """\
config_file:
  x86_64:
    foo: /path/to/foo
    bar: /path/to/bar
    baz: /path/to/baz
  some_list:
    - item 1
    - item 2
    - item 3
  another_list:
    [ 1, 2, 3 ]
  some_key: some_string
"""
    return syaml.load(test_file)
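
For orientation, the fixture above parses to a nested dict; these checks, derived directly from the YAML string, describe its shape:

parsed = data()
assert parsed['config_file']['x86_64']['foo'] == '/path/to/foo'
assert parsed['config_file']['some_list'][0] == 'item 1'
assert parsed['config_file']['another_list'] == [1, 2, 3]
assert parsed['config_file']['some_key'] == 'some_string'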
Example #6
def check_compiler_yaml_version():
    config_scopes = spack.config.config_scopes
    for scope in config_scopes.values():
        file_name = os.path.join(scope.path, 'compilers.yaml')
        data = None
        if os.path.isfile(file_name):
            with open(file_name) as f:
                data = syaml.load(f)

        if data:
            compilers = data['compilers']
            if len(compilers) > 0:
                if (not isinstance(compilers, list) or
                    'operating_system' not in compilers[0]['compiler']):
                    new_file = os.path.join(scope.path, '_old_compilers.yaml')
                    tty.warn('%s is in an out-of-date compilers format. '
                             'Moved to %s. Spack will automatically '
                             'generate a new compilers config file.'
                             % (file_name, new_file))
                    os.rename(file_name, new_file)
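
A sketch of a new-format entry that passes the check above; the field values are illustrative, but the layout matches modern compilers.yaml:

new_format = syaml.load("""\
compilers:
- compiler:
    spec: gcc@9.3.0
    operating_system: ubuntu20.04
    paths:
      cc: /usr/bin/gcc
      cxx: /usr/bin/g++
      f77: /usr/bin/gfortran
      fc: /usr/bin/gfortran
""")
assert isinstance(new_format['compilers'], list)
assert 'operating_system' in new_format['compilers'][0]['compiler']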
Example #7
def configure_permissions():
    conf = syaml.load("""\
all:
  permissions:
    read: group
    write: group
    group: all
mpich:
  permissions:
    read: user
    write: user
mpileaks:
  permissions:
    write: user
    group: mpileaks
callpath:
  permissions:
    write: world
""")
    spack.config.set('packages', conf, scope='concretize')

    yield
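
A hedged sketch of reading these preferences back in a test body (spack.config.get is assumed to mirror the spack.config.set call above):

pkgs = spack.config.get('packages')
assert pkgs['mpich']['permissions']['read'] == 'user'
assert pkgs['all']['permissions']['group'] == 'all'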
Example #8
    def test_external_mpi(self):
        # make sure this doesn't give us an external first.
        spec = Spec('mpi')
        spec.concretize()
        assert not spec['mpi'].external

        # load config
        conf = syaml.load("""\
all:
    providers:
        mpi: [mpich]
mpich:
    buildable: false
    paths:
        [email protected]: /dummy/path
""")
        spack.config.set('packages', conf, scope='concretize')

        # ensure that once config is in place, external is used
        spec = Spec('mpi')
        spec.concretize()
        assert spec['mpich'].external_path == '/dummy/path'
Example #9
    def from_yaml(stream):
        try:
            yfile = syaml.load(stream)
        except MarkedYAMLError as e:
            raise spack.spec.SpackYAMLError(
                "error parsing YAML ProviderIndex cache:", str(e))

        if not isinstance(yfile, dict):
            raise ProviderIndexError("YAML ProviderIndex was not a dict.")

        if 'provider_index' not in yfile:
            raise ProviderIndexError(
                "YAML ProviderIndex does not start with 'provider_index'")

        index = ProviderIndex()
        providers = yfile['provider_index']['providers']
        index.providers = _transform(
            providers,
            lambda vpkg, plist: (
                spack.spec.Spec.from_node_dict(vpkg),
                set(spack.spec.Spec.from_node_dict(p) for p in plist)))
        return index
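
The checks above imply a minimal accepted document shape; a sketch, assuming _transform tolerates an empty providers mapping (real entries hold Spec node dicts keyed by virtual package):

minimal = """\
provider_index:
  providers: {}
"""
index = ProviderIndex.from_yaml(minimal)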
Example #11
    def test_external_mpi(self):
        # make sure this doesn't give us an external first.
        spec = Spec('mpi')
        spec.concretize()
        assert not spec['mpi'].external

        # load config
        conf = syaml.load("""\
all:
    providers:
        mpi: [mpich]
mpich:
    buildable: false
    paths:
        [email protected]: /dummy/path
""")
        spack.config.update_config('packages', conf, 'concretize')

        # ensure that once config is in place, external is used
        spec = Spec('mpi')
        spec.concretize()
        assert spec['mpich'].external_path == '/dummy/path'
Example #13
def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path,
                                       env_deactivate, install_mockery,
                                       mock_packages, monkeypatch):
    """Make sure we do not generate jobs for external pkgs"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  specs:
    - archive-files
    - externaltest
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    mappings:
      - match:
          - archive-files
          - externaltest
        runner-attributes:
          tags:
            - donotcare
          image: donotcare
""")

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            monkeypatch.setattr(ci, 'SPACK_PR_MIRRORS_ROOT_URL',
                                r"file:///fake/mirror")
            ci_cmd('generate', '--output-file', outputfile)

        with open(outputfile) as f:
            yaml_contents = syaml.load(f)

        # Check that the "externaltool" package was not erroneously staged
        assert not any('externaltool' in key for key in yaml_contents)
Example #14
    def __init__(self, yaml_like, ignore_invalid=True):
        """Construct a combinatorial Spec set.

        Args:
            yaml_like: either raw YAML data as a dict, a file-like object
                to read the YAML from, or a string containing YAML.  In the
                first case, we assume already-parsed YAML data.  In the
                latter two cases, we just run yaml.load() on the data.
            ignore_invalid (bool): whether to ignore invalid specs when
                expanding the values of this spec set.
        """
        self.ignore_invalid = ignore_invalid

        if isinstance(yaml_like, dict):
            # if it's raw data, just assign it to self.data
            self.data = yaml_like
        else:
            # otherwise try to load it.
            self.data = syaml.load(yaml_like)

        # validate against the spec set schema
        validate(self.data, spec_set_schema.schema)

        # chop off the initial spec-set label after validation.
        self.data = self.data['spec-set']

        # initialize these from data.
        self.cdash = self.data.get('cdash', None)
        if isinstance(self.cdash, str):
            self.cdash = [self.cdash]
        self.project = self.data.get('project', None)

        # _spec_lists is a list of lists of specs, to be combined as a
        # cartesian product when we iterate over all specs in the set.
        # it's initialized lazily.
        self._spec_lists = None
        self._include = []
        self._exclude = []
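
The constructor's dict-or-YAML dispatch is a small pattern worth isolating; a standalone sketch, re-implemented here for illustration rather than taken from the Spack API:

import spack.util.spack_yaml as syaml

def coerce_yaml(yaml_like):
    # Parsed data passes through untouched; strings and file-like
    # objects go through the YAML parser.
    if isinstance(yaml_like, dict):
        return yaml_like
    return syaml.load(yaml_like)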
Example #15
def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
                  allow_root=False, key=None, regenerate_index=False):
    """
    Build a tarball from a given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).
    """
    if not spec.concrete:
        raise ValueError('spec must be concrete to build tarball')

    # set up some paths
    build_cache_dir = build_cache_directory(outdir)

    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_dir = os.path.join(build_cache_dir,
                               tarball_directory_name(spec))
    tarfile_path = os.path.join(tarfile_dir, tarfile_name)
    mkdirp(tarfile_dir)
    spackfile_path = os.path.join(
        build_cache_dir, tarball_path_name(spec, '.spack'))
    if os.path.exists(spackfile_path):
        if force:
            os.remove(spackfile_path)
        else:
            raise NoOverwriteException(str(spackfile_path))
    # need to copy the spec file so the build cache can be downloaded
    # without concretizing with the current spack packages
    # and preferences
    spec_file = os.path.join(spec.prefix, ".spack", "spec.yaml")
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.realpath(
        os.path.join(build_cache_dir, specfile_name))

    if os.path.exists(specfile_path):
        if force:
            os.remove(specfile_path)
        else:
            raise NoOverwriteException(str(specfile_path))
    # make a copy of the install directory to work with
    workdir = os.path.join(tempfile.mkdtemp(), os.path.basename(spec.prefix))
    install_tree(spec.prefix, workdir, symlinks=True)

    # create info for later relocation and create tar
    write_buildinfo_file(spec.prefix, workdir, rel=rel)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
    if rel:
        try:
            make_package_relative(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    else:
        try:
            make_package_placeholder(workdir, spec.prefix, allow_root)
        except Exception as e:
            shutil.rmtree(workdir)
            shutil.rmtree(tarfile_dir)
            tty.die(str(e))
    # create compressed tarball of the install prefix
    with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
        tar.add(name='%s' % workdir,
                arcname='%s' % os.path.basename(spec.prefix))
    # remove copy of install directory
    shutil.rmtree(workdir)

    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # add sha256 checksum to spec.yaml
    spec_dict = {}
    with open(spec_file, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = {}
    bchecksum['hash_algorithm'] = 'sha256'
    bchecksum['hash'] = checksum
    spec_dict['binary_cache_checksum'] = bchecksum
    # Add original install prefix relative to layout root to spec.yaml.
    # This will be used to determine if the directory layout has changed.
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(
        spec.prefix, spack.store.layout.root)
    spec_dict['buildinfo'] = buildinfo
    spec_dict['full_hash'] = spec.full_hash()

    tty.debug('The full_hash ({0}) of {1} will be written into {2}'.format(
        spec_dict['full_hash'], spec.name, specfile_path))
    tty.debug(spec.tree())

    with open(specfile_path, 'w') as outfile:
        outfile.write(syaml.dump(spec_dict))

    # sign the tarball and spec file with gpg
    if not unsigned:
        sign_tarball(key, force, specfile_path)
    # put tarball, spec and signature files in .spack archive
    with closing(tarfile.open(spackfile_path, 'w')) as tar:
        tar.add(name='%s' % tarfile_path, arcname='%s' % tarfile_name)
        tar.add(name='%s' % specfile_path, arcname='%s' % specfile_name)
        if not unsigned:
            tar.add(name='%s.asc' % specfile_path,
                    arcname='%s.asc' % specfile_name)

    # cleanup file moved to archive
    os.remove(tarfile_path)
    if not unsigned:
        os.remove('%s.asc' % specfile_path)

    # create an index.html for the build_cache directory so specs can be found
    if regenerate_index:
        generate_package_index(build_cache_dir)

    return None
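
For reference, the metadata appended to spec.yaml by the block above serializes roughly like this; the digest and prefix are made-up values:

import spack.util.spack_yaml as syaml

example = {
    'binary_cache_checksum': {
        'hash_algorithm': 'sha256',
        'hash': '9f86d081884c7d659a2feaa0c55ad015...',
    },
    'buildinfo': {
        'relative_prefix': 'linux-x86_64/gcc-9.3.0/zlib-1.2.11-abcdefg',
    },
}
print(syaml.dump(example))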
Example #16
def test_ci_generate_for_pr_pipeline(tmpdir, mutable_mock_env_path,
                                     env_deactivate, install_mockery,
                                     mock_packages, monkeypatch):
    """Test that PR pipelines do not include a final stage job for
    rebuilding the mirror index, even if that job is specifically
    configured"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  specs:
    - flatten-deps
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    enable-artifacts-buildcache: True
    mappings:
      - match:
          - flatten-deps
        runner-attributes:
          tags:
            - donotcare
      - match:
          - dependency-install
        runner-attributes:
          tags:
            - donotcare
    service-job-attributes:
      image: donotcare
      tags: [donotcare]
    rebuild-index: False
""")

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            os.environ['SPACK_IS_PR_PIPELINE'] = 'True'
            os.environ['SPACK_PR_BRANCH'] = 'fake-test-branch'
            monkeypatch.setattr(ci, 'SPACK_PR_MIRRORS_ROOT_URL',
                                r"file:///fake/mirror")
            try:
                ci_cmd('generate', '--output-file', outputfile)
            finally:
                del os.environ['SPACK_IS_PR_PIPELINE']
                del os.environ['SPACK_PR_BRANCH']

        with open(outputfile) as f:
            contents = f.read()
            print('generated contents: ')
            print(contents)
            yaml_contents = syaml.load(contents)

            assert ('rebuild-index' not in yaml_contents)

            for ci_key in yaml_contents.keys():
                if ci_key.startswith('(specs) '):
                    job_object = yaml_contents[ci_key]
                    job_vars = job_object['variables']
                    assert ('SPACK_IS_PR_PIPELINE' in job_vars)
                    assert (job_vars['SPACK_IS_PR_PIPELINE'] == 'True')
Example #17
def test_ci_generate_bootstrap_prune_dag(install_mockery_mutable_config,
                                         mock_packages, mock_fetch,
                                         mock_archive, mutable_config,
                                         monkeypatch, tmpdir,
                                         mutable_mock_env_path,
                                         env_deactivate):
    """Test compiler bootstrapping with DAG pruning.  Specifically, make
       sure that if we detect the bootstrapped compiler needs to be rebuilt,
       we ensure the spec we want to build with that compiler is scheduled
       for rebuild as well."""

    # Create a temp mirror directory for buildcache usage
    mirror_dir = tmpdir.join('mirror_dir')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    # Install a compiler, because we want to put it in a buildcache
    install_cmd('[email protected]%[email protected]')

    # Put installed compiler in the buildcache
    buildcache_cmd('create', '-u', '-a', '-f', '-d', mirror_dir.strpath,
                   '[email protected]%[email protected]')

    # Now uninstall the compiler
    uninstall_cmd('-y', '[email protected]%[email protected]')

    monkeypatch.setattr(spack.concretize.Concretizer,
                        'check_for_compiler_existence', False)
    spack.config.set('config:install_missing_compilers', True)
    assert CompilerSpec('[email protected]') not in compilers.all_compiler_specs()

    # Configure the mirror where we put that buildcache w/ the compiler
    mirror_cmd('add', 'test-mirror', mirror_url)

    install_cmd('--no-check-signature', 'a%[email protected]')

    # Put spec built with installed compiler in the buildcache
    buildcache_cmd('create', '-u', '-a', '-f', '-d', mirror_dir.strpath,
                   'a%[email protected]')

    # Now uninstall the spec
    uninstall_cmd('-y', 'a%[email protected]')

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  definitions:
    - bootstrap:
      - [email protected]%[email protected]
  specs:
    - a%[email protected]
  mirrors:
    atestm: {0}
  gitlab-ci:
    bootstrap:
      - name: bootstrap
        compiler-agnostic: true
    mappings:
      - match:
          - arch=test-debian6-x86_64
        runner-attributes:
          tags:
            - donotcare
      - match:
          - arch=test-debian6-core2
        runner-attributes:
          tags:
            - meh
""".format(mirror_url))

    # Without this monkeypatch, pipeline generation process would think that
    # nothing in the environment needs rebuilding.  With the monkeypatch, the
    # process sees the compiler as needing a rebuild, which should then result
    # in the specs built with that compiler needing a rebuild too.
    def fake_get_mirrors_for_spec(spec=None,
                                  full_hash_match=False,
                                  mirrors_to_check=None,
                                  index_only=False):
        if spec.name == 'gcc':
            return []
        else:
            return [{
                'spec': spec,
                'mirror_url': mirror_url,
            }]

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            monkeypatch.setattr(ci, 'SPACK_PR_MIRRORS_ROOT_URL',
                                r"file:///fake/mirror")

            ci_cmd('generate', '--output-file', outputfile)

            with open(outputfile) as of:
                yaml_contents = of.read()
                original_yaml_contents = syaml.load(yaml_contents)

            # without the monkeypatch, everything appears up to date and no
            # rebuild jobs are generated.
            assert (original_yaml_contents)
            assert ('no-specs-to-rebuild' in original_yaml_contents)

            monkeypatch.setattr(spack.binary_distribution,
                                'get_mirrors_for_spec',
                                fake_get_mirrors_for_spec)

            ci_cmd('generate', '--output-file', outputfile)

            with open(outputfile) as of:
                yaml_contents = of.read()
                new_yaml_contents = syaml.load(yaml_contents)

            assert (new_yaml_contents)

            # This 'needs' graph reflects that even though specs 'a' and 'b' do
            # not otherwise need to be rebuilt (thanks to DAG pruning), they
            # both end up in the generated pipeline because the compiler they
            # depend on is bootstrapped, and *does* need to be rebuilt.
            needs_graph = {
                '(bootstrap) gcc': [],
                '(specs) b': [
                    '(bootstrap) gcc',
                ],
                '(specs) a': [
                    '(bootstrap) gcc',
                    '(specs) b',
                ],
            }

            _validate_needs_graph(new_yaml_contents, needs_graph, False)
Example #18
def test_ci_generate_debug_with_custom_spack(tmpdir, mutable_mock_env_path,
                                             env_deactivate, install_mockery,
                                             mock_packages):
    """Make sure we generate cloning of spack in job script if needed"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  specs:
    - archive-files
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    enable-artifacts-buildcache: True
    enable-debug-messages: True
    mappings:
      - match:
          - archive-files
        runner-attributes:
          tags:
            - donotcare
          image: donotcare
""")

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            spack_repo = 'https://github.com/usera/spack.git'
            spack_ref = 'custom-branch'
            expected_clone_str = 'git clone "{0}"'.format(spack_repo)

            ci_cmd('generate', '--output-file', outfile, '--spack-repo',
                   spack_repo, '--spack-ref', spack_ref)

            with open(outfile) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)
                for ci_key in yaml_contents.keys():
                    if '(specs)' in ci_key:
                        next_job = yaml_contents[ci_key]
                        print(next_job)
                        assert ('before_script' in next_job)
                        before_script = next_job['before_script']
                        for step in before_script:
                            if expected_clone_str in step:
                                break
                        else:
                            msg = 'job "{0}" did not clone spack repo'.format(
                                ci_key)
                            print(msg)
                            assert (False)

                        assert ('script' in next_job)
                        script = next_job['script']
                        for step in script:
                            if 'spack -d ci rebuild' in step:
                                break
                        else:
                            msg = 'job {0} missing rebuild command'.format(
                                ci_key)
                            print(msg)
                            assert (False)
Example #19
    def from_yaml(stream):
        try:
            yfile = syaml.load(stream)
        except MarkedYAMLError as e:
            raise spack.spec.SpackYAMLError(
                "error parsing YAML ProviderIndex cache:", str(e))
Example #20
def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
                                           env_deactivate, install_mockery,
                                           mock_packages, monkeypatch):
    """Test that we get the behavior we want with respect to the provision
       of runner attributes like tags, variables, and scripts, both when we
       inherit them from the top level, as well as when we override one or
       more at the runner level"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  specs:
    - flatten-deps
    - a
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    tags:
      - toplevel
    variables:
      ONE: toplevelvarone
      TWO: toplevelvartwo
    before_script:
      - pre step one
      - pre step two
    script:
      - main step
    after_script:
      - post step one
    mappings:
      - match:
          - flatten-deps
        runner-attributes:
          tags:
            - specific-one
          variables:
            THREE: specificvarthree
      - match:
          - dependency-install
      - match:
          - a
        runner-attributes:
          tags:
            - specific-a
            - toplevel
          variables:
            ONE: specificvarone
            TWO: specificvartwo
          before_script:
            - custom pre step one
          script:
            - custom main step
          after_script:
            - custom post step one
    final-stage-rebuild-index:
      image: donotcare
      tags: [donotcare]
""")

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            monkeypatch.setattr(spack.main, 'get_version',
                                lambda: '0.15.3-416-12ad69eb1')
            ci_cmd('generate', '--output-file', outputfile)

        with open(outputfile) as f:
            contents = f.read()
            print('generated contents: ')
            print(contents)
            yaml_contents = syaml.load(contents)

            assert ('variables' in yaml_contents)
            global_vars = yaml_contents['variables']
            assert ('SPACK_VERSION' in global_vars)
            assert (global_vars['SPACK_VERSION'] == '0.15.3-416-12ad69eb1')
            assert ('SPACK_CHECKOUT_VERSION' in global_vars)
            assert (global_vars['SPACK_CHECKOUT_VERSION'] == '12ad69eb1')

            for ci_key in yaml_contents.keys():
                if '(specs) b' in ci_key:
                    print('Should not have staged "b" w/out a match')
                    assert (False)
                if '(specs) a' in ci_key:
                    # Make sure a's attributes override variables, and all the
                    # scripts.  Also, make sure the 'toplevel' tag doesn't
                    # appear twice, but that a's specific extra tag does appear
                    the_elt = yaml_contents[ci_key]
                    assert (the_elt['variables']['ONE'] == 'specificvarone')
                    assert (the_elt['variables']['TWO'] == 'specificvartwo')
                    assert ('THREE' not in the_elt['variables'])
                    assert (len(the_elt['tags']) == 2)
                    assert ('specific-a' in the_elt['tags'])
                    assert ('toplevel' in the_elt['tags'])
                    assert (len(the_elt['before_script']) == 1)
                    assert (
                        the_elt['before_script'][0] == 'custom pre step one')
                    assert (len(the_elt['script']) == 1)
                    assert (the_elt['script'][0] == 'custom main step')
                    assert (len(the_elt['after_script']) == 1)
                    assert (
                        the_elt['after_script'][0] == 'custom post step one')
                if '(specs) dependency-install' in ci_key:
                    # Since the dependency-install match omits any
                    # runner-attributes, make sure it inherited all the
                    # top-level attributes.
                    the_elt = yaml_contents[ci_key]
                    assert (the_elt['variables']['ONE'] == 'toplevelvarone')
                    assert (the_elt['variables']['TWO'] == 'toplevelvartwo')
                    assert ('THREE' not in the_elt['variables'])
                    assert (len(the_elt['tags']) == 1)
                    assert (the_elt['tags'][0] == 'toplevel')
                    assert (len(the_elt['before_script']) == 2)
                    assert (the_elt['before_script'][0] == 'pre step one')
                    assert (the_elt['before_script'][1] == 'pre step two')
                    assert (len(the_elt['script']) == 1)
                    assert (the_elt['script'][0] == 'main step')
                    assert (len(the_elt['after_script']) == 1)
                    assert (the_elt['after_script'][0] == 'post step one')
                if '(specs) flatten-deps' in ci_key:
                    # The flatten-deps match specifies that we keep the two
                    # top-level variables, but add a third specific one.  It
                    # also adds a custom tag which should be combined with
                    # the top-level tag.
                    the_elt = yaml_contents[ci_key]
                    assert (the_elt['variables']['ONE'] == 'toplevelvarone')
                    assert (the_elt['variables']['TWO'] == 'toplevelvartwo')
                    assert (
                        the_elt['variables']['THREE'] == 'specificvarthree')
                    assert (len(the_elt['tags']) == 2)
                    assert ('specific-one' in the_elt['tags'])
                    assert ('toplevel' in the_elt['tags'])
                    assert (len(the_elt['before_script']) == 2)
                    assert (the_elt['before_script'][0] == 'pre step one')
                    assert (the_elt['before_script'][1] == 'pre step two')
                    assert (len(the_elt['script']) == 1)
                    assert (the_elt['script'][0] == 'main step')
                    assert (len(the_elt['after_script']) == 1)
                    assert (the_elt['after_script'][0] == 'post step one')
Example #21
def view(parser, args):
    'Produce a view of a set of packages.'

    specs = spack.cmd.parse_specs(args.specs)
    path = args.path[0]

    if args.action in actions_link and args.projection_file:
        # argparse confirms file exists
        with open(args.projection_file, 'r') as f:
            projections_data = s_yaml.load(f)
            validate(projections_data, spack.schema.projections.schema)
            ordered_projections = projections_data['projections']
    else:
        ordered_projections = {}

    # What method are we using for this view
    if args.action in ("hardlink", "hard"):
        link_fn = view_hardlink
    elif args.action in ("copy", "relocate"):
        link_fn = view_copy
    else:
        link_fn = view_symlink

    view = YamlFilesystemView(path,
                              spack.store.layout,
                              projections=ordered_projections,
                              ignore_conflicts=getattr(args,
                                                       "ignore_conflicts",
                                                       False),
                              link=link_fn,
                              verbose=args.verbose)

    # Process common args and specs
    if getattr(args, "all", False):
        specs = view.get_all_specs()
        if len(specs) == 0:
            tty.warn("Found no specs in %s" % path)

    elif args.action in actions_link:
        # only link commands need to disambiguate specs
        env = ev.get_env(args, 'view link')
        specs = [spack.cmd.disambiguate_spec(s, env) for s in specs]

    elif args.action in actions_status:
        # no specs implies all
        if len(specs) == 0:
            specs = view.get_all_specs()
        else:
            specs = disambiguate_in_view(specs, view)

    else:
        # status and remove can map a partial spec to packages in view
        specs = disambiguate_in_view(specs, view)

    with_dependencies = args.dependencies.lower() in ['true', 'yes']

    # Map action to corresponding functionality
    if args.action in actions_link:
        try:
            view.add_specs(*specs,
                           with_dependencies=with_dependencies,
                           exclude=args.exclude)
        except MergeConflictError:
            tty.info("Some file blocked the merge, adding the '-i' flag will "
                     "ignore this conflict. For more information see e.g. "
                     "https://github.com/spack/spack/issues/9029")
            raise

    elif args.action in actions_remove:
        view.remove_specs(*specs,
                          with_dependencies=with_dependencies,
                          exclude=args.exclude,
                          with_dependents=not args.no_remove_dependents)

    elif args.action in actions_status:
        view.print_status(*specs, with_dependencies=with_dependencies)

    else:
        tty.error('Unknown action: "%s"' % args.action)
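
A sketch of a projections file accepted by the branch above; the patterns are illustrative, and the 'projections' top-level key is what the schema validation expects:

projections_yaml = """\
projections:
  zlib: '{name}-{version}'
  all: '{name}/{version}-{compiler.name}'
"""
ordered = s_yaml.load(projections_yaml)['projections']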
Example #23
    def from_yaml(stream, name=None):
        try:
            data = syaml.load(stream)
            return Mirror.from_dict(data, name)
        except yaml_error.MarkedYAMLError as e:
            raise syaml.SpackYAMLError("error parsing YAML spec:", str(e))
Example #24
def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages, mock_fetch,
                              mock_stage, mock_gnupghome):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    signing_key_dir = spack_paths.mock_gpg_keys_path
    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
    with open(signing_key_path) as fd:
        signing_key = fd.read()

    ci.import_signing_key(signing_key)

    spack_yaml_contents = """
spack:
 definitions:
   - packages: [patchelf]
 specs:
   - $packages
 mirrors:
   test-mirror: {0}
 gitlab-ci:
   enable-artifacts-buildcache: True
   mappings:
     - match:
         - patchelf
       runner-attributes:
         tags:
           - donotcare
         image: donotcare
   service-job-attributes:
     tags:
       - nonbuildtag
     image: basicimage
""".format(mirror_url)

    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            spec_map = ci.get_concrete_specs('patchelf', 'patchelf', '',
                                             'FIND_ANY')
            concrete_spec = spec_map['patchelf']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', yaml_path)

            # env, spec, yaml_path, mirror_url, build_id, sign_binaries
            ci.push_mirror_contents(env, concrete_spec, yaml_path, mirror_url,
                                    '42', True)

            buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')

            # Now test the --prune-dag (default) option of spack ci generate
            mirror_cmd('add', 'test-ci', mirror_url)

            outputfile_pruned = str(tmpdir.join('pruned_pipeline.yml'))
            ci_cmd('generate', '--output-file', outputfile_pruned)

            with open(outputfile_pruned) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)
                assert ('no-specs-to-rebuild' in yaml_contents)
                # Make sure there are no other spec jobs or rebuild-index
                assert (len(yaml_contents.keys()) == 1)
                the_elt = yaml_contents['no-specs-to-rebuild']
                assert ('tags' in the_elt)
                assert ('nonbuildtag' in the_elt['tags'])
                assert ('image' in the_elt)
                assert (the_elt['image'] == 'basicimage')

            outputfile_not_pruned = str(tmpdir.join('unpruned_pipeline.yml'))
            ci_cmd('generate', '--no-prune-dag', '--output-file',
                   outputfile_not_pruned)

            # Test the --no-prune-dag option of spack ci generate
            with open(outputfile_not_pruned) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)

                found_spec_job = False

                for ci_key in yaml_contents.keys():
                    if '(specs) patchelf' in ci_key:
                        the_elt = yaml_contents[ci_key]
                        assert ('variables' in the_elt)
                        job_vars = the_elt['variables']
                        assert ('SPACK_SPEC_NEEDS_REBUILD' in job_vars)
                        assert (
                            job_vars['SPACK_SPEC_NEEDS_REBUILD'] == 'False')
                        found_spec_job = True

                assert (found_spec_job)

            mirror_cmd('rm', 'test-ci')

            # Test generating buildcache index while we have bin mirror
            buildcache_cmd('update-index', '--mirror-url', mirror_url)
            index_path = os.path.join(buildcache_path, 'index.json')
            with open(index_path) as idx_fd:
                index_object = json.load(idx_fd)
                validate(index_object, db_idx_schema)

            # Now that index is regenerated, validate "buildcache list" output
            buildcache_list_output = buildcache_cmd('list', output=str)
            assert ('patchelf' in buildcache_list_output)

            # Also test buildcache_spec schema
            bc_files_list = os.listdir(buildcache_path)
            for file_name in bc_files_list:
                if file_name.endswith('.spec.yaml'):
                    spec_yaml_path = os.path.join(buildcache_path, file_name)
                    with open(spec_yaml_path) as yaml_fd:
                        yaml_object = syaml.load(yaml_fd)
                        validate(yaml_object, spec_yaml_schema)

            logs_dir = working_dir.join('logs_dir')
            if not os.path.exists(logs_dir.strpath):
                os.makedirs(logs_dir.strpath)

            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)

            logs_dir_list = os.listdir(logs_dir.strpath)

            assert ('spack-build-out.txt' in logs_dir_list)

            # Also just make sure that if something goes wrong with the
            # stage logs copy, no exception is thrown
            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

            dl_dir = working_dir.join('download_dir')
            if not os.path.exists(dl_dir.strpath):
                os.makedirs(dl_dir.strpath)

            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
                           dl_dir.strpath, '--require-cdashid')

            dl_dir_list = os.listdir(dl_dir.strpath)

            assert (len(dl_dir_list) == 3)
Example #25
def release_jobs(parser, args):
    share_path = os.path.join(spack_root, 'share', 'spack', 'docker')
    os_container_mapping_path = os.path.join(
        share_path, 'os-container-mapping.yaml')

    with open(os_container_mapping_path, 'r') as fin:
        os_container_mapping = syaml.load(fin)

    try:
        validate(os_container_mapping, mapping_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed os-container-mapping configuration object')
        tty.error(os_container_mapping)
        tty.debug(val_err)
        return

    containers = os_container_mapping['containers']

    if args.specs:
        # Just print out the spec labels and all dependency edges in
        # a json format.
        spec_list = [Spec(s) for s in args.specs]
        with open(args.specs_deps_output, 'w') as out:
            compute_spec_deps(spec_list, out)
        return

    current_system = sys_type() if args.resolve_deps_locally else None

    release_specs_path = args.spec_set
    if not release_specs_path:
        raise SpackError('Must provide path to release spec-set')

    release_spec_set = CombinatorialSpecSet.from_file(release_specs_path)

    mirror_url = args.mirror_url

    if not mirror_url:
        raise SpackError('Must provide url of target binary mirror')

    cdash_url = args.cdash_url

    spec_labels, dependencies, stages = stage_spec_jobs(
        release_spec_set, containers, current_system)

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)

    output_object = {}
    job_count = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
    stage = 0

    for stage_jobs in stages:
        stage_name = stage_names[stage]

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']

            pkg_compiler = release_spec.compiler
            pkg_hash = release_spec.dag_hash()

            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname)
            container_info = containers[osname]
            build_image = container_info['image']

            job_scripts = ['./bin/rebuild-package.sh']

            if 'setup_script' in container_info:
                job_scripts.insert(
                    0, container_info['setup_script'] % pkg_compiler)

            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[dep_label]['spec'], osname)
                        for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': {
                    'MIRROR_URL': mirror_url,
                    'CDASH_BASE_URL': cdash_url,
                    'HASH': pkg_hash,
                    'DEPENDENCIES': ';'.join(job_dependencies),
                    'ROOT_SPEC': str(root_spec),
                },
                'script': job_scripts,
                'image': build_image,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
            }

            # If we see 'compilers' in the container information, it's a
            # filter for the compilers this container can handle; otherwise we
            # assume it can handle any compiler.
            if 'compilers' in container_info:
                do_job = False
                for item in container_info['compilers']:
                    container_compiler_spec = CompilerSpec(item['name'])
                    if pkg_compiler == container_compiler_spec:
                        do_job = True
            else:
                do_job = True

            if args.shared_runner_tag:
                job_object['tags'] = [args.shared_runner_tag]

            if args.signing_key:
                job_object['variables']['SIGN_KEY_HASH'] = args.signing_key

            if do_job:
                output_object[job_name] = job_object
                job_count += 1

        stage += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))

    final_stage = 'stage-rebuild-index'

    final_job = {
        'stage': final_stage,
        'variables': {
            'MIRROR_URL': mirror_url,
        },
        'image': build_image,
        'script': './bin/rebuild-index.sh',
    }

    if args.shared_runner_tag:
        final_job['tags'] = [args.shared_runner_tag]

    output_object['rebuild-index'] = final_job
    stage_names.append(final_stage)
    output_object['stages'] = stage_names

    with open(args.output_file, 'w') as outf:
        outf.write(syaml.dump(output_object))
Example #26
def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
    if not spec.concrete:
        raise ValueError('spec must be concrete to check against mirror')

    pkg_name = spec.name
    pkg_version = spec.version

    pkg_hash = spec.dag_hash()
    pkg_full_hash = spec.full_hash()

    tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
        pkg_name, pkg_version, pkg_hash, pkg_full_hash))
    tty.debug(spec.tree())

    # Try to retrieve the .spec.yaml directly, based on the known
    # format of the name, in order to determine if the package
    # needs to be rebuilt.
    cache_prefix = build_cache_prefix(mirror_url)
    spec_yaml_file_name = tarball_name(spec, '.spec.yaml')
    file_path = os.path.join(cache_prefix, spec_yaml_file_name)

    result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
        spec.short_spec, '' if rebuild_on_errors else 'not ')

    try:
        _, _, yaml_file = web_util.read_from_url(file_path)
        yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
    except (URLError, web_util.SpackWebError) as url_err:
        err_msg = [
            'Unable to determine whether {0} needs rebuilding,',
            ' caught exception attempting to read from {1}.',
        ]
        tty.error(''.join(err_msg).format(spec.short_spec, file_path))
        tty.debug(url_err)
        tty.warn(result_of_error)
        return rebuild_on_errors

    if not yaml_contents:
        tty.error('Reading {0} returned nothing'.format(file_path))
        tty.warn(result_of_error)
        return rebuild_on_errors

    spec_yaml = syaml.load(yaml_contents)

    # If either the full_hash didn't exist in the .spec.yaml file, or it
    # did, but didn't match the one we computed locally, then we should
    # just rebuild.  This can be simplified once the dag_hash and the
    # full_hash become the same thing.
    if ('full_hash' not in spec_yaml or
            spec_yaml['full_hash'] != pkg_full_hash):
        if 'full_hash' in spec_yaml:
            reason = 'hash mismatch, remote = {0}, local = {1}'.format(
                spec_yaml['full_hash'], pkg_full_hash)
        else:
            reason = 'full_hash was missing from remote spec.yaml'
        tty.msg('Rebuilding {0}, reason: {1}'.format(
            spec.short_spec, reason))
        tty.msg(spec.tree())
        return True

    return False
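
The rebuild decision above boils down to one predicate; a compact standalone restatement (the function name is illustrative, not Spack API):

def should_rebuild(remote_full_hash, local_full_hash):
    # A missing remote hash and a mismatched remote hash both
    # force a rebuild.
    return remote_full_hash is None or remote_full_hash != local_full_hash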
Example #27
def test_ci_generate_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages):
    """Make sure we can get a .gitlab-ci.yml from an environment file
       which has the gitlab-ci, cdash, and mirrors sections."""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  definitions:
    - bootstrap:
      - [email protected]
    - old-gcc-pkgs:
      - archive-files
      - callpath
      # specify ^openblas-with-lapack to ensure that builtin.mock repo flake8
      # package (which can also provide lapack) is not chosen, as it violates
      # a package-level check which requires exactly one fetch strategy (this
      # is apparently not an issue for other tests that use it).
      - [email protected] ^openblas-with-lapack
  specs:
    - matrix:
      - [$old-gcc-pkgs]
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    bootstrap:
      - name: bootstrap
        compiler-agnostic: true
    mappings:
      - match:
          - arch=test-debian6-core2
        runner-attributes:
          tags:
            - donotcare
          image: donotcare
    final-stage-rebuild-index:
      image: donotcare
      tags: [donotcare]
  cdash:
    build-group: Not important
    url: https://my.fake.cdash
    project: Not used
    site: Nothing
""")
    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            ci_cmd('generate', '--output-file', outputfile)

        with open(outputfile) as f:
            contents = f.read()
            yaml_contents = syaml.load(contents)
            found_spec = False
            for ci_key in yaml_contents.keys():
                if '(bootstrap)' in ci_key:
                    found_spec = True
                    assert ('cmake' in ci_key)
            assert (found_spec)
            assert ('stages' in yaml_contents)
            assert (len(yaml_contents['stages']) == 6)
            assert (yaml_contents['stages'][0] == 'stage-0')
            assert (yaml_contents['stages'][5] == 'stage-rebuild-index')
Example #28
def test_ci_generate_with_custom_scripts(tmpdir, mutable_mock_env_path,
                                         env_deactivate, install_mockery,
                                         mock_packages, monkeypatch):
    """Test use of user-provided scripts"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  specs:
    - archive-files
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    mappings:
      - match:
          - archive-files
        runner-attributes:
          tags:
            - donotcare
          variables:
            ONE: plain-string-value
            TWO: ${INTERP_ON_BUILD}
          before_script:
            - mkdir /some/path
            - pushd /some/path
            - git clone ${SPACK_REPO}
            - cd spack
            - git checkout ${SPACK_REF}
            - popd
          script:
            - spack -d ci rebuild
          after_script:
            - rm -rf /some/path/spack
""")

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            monkeypatch.setattr(spack.main, 'get_version', lambda: '0.15.3')
            ci_cmd('generate', '--output-file', outputfile)

            with open(outputfile) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)

                found_it = False

                assert ('variables' in yaml_contents)
                global_vars = yaml_contents['variables']
                assert ('SPACK_VERSION' in global_vars)
                assert (global_vars['SPACK_VERSION'] == '0.15.3')
                assert ('SPACK_CHECKOUT_VERSION' in global_vars)
                assert (global_vars['SPACK_CHECKOUT_VERSION'] == 'v0.15.3')

                for ci_key in yaml_contents.keys():
                    ci_obj = yaml_contents[ci_key]
                    if 'archive-files' in ci_key:
                        # Ensure we have variables, possibly interpolated
                        assert ('variables' in ci_obj)
                        var_d = ci_obj['variables']
                        assert ('ONE' in var_d)
                        assert (var_d['ONE'] == 'plain-string-value')
                        assert ('TWO' in var_d)
                        assert (var_d['TWO'] == '${INTERP_ON_BUILD}')

                        # Ensure we have scripts verbatim
                        assert ('before_script' in ci_obj)
                        before_script = ci_obj['before_script']
                        assert (before_script[0] == 'mkdir /some/path')
                        assert (before_script[1] == 'pushd /some/path')
                        assert (before_script[2] == 'git clone ${SPACK_REPO}')
                        assert (before_script[3] == 'cd spack')
                        assert (
                            before_script[4] == 'git checkout ${SPACK_REF}')
                        assert (before_script[5] == 'popd')

                        assert ('script' in ci_obj)
                        assert (ci_obj['script'][0] == 'spack -d ci rebuild')

                        assert ('after_script' in ci_obj)
                        after_script = ci_obj['after_script'][0]
                        assert (after_script == 'rm -rf /some/path/spack')

                        found_it = True

            assert (found_it)
Example #29
def extract_tarball(spec,
                    filename,
                    allow_root=False,
                    unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.bz2')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)

    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    # older buildcache tarfiles use gzip compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.gz')
        tarfile_path = os.path.join(tmpdir, tarfile_name)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                suppress = config.get('config:suppress_gpg_warnings', False)
                Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(e)
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']

    # if the checksums don't match don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(
        os.path.relpath(spec.prefix, spack.store.layout.root))
    # if the original relative prefix is in the spec file use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    # if the original relative prefix and new relative prefix differ, the
    # directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball,
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
    # install_tree would copy hardlinked files as separate copies, so
    # create a temporary tarfile from workdir and extract it into the
    # prefix; tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % workdir, arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(spec.prefix)
    os.remove(temp_tarfile_path)

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(spec.prefix, spec, allow_root)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        tty.die(str(e))
    else:
        manifest_file = os.path.join(spec.prefix,
                                     spack.store.layout.metadata_dir,
                                     spack.store.layout.manifest_file_name)
        if not os.path.exists(manifest_file):
            spec_id = spec.format('{name}/{hash:7}')
            tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        shutil.rmtree(tmpdir)
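
As a usage illustration only (the staging path and the `force` policy below are assumptions, not part of the code above), a call site might look like:

# Hypothetical call site: install from an already fetched buildcache
# archive, overwriting any existing prefix and requiring signature
# verification; `spec` is assumed to be a concrete Spec.
spackfile = os.path.join('/tmp/stage', tarball_name(spec, '.spack'))
try:
    extract_tarball(spec, spackfile, allow_root=False,
                    unsigned=False, force=True)
except (NoVerifyException, NoChecksumException) as e:
    tty.error(str(e))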
Example #30
#! /usr/bin/python3

import sys
import os
import os.path
from spack.util.spack_yaml import load, dump

if sys.platform != "linux":
    print("This script supports only Linux")
    sys.exit(1)

fn = os.environ["HOME"] + "/.spack/linux/compilers.yaml"

with open(fn, "rb") as f:
    data = load(f)

compilers = data['compilers']

for comp in compilers:
    comp = comp['compiler']
    paths = comp['paths']
    prefixes = []
    for binpath in paths.values():
        binpath = os.path.dirname(binpath)
        if os.path.basename(binpath) == 'bin':
            binpath = os.path.dirname(binpath)
            prefixes.append(binpath)
    prefix = os.path.commonprefix(prefixes)
    if prefix.startswith("/usr") or prefix == "/":
        continue
    ld_paths = os.environ.get("LD_LIBRARY_PATH", "")
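
One caveat with the script above: `os.path.commonprefix` compares strings character by character rather than path components, so the computed prefix may not be a real directory. A quick stdlib demonstration:

import os

# commonprefix is purely textual: these two paths share the character
# prefix '/opt/gcc-', which is not a directory on disk.
print(os.path.commonprefix(['/opt/gcc-9/bin', '/opt/gcc-10/bin']))
# -> /opt/gcc-
# os.path.commonpath (Python 3.4+) compares path components instead:
print(os.path.commonpath(['/opt/gcc-9/bin', '/opt/gcc-10/bin']))
# -> /opt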
Example #31
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)

    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                Gpg.verify('%s.asc' % specfile_path, specfile_path)
            except Exception as e:
                shutil.rmtree(tmpdir)
                tty.die(str(e))
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']

    # if the checksums don't match, don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(os.path.relpath(spec.prefix,
                                              spack.store.layout.root))
    # if the original relative prefix is in the spec file, use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    # if the original relative prefix and the new relative prefix differ,
    # the directory layout has changed and the buildcache cannot be installed
    if old_relative_prefix != new_relative_prefix:
        shutil.rmtree(tmpdir)
        msg = "Package tarball was created from an install "
        msg += "prefix with a different directory layout.\n"
        msg += "It cannot be relocated."
        raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # the base of the install prefix is used when creating the tarball,
    # so the pathname should be the same now that the directory layout
    # is confirmed
    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(workdir, allow_root)
    except Exception as e:
        shutil.rmtree(workdir)
        tty.die(str(e))
    # Delay creating spec.prefix until verification is complete
    # and any relocation has been done.
    else:
        install_tree(workdir, spec.prefix, symlinks=True)
    finally:
        shutil.rmtree(tmpdir)
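
The other variants of this function route the copy through a temporary tar archive because a plain per-file tree copy materializes hardlinked files as independent copies, while tar records the links and recreates them on extraction. A stdlib-only sketch of that trick, independent of Spack:

import os
import shutil
import tarfile
import tempfile

def copy_tree_preserving_hardlinks(src, dst):
    # Archive src and unpack it into dst: tar stores hardlinks as
    # links and recreates them on extraction, so linked files are not
    # duplicated the way a naive per-file copy would duplicate them.
    tmpdir = tempfile.mkdtemp()
    tar_path = os.path.join(tmpdir, 'tree.tar')
    try:
        with tarfile.open(tar_path, 'w') as tar:
            tar.add(src, arcname='.')
        if not os.path.isdir(dst):
            os.makedirs(dst)
        with tarfile.open(tar_path, 'r') as tar:
            tar.extractall(dst)
    finally:
        shutil.rmtree(tmpdir)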
Example #32
def extract_tarball(spec,
                    filename,
                    allow_root=False,
                    unsigned=False,
                    force=False):
    """
    extract binary tarball for given package into install area
    """
    if os.path.exists(spec.prefix):
        if force:
            shutil.rmtree(spec.prefix)
        else:
            raise NoOverwriteException(str(spec.prefix))

    tmpdir = tempfile.mkdtemp()
    stagepath = os.path.dirname(filename)
    spackfile_name = tarball_name(spec, '.spack')
    spackfile_path = os.path.join(stagepath, spackfile_name)
    tarfile_name = tarball_name(spec, '.tar.gz')
    tarfile_path = os.path.join(tmpdir, tarfile_name)
    specfile_name = tarball_name(spec, '.spec.yaml')
    specfile_path = os.path.join(tmpdir, specfile_name)

    with closing(tarfile.open(spackfile_path, 'r')) as tar:
        tar.extractall(tmpdir)
    # some buildcache tarfiles use bzip2 compression
    if not os.path.exists(tarfile_path):
        tarfile_name = tarball_name(spec, '.tar.bz2')
        tarfile_path = os.path.join(tmpdir, tarfile_name)
    if not unsigned:
        if os.path.exists('%s.asc' % specfile_path):
            try:
                suppress = config.get('config:suppress_gpg_warnings', False)
                Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
            except Exception as e:
                shutil.rmtree(tmpdir)
                raise e
        else:
            shutil.rmtree(tmpdir)
            raise NoVerifyException(
                "Package spec file failed signature verification.\n"
                "Use spack buildcache keys to download "
                "and install a key for verification from the mirror.")
    # get the sha256 checksum of the tarball
    checksum = checksum_tarball(tarfile_path)

    # get the sha256 checksum recorded at creation
    spec_dict = {}
    with open(specfile_path, 'r') as inputfile:
        content = inputfile.read()
        spec_dict = syaml.load(content)
    bchecksum = spec_dict['binary_cache_checksum']

    # if the checksums don't match, don't install
    if bchecksum['hash'] != checksum:
        shutil.rmtree(tmpdir)
        raise NoChecksumException(
            "Package tarball failed checksum verification.\n"
            "It cannot be installed.")

    new_relative_prefix = str(
        os.path.relpath(spec.prefix, spack.store.layout.root))
    # if the original relative prefix is in the spec file, use it
    buildinfo = spec_dict.get('buildinfo', {})
    old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
    rel = buildinfo.get('relative_rpaths')
    # if the original relative prefix and the new relative prefix differ,
    # the directory layout has changed and the buildcache cannot be
    # installed if it was created with relative rpaths
    info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
    tty.debug(info % (old_relative_prefix, new_relative_prefix, rel))
    #    if (old_relative_prefix != new_relative_prefix and (rel)):
    #        shutil.rmtree(tmpdir)
    #        msg = "Package tarball was created from an install "
    #        msg += "prefix with a different directory layout. "
    #        msg += "It cannot be relocated because it "
    #        msg += "uses relative rpaths."
    #        raise NewLayoutException(msg)

    # extract the tarball in a temp directory
    with closing(tarfile.open(tarfile_path, 'r')) as tar:
        tar.extractall(path=tmpdir)
    # get the parent directory of the file .spack/binary_distribution;
    # this should be the directory unpacked from the tarball, whose
    # name is unknown because the prefix naming is unknown
    bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
    workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
    tty.debug('workdir %s' % workdir)
    # install_tree would copy hardlinked files as separate copies, so
    # create a temporary tarfile from workdir and extract it into the
    # prefix; tarfile preserves hardlinks
    temp_tarfile_name = tarball_name(spec, '.tar')
    temp_tarfile_path = os.path.join(tmpdir, temp_tarfile_name)
    with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
        tar.add(name='%s' % workdir, arcname='.')
    with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
        tar.extractall(spec.prefix)
    os.remove(temp_tarfile_path)

    # cleanup
    os.remove(tarfile_path)
    os.remove(specfile_path)

    try:
        relocate_package(spec, allow_root)
    except Exception as e:
        shutil.rmtree(spec.prefix)
        raise e
    else:
        manifest_file = os.path.join(spec.prefix,
                                     spack.store.layout.metadata_dir,
                                     spack.store.layout.manifest_file_name)
        if not os.path.exists(manifest_file):
            spec_id = spec.format('{name}/{hash:7}')
            tty.warn('No manifest file in tarball for spec %s' % spec_id)
    finally:
        shutil.rmtree(tmpdir)
        if os.path.exists(filename):
            os.remove(filename)
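
The marker-file lookup above indexes the first glob match and would raise `IndexError` on an empty result; a slightly more defensive sketch of the same idea (the function name is illustrative):

import glob
import os

def find_workdir(tmpdir):
    # Locate the unpacked prefix via its .spack/binary_distribution
    # marker instead of guessing the directory name.
    pattern = os.path.join(tmpdir, '*', '.spack', 'binary_distribution')
    matches = glob.glob(pattern)
    if not matches:
        raise RuntimeError('no binary_distribution marker under %s'
                           % tmpdir)
    return os.path.dirname(os.path.dirname(matches[0]))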
Example #33
def test_ci_generate_bootstrap_artifacts_buildcache(tmpdir,
                                                    mutable_mock_env_path,
                                                    env_deactivate,
                                                    install_mockery,
                                                    mock_packages):
    """Test that we can bootstrap a compiler when artifacts buildcache
    is turned on"""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
spack:
  definitions:
    - bootstrap:
      - [email protected]
  specs:
    - dyninst%[email protected]
  mirrors:
    some-mirror: https://my.fake.mirror
  gitlab-ci:
    bootstrap:
      - name: bootstrap
        compiler-agnostic: true
    mappings:
      - match:
          - arch=test-debian6-x86_64
        runner-attributes:
          tags:
            - donotcare
    enable-artifacts-buildcache: True
""")

    needs_graph = {
        '(bootstrap) conflict': [],
        '(bootstrap) gcc': [
            '(bootstrap) conflict',
        ],
        '(specs) libelf': [
            '(bootstrap) gcc',
            '(bootstrap) conflict',
        ],
        '(specs) libdwarf': [
            '(bootstrap) gcc',
            '(bootstrap) conflict',
            '(specs) libelf',
        ],
        '(specs) dyninst': [
            '(bootstrap) gcc',
            '(bootstrap) conflict',
            '(specs) libelf',
            '(specs) libdwarf',
        ],
    }

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        outputfile = str(tmpdir.join('.gitlab-ci.yml'))

        with ev.read('test'):
            ci_cmd('generate', '--output-file', outputfile)

        with open(outputfile) as f:
            contents = f.read()
            yaml_contents = syaml.load(contents)
            _validate_needs_graph(yaml_contents, needs_graph, True)
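`_validate_needs_graph` itself is not shown in this excerpt; purely as a sketch of what such a check could look like (the names and the generated YAML layout are assumptions):

def _validate_needs_graph_sketch(yaml_contents, needs_graph):
    # Hypothetical re-implementation for illustration: every expected
    # job must appear in the generated YAML, and its 'needs' list must
    # cover the expected dependency jobs.
    for job_name, needed in needs_graph.items():
        jobs = [job for name, job in yaml_contents.items()
                if isinstance(job, dict) and job_name in name]
        assert jobs, 'missing job: %s' % job_name
        for job in jobs:
            found = [n['job'] if isinstance(n, dict) else n
                     for n in job.get('needs', [])]
            for need in needed:
                assert any(need in f for f in found), need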
Example #34
File: yaml.py Project: rorist/spack
def setUp(self):
    self.data = syaml.load(test_file)
Example #35
def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages, mock_fetch,
                              mock_stage, mock_gnupghome):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    signing_key_dir = spack_paths.mock_gpg_keys_path
    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
    with open(signing_key_path) as fd:
        signing_key = fd.read()

    ci.import_signing_key(signing_key)

    spack_yaml_contents = """
spack:
 definitions:
   - packages: [patchelf]
 specs:
   - $packages
 mirrors:
   test-mirror: {0}
""".format(mirror_url)

    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            spec_map = ci.get_concrete_specs('patchelf', 'patchelf', '',
                                             'FIND_ANY')
            concrete_spec = spec_map['patchelf']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', yaml_path)

            # env, spec, yaml_path, mirror_url, build_id, sign_binaries
            ci.push_mirror_contents(env, concrete_spec, yaml_path, mirror_url,
                                    '42', True)

            buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')

            # Test generating the buildcache index while we have a
            # binary mirror
            buildcache_cmd('update-index', '--mirror-url', mirror_url)
            index_path = os.path.join(buildcache_path, 'index.json')
            with open(index_path) as idx_fd:
                index_object = json.load(idx_fd)
                validate(index_object, db_idx_schema)

            # Now that index is regenerated, validate "buildcache list" output
            buildcache_list_output = buildcache_cmd('list', output=str)
            assert ('patchelf' in buildcache_list_output)

            # Also test buildcache_spec schema
            bc_files_list = os.listdir(buildcache_path)
            for file_name in bc_files_list:
                if file_name.endswith('.spec.yaml'):
                    spec_yaml_path = os.path.join(buildcache_path, file_name)
                    with open(spec_yaml_path) as yaml_fd:
                        yaml_object = syaml.load(yaml_fd)
                        validate(yaml_object, spec_yaml_schema)

            logs_dir = working_dir.join('logs_dir')
            if not os.path.exists(logs_dir.strpath):
                os.makedirs(logs_dir.strpath)

            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)

            logs_dir_list = os.listdir(logs_dir.strpath)

            assert ('spack-build-out.txt' in logs_dir_list)

            # Also just make sure that if something goes wrong with the
            # stage logs copy, no exception is thrown
            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

            dl_dir = working_dir.join('download_dir')
            if not os.path.exists(dl_dir.strpath):
                os.makedirs(dl_dir.strpath)

            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
                           dl_dir.strpath, '--require-cdashid')

            dl_dir_list = os.listdir(dl_dir.strpath)

            assert (len(dl_dir_list) == 3)
Example #36
def validate(configuration_file):
    """Validate a Spack environment YAML file that is being used to generate a
    recipe for a container.

    Since a few attributes of the configuration must have specific values for
    the container recipe, this function returns a sanitized copy of the
    configuration in the input file. If any modification is needed, a warning
    will be issued.

    Args:
        configuration_file (str): path to the Spack environment YAML file

    Returns:
        A sanitized copy of the configuration stored in the input file
    """
    import jsonschema
    with open(configuration_file) as f:
        config = syaml.load(f)

    # Ensure we have a "container" attribute with sensible defaults set
    env_dict = spack.environment.config_dict(config)
    env_dict.setdefault('container', {
        'format': 'docker',
        'base': {
            'image': 'ubuntu:18.04',
            'spack': 'develop'
        }
    })
    env_dict['container'].setdefault('format', 'docker')
    env_dict['container'].setdefault('base', {
        'image': 'ubuntu:18.04',
        'spack': 'develop'
    })

    # Remove attributes that are not needed / allowed in the
    # container recipe
    for subsection in ('cdash', 'gitlab_ci', 'modules'):
        if subsection in env_dict:
            msg = ('the subsection "{0}" in "{1}" is not used when generating'
                   ' container recipes and will be discarded')
            warnings.warn(msg.format(subsection, configuration_file))
            env_dict.pop(subsection)

    # Set the default value of the concretization strategy to "together" and
    # warn if the user explicitly set another value
    env_dict.setdefault('concretization', 'together')
    if env_dict['concretization'] != 'together':
        msg = ('the "concretization" attribute of the environment is set '
               'to "{0}" [the advised value is instead "together"]')
        warnings.warn(msg.format(env_dict['concretization']))

    # Check if the install tree was explicitly set to a custom value and warn
    # that it will be overridden
    environment_config = env_dict.get('config', {})
    if environment_config.get('install_tree', None):
        msg = ('the "config:install_tree" attribute has been set explicitly '
               'and will be overridden in the container image')
        warnings.warn(msg)

    # Likewise for the view
    environment_view = env_dict.get('view', None)
    if environment_view:
        msg = ('the "view" attribute has been set explicitly '
               'and will be overridden in the container image')
        warnings.warn(msg)

    jsonschema.validate(config, schema=env.schema)
    return config
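
A minimal invocation sketch, assuming an environment file named `spack.yaml` in the current directory and that `spack.environment` is imported as in the surrounding module (the file name is illustrative):

# Hypothetical driver: sanitize an environment file before generating
# a container recipe; validate() itself reports any adjustments as
# warnings.
config = validate('spack.yaml')
container = spack.environment.config_dict(config)['container']
print(container['format'])  # 'docker' unless set otherwise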
Example #37
def config_update(args):
    # Read the configuration files
    spack.config.config.get_config(args.section, scope=args.scope)
    updates = spack.config.config.format_updates[args.section]

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section)
        scope_dir = scope.path
        can_be_updated = _can_update_config_file(scope_dir, cfg_file)
        if not can_be_updated:
            if scope.name == 'system':
                skip_system_scope = True
                msg = ('Not enough permissions to write to "system" scope. '
                       'Skipping update at that location [cfg={0}]')
                tty.warn(msg.format(cfg_file))
                continue
            cannot_overwrite.append((scope, cfg_file))

    if cannot_overwrite:
        msg = 'Detected permission issues with the following scopes:\n\n'
        for scope, cfg_file in cannot_overwrite:
            msg += '\t[scope={0}, cfg={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nEither ensure that you have sufficient permissions to '
                'modify these files or do not include these scopes in the '
                'update.')
        tty.die(msg)

    if skip_system_scope:
        updates = [x for x in updates if x.name != 'system']

    # Report if there are no updates to be done
    if not updates:
        msg = 'No updates needed for "{0}" section.'
        tty.msg(msg.format(args.section))
        return

    proceed = True
    if not args.yes_to_all:
        msg = ('The following configuration files are going to be updated to'
               ' the latest schema format:\n\n')
        for scope in updates:
            cfg_file = spack.config.config.get_config_filename(
                scope.name, args.section)
            msg += '\t[scope={0}, file={1}]\n'.format(scope.name, cfg_file)
        msg += ('\nIf the configuration files are updated, versions of Spack '
                'that are older than this version may not be able to read '
                'them. Spack stores backups of the updated files which can '
                'be retrieved with "spack config revert"')
        tty.msg(msg)
        proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)

    if not proceed:
        tty.die('Operation aborted.')

    # Get a function to update the format
    update_fn = spack.config.ensure_latest_format_fn(args.section)
    for scope in updates:
        cfg_file = spack.config.config.get_config_filename(
            scope.name, args.section)
        with open(cfg_file) as f:
            data = syaml.load(f) or {}
            data = data.pop(args.section, {})
        update_fn(data)

        # Make a backup copy and rewrite the file
        bkp_file = cfg_file + '.bkp'
        shutil.copy(cfg_file, bkp_file)
        spack.config.config.update_config(args.section,
                                          data,
                                          scope=scope.name,
                                          force=True)
        msg = 'File "{0}" updated [backup={1}]'
        tty.msg(msg.format(cfg_file, bkp_file))
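
The parse, update, back up, rewrite pattern above can be sketched in isolation; this stands alone under the assumption that `syaml.dump` mirrors `yaml.dump` and returns a string when no stream is given:

import shutil
import spack.util.spack_yaml as syaml

def backup_and_rewrite(cfg_file, section, update_fn):
    # Parse the file, run the format updater on one section, keep a
    # .bkp copy of the original, then write the result back.
    with open(cfg_file) as f:
        data = syaml.load(f) or {}
    section_data = data.pop(section, {})
    update_fn(section_data)
    shutil.copy(cfg_file, cfg_file + '.bkp')
    data[section] = section_data
    with open(cfg_file, 'w') as f:
        f.write(syaml.dump(data))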
Example #38
File: config.py Project: LLNL/spack
def check_schema(name, file_contents):
    """Check a Spack YAML schema against some data"""
    f = StringIO(file_contents)
    data = syaml.load(f)
    spack.config._validate(data, name)
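
A usage sketch (the section contents below are an assumption about what the 'config' schema accepts):

# Valid data passes silently; invalid data raises a jsonschema
# validation error.
check_schema('config', """\
config:
  verify_ssl: true
  checksum: true
""")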