Example #1
def install_from_queue(work_queue, installation_result_queue, kwargs):
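    """Worker loop: pull (jobs, serialized_spec) pairs off the work queue,
    install each spec without its dependencies, and report success or
    failure on the result queue."""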
    while True:
        try:
            jobs, serialized_spec = work_queue.get(block=True)
            spec = Spec.from_yaml(serialized_spec).concretized()

            if jobs:
                kwargs['make_jobs'] = jobs

            if kwargs.get('make_jobs') is None:
                kwargs['make_jobs'] = get_cpu_count()

            kwargs['install_deps'] = False

            tty.msg('Installing %s with %s jobs' %
                    (spec.name, kwargs['make_jobs']))
            with tty.SuppressOutput(msg_enabled=False,
                                    warn_enabled=False,
                                    error_enabled=False):
                spec.package.do_install(**kwargs)

            installation_result_queue.put_nowait((None, serialized_spec))
        except Exception as e:
            tty.error(e)
            traceback.print_exc()
            installation_result_queue.put_nowait(('ERROR', serialized_spec))
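
A minimal sketch of the producer side, assuming the queue and scheduler names from the install_dag example later in this list: specs are serialized to YAML text before enqueueing, since Spec objects have not pickled reliably across processes.

# Sketch only: 'work_queue' and 'ready_specs' are assumed names.
for jobs, spec in ready_specs:
    work_queue.put_nowait((jobs, spec.to_yaml(all_deps=True)))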
Example #2
def get_concrete_specs(root_spec, job_name, related_builds, compiler_action):
    spec_map = {
        'root': None,
        'deps': {},
    }

    if compiler_action == 'FIND_ANY':
        # This corresponds to a bootstrapping phase where we need to
        # rely on any available compiler to build the package (i.e. the
        # compiler needed to be stripped from the spec when we generated
        # the job), and thus we need to concretize the root spec again.
        tty.debug('About to concretize {0}'.format(root_spec))
        concrete_root = Spec(root_spec).concretized()
        tty.debug('Resulting concrete root: {0}'.format(concrete_root))
    else:
        # In this case, either we're relying on Spack to install missing
        # compilers bootstrapped in a previous phase, or else we only had one
        # phase (like a site which already knows what compilers are available
        # on its runners), so we don't want to concretize that root spec
        # again.  The reason we take this path in the first case (bootstrapped
        # compilers) is that we can't concretize a spec at this point if we're
        # going to ask spack to "install_missing_compilers".
        concrete_root = Spec.from_yaml(
            str(zlib.decompress(base64.b64decode(root_spec)).decode('utf-8')))

    spec_map['root'] = concrete_root
    spec_map[job_name] = concrete_root[job_name]

    if related_builds:
        for dep_job_name in related_builds.split(';'):
            spec_map['deps'][dep_job_name] = concrete_root[dep_job_name]

    return spec_map
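
The non-bootstrap branch above assumes the root spec arrives as zlib-compressed, base64-encoded YAML; a sketch of the matching encoder, under that assumption:

# Assumed inverse of the decode above: pack a concrete root spec's YAML
# so it can travel through a generated CI job definition.
encoded_root = base64.b64encode(
    zlib.compress(concrete_root.to_yaml().encode('utf-8')))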
Example #3
def read_package_info(want_specs=True):
    dev.environment.bootstrap_environment()
    packages_filename = os.path.join(os.environ['SPACKDEV_BASE'],
                                     dev.spackdev_aux_packages_sd_file)
    with open(packages_filename, 'r') as f:
        first_line = f.readline().rstrip()
        if first_line.find('[') > -1:
            tty.die(
                'packages.sd is in an obsolete (unsafe) format: please re-execute spack init or initialize a new spackdev area.'
            )
        requesteds = [
            DevPackageInfo(package_arg) for package_arg in first_line.split()
        ]
        additional = [
            DevPackageInfo(package_arg)
            for package_arg in f.readline().rstrip().split()
        ]
        deps = f.readline().rstrip().split()

    install_specs = []
    if want_specs:
        specs_dir = os.path.join(os.environ['SPACKDEV_BASE'],
                                 dev.spackdev_aux_specs_subdir)
        if not os.path.exists(specs_dir):
            tty.die(
                'YAML spec information missing: please re-execute spack init or initialize a new spackdev area.'
            )
        for spec_file in os.listdir(specs_dir):
            if spec_file.endswith('.yaml'):
                with open(os.path.join(specs_dir, spec_file), 'r') as f:
                    install_specs.append(Spec.from_yaml(f))
        return requesteds, additional, deps, install_specs

    return requesteds, additional, deps
Example #4
    def test_yaml_subdag(self):
        spec = Spec('mpileaks^mpich+debug')
        spec.concretize()
        yaml_spec = Spec.from_yaml(spec.to_yaml())

        for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
            self.assertTrue(spec[dep].eq_dag(yaml_spec[dep]))
Example #5
def test_yaml_subdag(config, mock_packages):
    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    yaml_spec = Spec.from_yaml(spec.to_yaml())

    for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
        assert spec[dep].eq_dag(yaml_spec[dep])
Example #6
def test_yaml_subdag(config, builtin_mock):
    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    yaml_spec = Spec.from_yaml(spec.to_yaml())

    for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
        assert spec[dep].eq_dag(yaml_spec[dep])
Example #7
def try_download_specs(urls=None, force=False):
    '''
    Try to download spec files from the given urls and cache the
    resulting specs
    '''
    global _cached_specs
    if urls is None:
        return {}
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.add(spec)

    return _cached_specs
Example #8
def createtarball(args):
    """create a binary package from an existing install"""
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()
    outdir = '.'
    if args.directory:
        outdir = args.directory
    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.msg('Found at least one matching spec')

    for match in matches:
        tty.msg('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.msg('skipping external or virtual spec %s' % match.format())
        else:
            tty.msg('adding matching spec %s' % match.format())
            specs.add(match)
            tty.msg('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.msg('skipping external or virtual dependency %s' %
                            node.format())
                else:
                    tty.msg('adding dependency %s' % node.format())
                    specs.add(node)

    tty.msg('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        bindist.build_tarball(spec, outdir, args.force, args.rel,
                              args.unsigned, args.allow_root, signkey,
                              not args.no_rebuild_index)
Example #9
def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
    """Tests a simple legacy YAML with a dependency and ensures spec survives
    concretization."""
    yaml = """
spec:
- a:
    version: '2.0'
    arch:
      platform: linux
      platform_os: rhel7
      target: x86_64
    compiler:
      name: gcc
      version: 8.3.0
    namespace: builtin.mock
    parameters:
      bvv: true
      foo:
      - bar
      foobar: bar
      cflags: []
      cppflags: []
      cxxflags: []
      fflags: []
      ldflags: []
      ldlibs: []
    dependencies:
      b:
        hash: iaapywazxgetn6gfv2cfba353qzzqvhn
        type:
        - build
        - link
    hash: obokmcsn3hljztrmctbscmqjs3xclazz
    full_hash: avrk2tqsnzxeabmxa6r776uq7qbpeufv
    build_hash: obokmcsn3hljztrmctbscmqjs3xclazy
- b:
    version: '1.0'
    arch:
      platform: linux
      platform_os: rhel7
      target: x86_64
    compiler:
      name: gcc
      version: 8.3.0
    namespace: builtin.mock
    parameters:
      cflags: []
      cppflags: []
      cxxflags: []
      fflags: []
      ldflags: []
      ldlibs: []
    hash: iaapywazxgetn6gfv2cfba353qzzqvhn
    full_hash: qvsxvlmjaothtpjluqijv7qfnni3kyyg
    build_hash: iaapywazxgetn6gfv2cfba353qzzqvhy
"""
    spec = Spec.from_yaml(yaml)
    concrete_spec = spec.concretized()
    assert concrete_spec.eq_dag(spec)
Example #10
    def read_spec(self, path):
        """Read the contents of a file and parse them as a spec"""
        with open(path) as f:
            spec = Spec.from_yaml(f)

        # Specs read from actual installations are always concrete
        spec._mark_concrete()
        return spec
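
A typical call site for read_spec, with the layout and spec names assumed (the directory-layout tests later in this list follow the same pattern):

# Hypothetical usage: read an installed spec back from the layout.
spec_path = layout.spec_file_path(spec)
spec_from_file = layout.read_spec(spec_path)
assert spec_from_file.concrete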
Example #11
    def test_yaml_subdag(self):
        spec = Spec('mpileaks^mpich+debug')
        spec.concretize()

        yaml_spec = Spec.from_yaml(spec.to_yaml())

        for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
            self.assertTrue(spec[dep].eq_dag(yaml_spec[dep]))
Example #12
    def test_read_and_write_spec(self):
        """This goes through each package in spack and creates a directory for
           it.  It then ensures that the spec for the directory's
           installed package can be read back in consistently, and
           finally that the directory can be removed by the directory
           layout.
        """
        packages = list(spack.repo.all_packages())[:max_packages]

        for pkg in packages:
            if pkg.name.startswith('external'):
                # External package tests cannot be installed
                continue
            spec = pkg.spec

            # If a spec fails to concretize, just skip it.  If it is a
            # real error, it will be caught by concretization tests.
            try:
                spec.concretize()
            except Exception:
                continue

            self.layout.create_install_directory(spec)

            install_dir = self.layout.path_for_spec(spec)
            spec_path = self.layout.spec_file_path(spec)

            # Ensure directory has been created in right place.
            self.assertTrue(os.path.isdir(install_dir))
            self.assertTrue(install_dir.startswith(self.tmpdir))

            # Ensure spec file exists when directory is created
            self.assertTrue(os.path.isfile(spec_path))
            self.assertTrue(spec_path.startswith(install_dir))

            # Make sure spec file can be read back in to get the original spec
            spec_from_file = self.layout.read_spec(spec_path)
            self.assertEqual(spec, spec_from_file)
            self.assertTrue(spec.eq_dag(spec_from_file))
            self.assertTrue(spec_from_file.concrete)

            # Ensure that specs that come out "normal" are really normal.
            with open(spec_path) as spec_file:
                read_separately = Spec.from_yaml(spec_file.read())

                read_separately.normalize()
                self.assertEqual(read_separately, spec_from_file)

                read_separately.concretize()
                self.assertEqual(read_separately, spec_from_file)

            # Make sure the hash of the read-in spec is the same
            self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())

            # Ensure directories are properly removed
            self.layout.remove_install_directory(spec)
            self.assertFalse(os.path.isdir(install_dir))
            self.assertFalse(os.path.exists(install_dir))
Example #13
def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs

    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    if not spack.mirror.MirrorCollection():
        tty.warn("No Spack mirrors are currently configured")
        return {}

    urls = set()
    for mirror in spack.mirror.MirrorCollection().values():
        fetch_url_build_cache = url_util.join(mirror.fetch_url,
                                              _build_cache_relative_path)

        mirror_dir = url_util.local_file_path(fetch_url_build_cache)
        if mirror_dir:
            tty.msg("Finding buildcaches in %s" % mirror_dir)
            if os.path.exists(mirror_dir):
                files = os.listdir(mirror_dir)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = url_util.join(fetch_url_build_cache, file)
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches at %s" %
                    url_util.format(fetch_url_build_cache))
            p, links = web_util.spider(
                url_util.join(fetch_url_build_cache, 'index.html'))
            for link in links:
                if re.search("spec.yaml", link):
                    urls.add(link)

    _cached_specs = []
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.append(spec)

    return _cached_specs
Example #14
def _concretize_spec(ser_spec):
    try:
        spec = Spec.from_yaml(ser_spec)
        try:
            return ser_spec, spec.concretized().to_yaml(all_deps=True)
        except Exception as e:
            tty.warn('Could not concretize %s: %s' % (spec.name, e))
    except Exception as e:
        tty.warn('Could not deserialize spec: %s' % e)
    return None
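
A sketch of how the helper's return value might be consumed; the 'zlib' spec here is purely illustrative:

# Each call returns (original_yaml, concrete_yaml), or None on any error.
result = _concretize_spec(Spec('zlib').to_yaml(all_deps=True))
if result is not None:
    orig_yaml, conc_yaml = result
    concrete_spec = Spec.from_yaml(conc_yaml)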
Example #15
def get_specs(force=False):
    """
    Get spec.yaml's for build caches available on mirror
    """
    global _cached_specs

    if _cached_specs:
        tty.debug("Using previously-retrieved specs")
        return _cached_specs

    mirrors = spack.config.get('mirrors')
    if not mirrors:
        tty.warn("No Spack mirrors are currently configured")
        return {}

    path = str(spack.architecture.sys_type())
    urls = set()
    for mirror_name, mirror_url in mirrors.items():
        if mirror_url.startswith('file'):
            mirror = mirror_url.replace(
                'file://', '') + "/" + _build_cache_relative_path
            tty.msg("Finding buildcaches in %s" % mirror)
            if os.path.exists(mirror):
                files = os.listdir(mirror)
                for file in files:
                    if re.search('spec.yaml', file):
                        link = 'file://' + mirror + '/' + file
                        urls.add(link)
        else:
            tty.msg("Finding buildcaches on %s" % mirror_url)
            p, links = spider(mirror_url + "/" + _build_cache_relative_path)
            for link in links:
                if re.search("spec.yaml", link) and re.search(path, link):
                    urls.add(link)

    _cached_specs = []
    for link in urls:
        with Stage(link, name="build_cache", keep=True) as stage:
            if force and os.path.exists(stage.save_filename):
                os.remove(stage.save_filename)
            if not os.path.exists(stage.save_filename):
                try:
                    stage.fetch()
                except fs.FetchError:
                    continue
            with open(stage.save_filename, 'r') as f:
                # read the spec from the build cache file. All specs
                # in build caches are concrete (as they are built) so
                # we need to mark this spec concrete on read-in.
                spec = Spec.from_yaml(f)
                spec._mark_concrete()
                _cached_specs.append(spec)

    return _cached_specs
Example #16
    def concrete_specs_gen(self, specs, print_time=False):
        """Concretizes specs across a pool of processes

        Returns:
             A generator that yields a tuple with the original spec and a
             concretized spec. Output is not necessarily ordered.
        """

        start_time = time.time()

        abs_list, conc_list = [], []
        for s in specs:
            if s.concrete:
                conc_list.append(s)
            else:
                abs_list.append(s)

        # Note: JSON serialization does not support 'all_deps'
        yaml_specs = [s.to_yaml(all_deps=True) for s in abs_list]

        # Spin off work to the pool
        conc_spec_gen = self.pool.imap_unordered(
            ParallelConcretizer._concretize_spec, yaml_specs)

        # Hand back any specs that are already concrete while the pool works
        for concrete_spec in conc_list:
            yield (concrete_spec, concrete_spec)

        # Yield specs as the pool concretizes them
        for spec_yaml_tuple in conc_spec_gen:
            if spec_yaml_tuple is None:
                if not self.ignore_error:
                    raise Exception("Parallel concretization Failed!")
            else:
                orig_spec_yaml, conc_spec_yaml = spec_yaml_tuple
                yield (Spec.from_yaml(orig_spec_yaml),
                       Spec.from_yaml(conc_spec_yaml))

        if print_time:
            tot_time = time.time() - start_time
            spec_per_second = len(specs) / tot_time
            tty.msg(
                'Added %s specs in %s seconds (%s Spec/s)' %
                (len(specs), round(tot_time, 2), round(spec_per_second, 2)))
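
A hypothetical driver for the generator above; 'concretizer' (a ParallelConcretizer instance) and 'specs' are assumed:

# Stream (abstract, concrete) pairs as workers finish, in arbitrary order.
for abstract, concrete in concretizer.concrete_specs_gen(specs,
                                                         print_time=True):
    tty.msg('%s -> /%s' % (abstract.name, concrete.dag_hash()))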
Example #17
def generate_package_index(cache_prefix):
    """Create the build cache index page.

    Creates (or replaces) the "index.json" page at the location given in
    cache_prefix.  This page contains a link for each binary package (*.yaml)
    and public key (*.key) under cache_prefix.
    """
    tmpdir = tempfile.mkdtemp()
    db_root_dir = os.path.join(tmpdir, 'db_root')
    db = spack_db.Database(None, db_dir=db_root_dir,
                           enable_transaction_locking=False,
                           record_fields=['spec', 'ref_count'])

    file_list = (
        entry
        for entry in web_util.list_url(cache_prefix)
        if entry.endswith('.yaml'))

    tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
        cache_prefix))
    for file_path in file_list:
        try:
            yaml_url = url_util.join(cache_prefix, file_path)
            tty.debug('fetching {0}'.format(yaml_url))
            _, _, yaml_file = web_util.read_from_url(yaml_url)
            yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
            s = Spec.from_yaml(yaml_contents)
            db.add(s, None)
        except (URLError, web_util.SpackWebError) as url_err:
            tty.error('Error reading spec.yaml: {0}'.format(file_path))
            tty.error(url_err)

    try:
        index_json_path = os.path.join(db_root_dir, 'index.json')
        with open(index_json_path, 'w') as f:
            db._write_to_file(f)

        web_util.push_to_url(
            index_json_path,
            url_util.join(cache_prefix, 'index.json'),
            keep_original=False,
            extra_args={'ContentType': 'application/json'})
    finally:
        shutil.rmtree(tmpdir)
Example #18
def get_concrete_spec(args):
    spec_str = args.spec
    spec_yaml_path = args.spec_yaml

    if not spec_str and not spec_yaml_path:
        tty.msg('Must provide either a spec string or a path to a ' +
                'yaml file in order to concretize a spec')
        sys.exit(1)

    if spec_str:
        try:
            spec = find_matching_specs(spec_str)[0]
            spec.concretize()
        except SpecError as spec_error:
            tty.error('Unable to concretize spec {0}'.format(args.spec))
            tty.debug(spec_error)
            sys.exit(1)

        return spec

    with open(spec_yaml_path, 'r') as fd:
        return Spec.from_yaml(fd.read())
Example #19
def copy_fn(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-file' argument."""
    # TODO: Remove after v0.18.0 release
    msg = ('"spack buildcache copy" is deprecated and will be removed from '
           'Spack starting in v0.19.0')
    warnings.warn(msg)

    if not args.spec_file:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_file, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_file))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(build_cache_dir,
                                    bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(build_cache_dir,
                                     bindist.tarball_name(spec, '.spec.json'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    specfile_rel_path_yaml = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path_yaml = os.path.join(
        args.base_dir, specfile_rel_path_yaml)
    specfile_dest_path_yaml = os.path.join(
        dest_root_path, specfile_rel_path_yaml)

    # Make sure the directory structure exists before attempting to copy
    if not os.path.isdir(os.path.dirname(tarball_dest_path)):
        os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
    shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
Example #20
    def install_dag(self, dag_scheduler, kwargs):
        """Installs a list of specs"""

        start_time = time.time()

        # Initialize structures relating to the process pool
        outstanding_installs = {}
        work_queue = Manager().Queue()
        installation_result_queue = Manager().Queue()

        try:
            with NonDaemonPool(processes=get_cpu_count()) as pool:

                # Create a process for each core, initialize it with async
                # queue for sending specs/receiving results
                for _ in range(get_cpu_count()):
                    pool.apply_async(
                        install_from_queue,
                        (work_queue, installation_result_queue, kwargs))

                # Initialize spec structures
                if not dag_scheduler.schedule_is_built():
                    dag_scheduler.build_schedule()
                ready_specs = set(dag_scheduler.pop_ready_specs())

                # tty.msg(self._progress_prompt_str(
                #     'Installing', 'Ready', 'Unscheduled'))

                while len(ready_specs) > 0 or len(outstanding_installs) > 0:
                    for jobs, spec in ready_specs:
                        # Note: to_json does not support all_deps
                        work_queue.put_nowait(
                            (jobs, spec.to_yaml(all_deps=True)))
                        outstanding_installs[spec.full_hash()] = spec

                    ready_specs.clear()

                    # Block until something finishes
                    # TODO put a timeout and TimeoutError handler here
                    res, serialized_spec = installation_result_queue.get(True)

                    spec = Spec.from_yaml(serialized_spec)

                    # Message indicates an error
                    if res:
                        removed_specs = list(
                            dag_scheduler.install_failed(spec))
                        rm_specs_str = '\n\t'.join(
                            s.name for s in sorted(removed_specs))
                        tty.error(
                            'Installation of "%s" failed. Skipping %d'
                            ' dependent packages: \n\t%s' %
                            (spec.name, len(removed_specs), rm_specs_str))
                    else:
                        dag_scheduler.install_successful(spec)
                        tty.msg('Installed %s' % spec.name)

                    # Greedily get all ready specs
                    for j_s in dag_scheduler.pop_ready_specs():
                        ready_specs.add(j_s)

                    outstanding_installs.pop(spec.full_hash())

        except Exception as e:
            import traceback
            tty.error("Installation pool error, %s\n" % (str(e)))
            traceback.print_exc()
        finally:
            tty.msg('Installation finished (%s)' % time.strftime(
                '%H:%M:%S', time.gmtime(time.time() - start_time)))
Example #21
def _createtarball(env,
                   spec_yaml=None,
                   packages=None,
                   add_spec=True,
                   add_deps=True,
                   output_location=os.getcwd(),
                   signing_key=None,
                   force=False,
                   make_relative=False,
                   unsigned=False,
                   allow_root=False,
                   rebuild_index=False):
    if spec_yaml:
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            package = '/{0}'.format(s.dag_hash())
            matches = find_matching_specs(package, env=env)

    elif packages:
        matches = find_matching_specs(packages, env=env)

    elif env:
        matches = [env.specs_by_hash[h] for h in env.concretized_order]

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package spec, an active environment," +
                " or else a path to a yaml file containing a spec" +
                " to install")
    specs = set()

    mirror = spack.mirror.MirrorCollection().lookup(output_location)
    outdir = url_util.format(mirror.push_url)

    msg = 'Buildcache files will be output to %s/build_cache' % outdir
    tty.msg(msg)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            lookup = spack.store.db.query_one(match)

            if not add_spec:
                tty.debug('skipping matching root spec %s' % match.format())
            elif lookup is None:
                tty.debug('skipping uninstalled matching spec %s' %
                          match.format())
            else:
                tty.debug('adding matching spec %s' % match.format())
                specs.add(match)

            if not add_deps:
                continue

            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                # skip root, since it's handled above
                if d == 0:
                    continue

                lookup = spack.store.db.query_one(node)

                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                elif lookup is None:
                    tty.debug('skipping uninstalled dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.debug('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, make_relative, unsigned,
                                  allow_root, signing_key, rebuild_index)
        except bindist.NoOverwriteException as e:
            tty.warn(e)
Example #22
def test_read_and_write_spec(temporary_store, config, mock_packages):
    """This goes through each package in spack and creates a directory for
    it.  It then ensures that the spec for the directory's
    installed package can be read back in consistently, and
    finally that the directory can be removed by the directory
    layout.
    """
    layout = temporary_store.layout
    pkg_names = list(spack.repo.path.all_package_names())[:max_packages]

    for name in pkg_names:
        if name.startswith('external'):
            # External package tests cannot be installed
            continue

        # If a spec fails to concretize, just skip it.  If it is a
        # real error, it will be caught by concretization tests.
        try:
            spec = spack.spec.Spec(name).concretized()
        except Exception:
            continue

        layout.create_install_directory(spec)

        install_dir = path_to_os_path(layout.path_for_spec(spec))[0]
        spec_path = layout.spec_file_path(spec)

        # Ensure directory has been created in right place.
        assert os.path.isdir(install_dir)
        assert install_dir.startswith(temporary_store.root)

        # Ensure spec file exists when directory is created
        assert os.path.isfile(spec_path)
        assert spec_path.startswith(install_dir)

        # Make sure spec file can be read back in to get the original spec
        spec_from_file = layout.read_spec(spec_path)

        stored_deptypes = spack.hash_types.dag_hash
        expected = spec.copy(deps=stored_deptypes)
        expected._mark_concrete()

        assert expected.concrete
        assert expected == spec_from_file
        assert expected.eq_dag(spec_from_file)
        assert spec_from_file.concrete

        # Ensure that specs that come out "normal" are really normal.
        with open(spec_path) as spec_file:
            read_separately = Spec.from_yaml(spec_file.read())

        # TODO: revise this when build deps are in dag_hash
        norm = read_separately.copy(deps=stored_deptypes)
        assert norm == spec_from_file
        assert norm.eq_dag(spec_from_file)

        # TODO: revise this when build deps are in dag_hash
        conc = read_separately.concretized().copy(deps=stored_deptypes)
        assert conc == spec_from_file
        assert conc.eq_dag(spec_from_file)

        assert expected.dag_hash() == spec_from_file.dag_hash()

        # Ensure directories are properly removed
        layout.remove_install_directory(spec)
        assert not os.path.isdir(install_dir)
        assert not os.path.exists(install_dir)
Example #23
def upload_spec(args):
    """Upload a spec to s3 bucket"""
    if not args.spec and not args.spec_yaml:
        tty.error('Cannot upload spec without spec arg or path to spec yaml')
        sys.exit(1)

    if not args.base_dir:
        tty.error('No base directory for buildcache specified')
        sys.exit(1)

    if args.spec:
        try:
            spec = Spec(args.spec)
            spec.concretize()
        except Exception as e:
            tty.debug(e)
            tty.error('Unable to concretize spec from string {0}'.format(
                args.spec))
            sys.exit(1)
    else:
        try:
            with open(args.spec_yaml, 'r') as fd:
                spec = Spec.from_yaml(fd.read())
        except Exception as e:
            tty.debug(e)
            tty.error('Unable to concretize spec from yaml {0}'.format(
                args.spec_yaml))
            sys.exit(1)

    s3, bucket_name = get_s3_session(args.endpoint_url)

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_key = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_path = os.path.join(args.base_dir, tarball_key)

    specfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_path = os.path.join(args.base_dir, specfile_key)

    cdashidfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
    cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)

    tty.msg('Uploading {0}'.format(tarball_key))
    s3.meta.client.upload_file(
        tarball_path, bucket_name,
        os.path.join('mirror', tarball_key),
        ExtraArgs={'ACL': 'public-read'})

    tty.msg('Uploading {0}'.format(specfile_key))
    s3.meta.client.upload_file(
        specfile_path, bucket_name,
        os.path.join('mirror', specfile_key),
        ExtraArgs={'ACL': 'public-read'})

    if os.path.exists(cdashidfile_path):
        tty.msg('Uploading {0}'.format(cdashidfile_key))
        s3.meta.client.upload_file(
            cdashidfile_path, bucket_name,
            os.path.join('mirror', cdashidfile_key),
            ExtraArgs={'ACL': 'public-read'})
Example #24
def createtarball(args):
    """create a binary package from an existing install"""
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    outdir = '.'
    if args.directory:
        outdir = args.directory

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            if "package" in args.target_type:
                specs.add(match)
            if "dependencies" not in args.target_type:
                # if the user does not want dependencies, stop here
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    f_create = ft.partial(create_single_tarball,
                          outdir=outdir,
                          force=args.force,
                          relative=args.rel,
                          unsigned=args.unsigned,
                          allow_root=args.allow_root,
                          signkey=signkey,
                          rebuild_index=args.rebuild_index and args.jobs == 1,
                          catch_exceptions=args.jobs != 1)

    # default behavior (early termination) for one job
    if args.jobs == 1:
        for spec in specs:
            f_create(spec)

    else:
        # currently, specs cause an infinite recursion bug when pickled
        # -> as multiprocessing uses pickle internally, we need to transform
        #    specs prior to distributing the work via worker pool
        # TODO: check if specs can be pickled
        specs = [s.to_dict() for s in specs]

        pool = NoDaemonPool(args.jobs if args.jobs > 1 else mp.cpu_count())
        # chunksize=1 because we do not want to pre-allocate specs to workers
        # (since each package will need a different amount of time to be
        # compressed)
        retvals = pool.map(f_create, specs, chunksize=1)

        errors = [rv["error"] for rv in retvals if rv["error"] is not None]
        list(map(tty.error, errors))
        if len(errors) > 0:
            sys.exit(1)

        # perform rebuild of index unless user requested not to
        if args.rebuild_index:
            bindist.generate_package_index(outdir)
Example #25
def check_yaml_round_trip(self, spec):
    yaml_text = spec.to_yaml()
    spec_from_yaml = Spec.from_yaml(yaml_text)
    self.assertTrue(spec.eq_dag(spec_from_yaml))
Example #26
def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
                   rel, unsigned, allow_root, no_rebuild_index):
    if spec_yaml:
        packages = set()
        with open(spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            packages.add('/{0}'.format(s.dag_hash()))

    elif packages:
        pass  # use the given package arguments as-is

    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    outdir = '.'
    if directory:
        outdir = directory

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if key:
        signkey = key

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            specs.add(match)
            if no_deps is True:
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    for spec in specs:
        tty.msg('creating binary cache file for package %s ' % spec.format())
        try:
            bindist.build_tarball(spec, outdir, force, rel, unsigned,
                                  allow_root, signkey, not no_rebuild_index)
        except Exception as e:
            tty.warn('%s' % e)
Example #27
def buildcache_copy(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-yaml' argument."""
    # TODO: This sub-command should go away once #11117 is merged

    if not args.spec_yaml:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_yaml, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_yaml))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(build_cache_dir,
                                    bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(build_cache_dir,
                                     bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    cdashidfile_rel_path = os.path.join(build_cache_dir,
                                        bindist.tarball_name(spec, '.cdashid'))
    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

    # Make sure the directory structure exists before attempting to copy
    if not os.path.isdir(os.path.dirname(tarball_dest_path)):
        os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # Copy the cdashid file (if exists) to the destination mirror
    if os.path.exists(cdashid_src_path):
        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
        shutil.copyfile(cdashid_src_path, cdashid_dest_path)
Example #28
def check_specs_equal(original_spec, spec_yaml_path):
    with open(spec_yaml_path, 'r') as fd:
        spec_yaml = fd.read()
        spec_from_yaml = Spec.from_yaml(spec_yaml)
        return original_spec.eq_dag(spec_from_yaml)
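
A hypothetical round trip through a file written with to_yaml:

# Sketch only: write a spec out, then verify it reads back equal.
with open('spec.yaml', 'w') as fd:
    fd.write(original_spec.to_yaml())
assert check_specs_equal(original_spec, 'spec.yaml')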
Example #29
def test_invalid_yaml_spec(invalid_yaml):
    with pytest.raises(SpackYAMLError) as e:
        Spec.from_yaml(invalid_yaml)
    exc_msg = str(e.value)
    assert exc_msg.startswith("error parsing YAML spec:")
    assert invalid_yaml in exc_msg
Example #30
def test_read_and_write_spec(
        layout_and_dir, config, builtin_mock
):
    """This goes through each package in spack and creates a directory for
    it.  It then ensures that the spec for the directory's
    installed package can be read back in consistently, and
    finally that the directory can be removed by the directory
    layout.
    """
    layout, tmpdir = layout_and_dir
    packages = list(spack.repo.all_packages())[:max_packages]

    for pkg in packages:
        if pkg.name.startswith('external'):
            # External package tests cannot be installed
            continue
        spec = pkg.spec

        # If a spec fails to concretize, just skip it.  If it is a
        # real error, it will be caught by concretization tests.
        try:
            spec.concretize()
        except Exception:
            continue

        layout.create_install_directory(spec)

        install_dir = layout.path_for_spec(spec)
        spec_path = layout.spec_file_path(spec)

        # Ensure directory has been created in right place.
        assert os.path.isdir(install_dir)
        assert install_dir.startswith(str(tmpdir))

        # Ensure spec file exists when directory is created
        assert os.path.isfile(spec_path)
        assert spec_path.startswith(install_dir)

        # Make sure spec file can be read back in to get the original spec
        spec_from_file = layout.read_spec(spec_path)

        # currently we don't store build dependency information when
        # we write out specs to the filesystem.

        # TODO: fix this when we can concretize more loosely based on
        # TODO: what is installed. We currently omit these to
        # TODO: increase reuse of build dependencies.
        stored_deptypes = ('link', 'run')
        expected = spec.copy(deps=stored_deptypes)
        assert expected.concrete
        assert expected == spec_from_file
        assert expected.eq_dag(spec_from_file)
        assert spec_from_file.concrete

        # Ensure that specs that come out "normal" are really normal.
        with open(spec_path) as spec_file:
            read_separately = Spec.from_yaml(spec_file.read())

        # TODO: revise this when build deps are in dag_hash
        norm = read_separately.normalized().copy(deps=stored_deptypes)
        assert norm == spec_from_file
        assert norm.eq_dag(spec_from_file)

        # TODO: revise this when build deps are in dag_hash
        conc = read_separately.concretized().copy(deps=stored_deptypes)
        assert conc == spec_from_file
        assert conc.eq_dag(spec_from_file)

        assert expected.dag_hash() == spec_from_file.dag_hash()

        # Ensure directories are properly removed
        layout.remove_install_directory(spec)
        assert not os.path.isdir(install_dir)
        assert not os.path.exists(install_dir)
Example #31
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, mock_packages):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
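        # YAML is a superset of JSON, so from_yaml below can also parse
        # the reversed JSON string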
        round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
        round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string)

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
        # full_hashes are equal
        spec.concretize()
        round_trip_yaml_spec.concretize()
        round_trip_json_spec.concretize()
        round_trip_reversed_yaml_spec.concretize()
        round_trip_reversed_json_spec.concretize()
        assert spec.full_hash() == round_trip_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_json_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
        assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
Example #32
def test_ordered_read_not_required_for_consistent_dag_hash(
        config, builtin_mock
):
    """Make sure ordered serialization isn't required to preserve hashes.

    For consistent hashes, we require that YAML and json documents
    have their keys serialized in a deterministic order. However, we
    don't want to require them to be serialized in order. This
    ensures that is not required.
    """
    specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
    for spec in specs:
        spec = Spec(spec)
        spec.concretize()

        #
        # Dict & corresponding YAML & JSON from the original spec.
        #
        spec_dict = spec.to_dict()
        spec_yaml = spec.to_yaml()
        spec_json = spec.to_json()

        #
        # Make a spec with reversed OrderedDicts for every
        # OrderedDict in the original.
        #
        reversed_spec_dict = reverse_all_dicts(spec.to_dict())

        #
        # Dump to YAML and JSON
        #
        yaml_string = syaml.dump(spec_dict, default_flow_style=False)
        reversed_yaml_string = syaml.dump(reversed_spec_dict,
                                          default_flow_style=False)
        json_string = sjson.dump(spec_dict)
        reversed_json_string = sjson.dump(reversed_spec_dict)

        #
        # Do many consistency checks
        #

        # spec yaml is ordered like the spec dict
        assert yaml_string == spec_yaml
        assert json_string == spec_json

        # reversed string is different from the original, so it
        # *would* generate a different hash
        assert yaml_string != reversed_yaml_string
        assert json_string != reversed_json_string

        # build specs from the "wrongly" ordered data
        round_trip_yaml_spec = Spec.from_yaml(yaml_string)
        round_trip_json_spec = Spec.from_json(json_string)
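        # YAML is a superset of JSON, so from_yaml below can also parse
        # the reversed JSON string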
        round_trip_reversed_yaml_spec = Spec.from_yaml(
            reversed_yaml_string
        )
        round_trip_reversed_json_spec = Spec.from_yaml(
            reversed_json_string
        )

        # TODO: remove this when build deps are in provenance.
        spec = spec.copy(deps=('link', 'run'))
        # specs are equal to the original
        assert spec == round_trip_yaml_spec
        assert spec == round_trip_json_spec
        assert spec == round_trip_reversed_yaml_spec
        assert spec == round_trip_reversed_json_spec
        assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
        assert round_trip_json_spec == round_trip_reversed_json_spec
        # dag_hashes are equal
        assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_json_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
        assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
Example #33
def check_yaml_round_trip(spec):
    yaml_text = spec.to_yaml()
    spec_from_yaml = Spec.from_yaml(yaml_text)
    assert spec.eq_dag(spec_from_yaml)
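
Hypothetical usage, reusing the mock spec that appears elsewhere in these tests:

check_yaml_round_trip(Spec('mpileaks^mpich+debug').concretized())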
Example #34
def test_read_and_write_spec(
        layout_and_dir, config, builtin_mock
):
    """This goes through each package in spack and creates a directory for
    it.  It then ensures that the spec for the directory's
    installed package can be read back in consistently, and
    finally that the directory can be removed by the directory
    layout.
    """
    layout, tmpdir = layout_and_dir
    packages = list(spack.repo.all_packages())[:max_packages]

    for pkg in packages:
        if pkg.name.startswith('external'):
            # External package tests cannot be installed
            continue
        spec = pkg.spec

        # If a spec fails to concretize, just skip it.  If it is a
        # real error, it will be caught by concretization tests.
        try:
            spec.concretize()
        except Exception:
            continue

        layout.create_install_directory(spec)

        install_dir = layout.path_for_spec(spec)
        spec_path = layout.spec_file_path(spec)

        # Ensure directory has been created in right place.
        assert os.path.isdir(install_dir)
        assert install_dir.startswith(str(tmpdir))

        # Ensure spec file exists when directory is created
        assert os.path.isfile(spec_path)
        assert spec_path.startswith(install_dir)

        # Make sure spec file can be read back in to get the original spec
        spec_from_file = layout.read_spec(spec_path)

        # currently we don't store build dependency information when
        # we write out specs to the filesystem.

        # TODO: fix this when we can concretize more loosely based on
        # TODO: what is installed. We currently omit these to
        # TODO: increase reuse of build dependencies.
        stored_deptypes = ('link', 'run')
        expected = spec.copy(deps=stored_deptypes)
        expected._mark_concrete()

        assert expected.concrete
        assert expected == spec_from_file
        assert expected.eq_dag(spec_from_file)
        assert spec_from_file.concrete

        # Ensure that specs that come out "normal" are really normal.
        with open(spec_path) as spec_file:
            read_separately = Spec.from_yaml(spec_file.read())

        # TODO: revise this when build deps are in dag_hash
        norm = read_separately.normalized().copy(deps=stored_deptypes)
        assert norm == spec_from_file
        assert norm.eq_dag(spec_from_file)

        # TODO: revise this when build deps are in dag_hash
        conc = read_separately.concretized().copy(deps=stored_deptypes)
        assert conc == spec_from_file
        assert conc.eq_dag(spec_from_file)

        assert expected.dag_hash() == spec_from_file.dag_hash()

        # Ensure directories are properly removed
        layout.remove_install_directory(spec)
        assert not os.path.isdir(install_dir)
        assert not os.path.exists(install_dir)