Example #1
def actions_to_pins(actions):
    if utils.conda_43():
        spec_name = lambda x: x.dist_name
    else:
        spec_name = lambda x: x
    return [
        ' '.join(spec_name(spec).split()[0].rsplit('-', 2))
        for spec in actions['LINK']
    ]
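A note on the rsplit('-', 2) idiom above: it converts a canonical dist name such as numpy-1.13.1-py36_0 back into the space-separated "name version build" pin that conda specs use, without breaking hyphenated package names. A minimal, self-contained illustration (the sample dist names are made up):

def dist_name_to_pin(dist_name):
    # "numpy-1.13.1-py36_0" -> ["numpy", "1.13.1", "py36_0"] -> "numpy 1.13.1 py36_0"
    return ' '.join(dist_name.rsplit('-', 2))

assert dist_name_to_pin('numpy-1.13.1-py36_0') == 'numpy 1.13.1 py36_0'
# splitting from the right keeps hyphenated package names intact:
assert dist_name_to_pin('scikit-learn-0.19.0-np113py36_0') == 'scikit-learn 0.19.0 np113py36_0'
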
Example #2
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(suffix=random_string) as tmpdir:
        actions = environ.get_install_actions(tmpdir, index, dependencies,
                                            m.config)
        additional_specs = []
        linked_packages = actions['LINK']
        # edit the plan to download all necessary packages
        for key in ('LINK', 'EXTRACT', 'UNLINK'):
            if key in actions:
                del actions[key]
        # this should be just downloading packages.  We don't need to extract them -
        #    we read contents directly
        if actions:
            plan.execute_actions(actions, index, verbose=m.config.debug)

            pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
            for pkg in linked_packages:
                for pkgs_dir in pkgs_dirs:
                    if hasattr(pkg, 'dist_name'):
                        pkg_dist = pkg.dist_name
                    else:
                        pkg = strip_channel(pkg)
                        pkg_dist = pkg.split(' ')[0]

                    pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                    pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                    if os.path.isdir(pkg_dir):
                        downstream_file = os.path.join(pkg_dir, 'info/pin_downstream')
                        if os.path.isfile(downstream_file):
                            additional_specs.extend(open(downstream_file).read().splitlines())
                        break
                    elif os.path.isfile(pkg_file):
                        extra_specs = utils.package_has_file(pkg_file, 'info/pin_downstream')
                        if extra_specs:
                            additional_specs.extend(extra_specs.splitlines())
                        break
                    elif utils.conda_43():
                        # TODO: this is a vile hack reaching into conda's internals. Replace with
                        #    proper conda API when available.
                        try:
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                        index=index)
                            pfe.execute()
                            for pkgs_dir in pkgs_dirs:
                                pkg_file = os.path.join(pkgs_dir, pkg.dist_name + '.tar.bz2')
                                if os.path.isfile(pkg_file):
                                    extra_specs = utils.package_has_file(pkg_file,
                                                                        'info/pin_downstream')
                                    if extra_specs:
                                        additional_specs.extend(extra_specs.splitlines())
                                    break
                            break
                        except KeyError:
                            raise DependencyNeedsBuildingError(packages=[pkg.name])
                else:
                    raise RuntimeError("Didn't find expected package {} in package cache ({})"
                                        .format(pkg_dist, pkgs_dirs))

    return additional_specs
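strip_channel(dep), applied above to each dependency spec, is assumed to drop an optional "channel::" prefix so that only "name version build" remains. A rough sketch of that behaviour, illustrative rather than the actual conda-build helper:

def strip_channel(spec_str):
    # "defaults::numpy 1.13.1 py36_0" -> "numpy 1.13.1 py36_0"
    if '::' in spec_str:
        spec_str = spec_str.split('::', 1)[-1]
    return spec_str
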
Example #3
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    # Add _tmp here to prevent creating the build_prefix too early. This is because, when
    # dirty is set, we skip calling create_env if the folder already exists.
    actions = environ.get_install_actions(m.config.build_prefix[:-4] + "_tmp",
                                          index, dependencies, m.config)
    additional_specs = []
    linked_packages = actions['LINK']
    # edit the plan to download all necessary packages
    if 'LINK' in actions:
        del actions['LINK']
    if 'EXTRACT' in actions:
        del actions['EXTRACT']
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if actions:
        plan.execute_actions(actions, index, verbose=m.config.debug)

        pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir,
                                                   'info/pin_downstream')
                    if os.path.isfile(downstream_file):
                        additional_specs.extend(
                            open(downstream_file).read().splitlines())
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/pin_downstream')
                    if extra_specs:
                        additional_specs.extend(extra_specs.splitlines())
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with proper
                    #    conda API when available.
                    try:
                        pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                      index=index)
                        pfe.execute()
                        for pkgs_dir in pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/pin_downstream')
                                if extra_specs:
                                    additional_specs.extend(
                                        extra_specs.splitlines())
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, pkgs_dirs))

    return additional_specs
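The hasattr(pkg, 'dist_name') branch above copes with the two shapes an actions['LINK'] entry can take: newer conda versions yield Dist-like objects exposing a dist_name attribute, while older versions yield plain spec strings. A minimal sketch of that normalization; the helper name _dist_string is illustrative, not part of conda-build, and it mirrors the original code in returning only the package name for the string case:

def _dist_string(pkg):
    # Dist-like objects (conda >= 4.3) carry the canonical
    # "name-version-build" string on .dist_name.
    if hasattr(pkg, 'dist_name'):
        return pkg.dist_name
    # Older conda yields strings such as "defaults::numpy 1.13.1 py36_0";
    # drop the channel prefix and keep only the leading package name.
    return strip_channel(pkg).split(' ')[0]
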
Example #4
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)

    if actions:
        execute_actions(actions, index, verbose=m.config.debug)
        ignore_list = utils.ensure_list(
            m.get_value('build/ignore_run_exports'))

        _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in _pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                    if os.path.isfile(downstream_file):
                        specs = open(downstream_file).read().splitlines()
                        additional_specs.extend(
                            _filter_run_exports(specs, ignore_list))
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/run_exports')
                    if extra_specs:
                        # exclude packages pinning themselves (makes no sense)
                        extra_specs = [
                            spec for spec in extra_specs.splitlines()
                            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                        ]
                        additional_specs.extend(
                            _filter_run_exports(extra_specs, ignore_list))
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with
                    #    proper conda API when available.
                    try:
                        try:
                            # the conda 4.4 API uses a single `link_prefs` kwarg
                            # whereas conda 4.3 used `index` and `link_dists` kwargs
                            pfe = ProgressiveFetchExtract(
                                link_prefs=(index[pkg], ))
                        except TypeError:
                            # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                          index=index)
                        with utils.LoggingContext():
                            pfe.execute()
                        for pkgs_dir in _pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/run_exports')
                                if extra_specs:
                                    specs = extra_specs.splitlines()
                                    additional_specs.extend(
                                        _filter_run_exports(
                                            specs, ignore_list))
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, _pkgs_dirs))
    return additional_specs
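_filter_run_exports(specs, ignore_list) is assumed to drop any run_exports spec whose package name appears in the recipe's build/ignore_run_exports list. A hedged sketch for the list-of-strings case used in this revision (the helper and its exact matching rules are assumptions, not the real conda-build implementation):

def _filter_run_exports(specs, ignore_list):
    # e.g. ignore_list = ['libstdcxx-ng'] drops 'libstdcxx-ng >=4.9'
    # but keeps 'libpng >=1.6.32,<1.7'
    return [spec for spec in specs
            if spec.split(' ')[0] not in ignore_list]
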
Example #5
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    # this attribute is added in the first pass of finalize_outputs_pass
    raw_specs = (m.original_meta.get('requirements', {}).get(env, [])
                 if hasattr(m, 'original_meta') else [])
    explicit_specs = [req.split(' ')[0] for req in raw_specs]
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    additional_specs = {}
    for pkg in linked_packages:
        pkg_loc = None
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        for pkgs_dir in _pkgs_dirs:
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')

            if os.path.isdir(pkg_dir):
                pkg_loc = pkg_dir
                break
            elif os.path.isfile(pkg_file):
                pkg_loc = pkg_file
                break

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and utils.conda_43():
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg], ))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkgs_dir in pkgs_dirs:
                _loc = os.path.join(pkgs_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = {}
        if pkg_loc and os.path.isdir(pkg_loc):
            downstream_file = os.path.join(pkg_loc, 'info/run_exports')
            if os.path.isfile(downstream_file):
                with open(downstream_file) as f:
                    specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
            # a later attempt: record more info in the yaml file, to support "strong" run exports
            elif os.path.isfile(downstream_file + '.yaml'):
                with open(downstream_file + '.yaml') as f:
                    specs = yaml.safe_load(f)
        elif pkg_loc and os.path.isfile(pkg_loc):
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            specs_yaml = utils.package_has_file(pkg_loc,
                                                'info/run_exports.yaml')
            if legacy_specs:
                # exclude packages pinning themselves (makes no sense)
                specs = {
                    'weak': [
                        spec.rstrip() for spec in legacy_specs.splitlines()
                        if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                    ]
                }
            elif specs_yaml:
                specs = yaml.safe_load(specs_yaml)

        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs
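This revision returns a dict of lists keyed by pin strength rather than a flat list. As an illustration of the data shapes assumed here: a legacy info/run_exports file holds one spec per line and maps to the 'weak' key, while info/run_exports.yaml can carry both 'weak' and 'strong' lists. utils.merge_dicts_of_lists is assumed to union the keys and concatenate the per-key lists; a minimal sketch of that merge:

# legacy info/run_exports (one spec per line) becomes
#   {'weak': ['libpng >=1.6.32,<1.7']}
# info/run_exports.yaml may already hold both strengths, e.g.
#   weak:
#     - libpng >=1.6.32,<1.7
#   strong:
#     - vc 14.*

def merge_dicts_of_lists(a, b):
    # illustrative only: union of keys, lists concatenated per key
    return {key: a.get(key, []) + b.get(key, [])
            for key in set(a) | set(b)}
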
Example #6
import os

import pytest

from conda_build import api
from conda_build.utils import conda_43

thisdir = os.path.dirname(os.path.abspath(__file__))


@pytest.mark.skipif(conda_43(), reason="conda 4.3 removed signing support")
def test_import_sign_key():
    api.import_sign_key(os.path.join(thisdir, 'test_key'))
    keypath = os.path.expanduser("~/.conda/keys/test_key")
    try:
        assert os.path.isfile(keypath)
        assert os.path.isfile(keypath + '.pub')
    finally:
        # clean up whatever was created, even if the assertions fail
        for path in (keypath, keypath + '.pub'):
            if os.path.isfile(path):
                os.remove(path)
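conda_43(), used in the skipif marker above and throughout the earlier examples, gates behaviour on the installed conda version and is assumed to be a plain version comparison. A minimal sketch (not the actual conda_build.utils implementation):

import conda
from distutils.version import LooseVersion

def conda_43():
    # True when the running conda is 4.3 or newer
    return LooseVersion(conda.__version__) >= LooseVersion('4.3')
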
Example #7
def get_upstream_pins(m, actions, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if actions:
        execute_actions(actions, index, verbose=m.config.debug)

        _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in _pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                    if os.path.isfile(downstream_file):
                        additional_specs.extend(
                            open(downstream_file).read().splitlines())
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/run_exports')
                    if extra_specs:
                        # exclude packages pinning themselves (makes no sense)
                        extra_specs = [
                            spec for spec in extra_specs.splitlines()
                            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                        ]
                        additional_specs.extend(extra_specs)
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with
                    #    proper conda API when available.
                    try:
                        pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                      index=index)
                        pfe.execute()
                        for pkgs_dir in _pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/run_exports')
                                if extra_specs:
                                    additional_specs.extend(
                                        extra_specs.splitlines())
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, _pkgs_dirs))

    return additional_specs
Example #8
    assert 'conda_build_version' in about and about[
        'conda_build_version'] == __version__
    assert 'channels' in about and about['channels']
    try:
        assert 'env_vars' in about and about['env_vars']
    except AssertionError:
        # new versions of conda support this, so we should raise errors.
        if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'):
            raise
        else:
            pass

    assert 'root_pkgs' in about and about['root_pkgs']


@pytest.mark.xfail(not conda_43(),
                   reason="new noarch supported starting with conda 4.3")
def test_noarch_python_with_tests(test_config):
    recipe = os.path.join(metadata_dir, "_noarch_python_with_tests")
    api.build(recipe, config=test_config)


def test_noarch_python_1(test_config):
    recipe = os.path.join(metadata_dir, "_noarch_python")
    fn = api.get_output_file_path(recipe, config=test_config)
    api.build(recipe, config=test_config)
    # info/files must exist and be non-empty
    assert package_has_file(fn, 'info/files')
    extra = json.loads(
        package_has_file(fn, 'info/package_metadata.json').decode())
    assert 'noarch' in extra
    assert 'entry_points' in extra['noarch']
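The last two assertions expect info/package_metadata.json to describe the noarch handling, including entry points. A hypothetical shape of that data, consistent with what the assertions check (field names beyond 'noarch' and 'entry_points' are assumptions):

import json

extra = json.loads("""
{
  "noarch": {
    "type": "python",
    "entry_points": ["mycli = mypkg.cli:main"]
  }
}
""")
assert 'noarch' in extra
assert 'entry_points' in extra['noarch']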