Example #1
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        run_exports = None
        if m.config.use_channeldata:
            channeldata = utils.download_channeldata(pkg.channel)
            # only use channeldata if requested, channeldata exists and contains
            # a packages key, otherwise use run_exports from the packages themselves
            if 'packages' in channeldata:
                pkg_data = channeldata['packages'].get(pkg.name, {})
                run_exports = pkg_data.get('run_exports',
                                           {}).get(pkg.version, {})
        if run_exports is None:
            loc, dist = execute_download_actions(m,
                                                 actions,
                                                 env=env,
                                                 package_subset=pkg)[pkg]
            run_exports = _read_specs_from_package(loc, dist)
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(
                additional_specs, specs)
    return additional_specs
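
For reference, the chained lookups in Example #1 (and in Example #3 below) imply a channeldata layout of roughly packages -> package name -> run_exports -> version -> exports dict. The sketch below shows only that shape; the package name, version, and spec string are made-up placeholders, and just the key path is taken from the code.

# Illustrative only: the mapping shape implied by
# channeldata['packages'][pkg.name]['run_exports'][pkg.version].
# 'libfoo', its version, and the spec string are placeholders.
example_channeldata = {
    'packages': {
        'libfoo': {
            'run_exports': {
                '1.2.3': {'weak': ['libfoo >=1.2.3,<2.0a0']},
            },
        },
    },
}

run_exports = (example_channeldata.get('packages', {})
               .get('libfoo', {})
               .get('run_exports', {})
               .get('1.2.3', {}))
assert run_exports == {'weak': ['libfoo >=1.2.3,<2.0a0']}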
Example #2
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        # reset per package so one package's run_exports (or the fact that
        # channeldata explicitly reported none) cannot leak into the next
        run_exports = {}
        empty_run_exports = False
        channeldata = utils.download_channeldata(pkg.channel)
        if channeldata:
            pkg_data = channeldata.get('packages', {}).get(pkg.name, {})
            run_exports = pkg_data.get('run_exports', {}).get(pkg.version, {})
            empty_run_exports = run_exports == {}
        if not run_exports and not empty_run_exports:
            locs_and_dists = execute_download_actions(
                m, actions, env=env, package_subset=linked_packages)
            locs_and_dists = [v for k, v in locs_and_dists.items() if k == pkg]
            run_exports = _read_specs_from_package(*next(iter(locs_and_dists)))
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(
                additional_specs, specs)
    return additional_specs
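
Across the variants, `execute_download_actions` is consumed as a mapping from each requested package to a `(location, dist)` pair: Example #1 indexes the result by package, Example #5 iterates its values. A minimal stand-in with only that return shape (no real downloads; all names and paths are placeholders):

# Stand-in that mimics only the return shape the callers above rely on:
# {package: (local_package_path, dist_record)}.
def fake_execute_download_actions(m, actions, env=None, package_subset=()):
    return {pkg: ('/tmp/pkgs/{}.tar.bz2'.format(pkg), pkg)
            for pkg in package_subset}

locs_and_dists = fake_execute_download_actions(None, {}, env='host',
                                               package_subset=['libfoo'])
loc, dist = locs_and_dists['libfoo']    # Example #1 indexes by package
pairs = list(locs_and_dists.values())   # Example #5 iterates the values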
Example #3
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))
    additional_specs = {}
    for pkg in linked_packages:
        channeldata = utils.download_channeldata(pkg.channel)
        run_exports = channeldata.get('packages',
                                      {}).get(pkg.name,
                                              {}).get('run_exports',
                                                      {}).get(pkg.version, {})
        specs = _filter_run_exports(run_exports, ignore_list)
        if specs:
            additional_specs = utils.merge_dicts_of_lists(
                additional_specs, specs)
    return additional_specs
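
Every variant builds `explicit_specs` by taking the first whitespace-separated token of each requirement, i.e. the package name of a spec such as 'zlib 1.2.*'; only packages with those names are then kept from the LINK actions. A quick worked example with placeholder specs:

env_specs = ['python >=3.8', 'zlib 1.2.*', 'libfoo']
explicit_specs = [req.split(' ')[0] for req in env_specs]
assert explicit_specs == ['python', 'zlib', 'libfoo']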
Example #4
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config, '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug, verbose=m.config.verbose,
                                      locking=m.config.locking, timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    additional_specs = {}
    for pkg in linked_packages:
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        pkg_loc = find_pkg_dir_or_file_in_pkgs_dirs(pkg_dist, m)

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg],))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = _read_specs_from_package(pkg_loc, pkg_dist)

        additional_specs = utils.merge_dicts_of_lists(additional_specs,
                                                      _filter_run_exports(specs, ignore_list))
    return additional_specs
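
All variants accumulate their results with `utils.merge_dicts_of_lists`, whose implementation is not shown here. Judging from the name and the way it is used, merge semantics along these lines are presumably intended (a sketch, not conda-build's code):

# Assumed behavior: union the keys, concatenating the per-key lists.
def merge_dicts_of_lists_sketch(a, b):
    merged = {k: list(v) for k, v in a.items()}
    for k, v in b.items():
        merged.setdefault(k, []).extend(v)
    return merged

merged = merge_dicts_of_lists_sketch(
    {'weak': ['libfoo >=1.2.3,<2.0a0']},
    {'weak': ['zlib >=1.2.11,<1.3.0a0'], 'strong': ['libbar >=2.0']})
assert merged == {'weak': ['libfoo >=1.2.3,<2.0a0', 'zlib >=1.2.11,<1.3.0a0'],
                  'strong': ['libbar >=2.0']}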
Example #5
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    env_specs = m.meta.get('requirements', {}).get(env, [])
    explicit_specs = [req.split(' ')[0] for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [pkg for pkg in linked_packages if pkg.name in explicit_specs]

    pkg_locs_and_dists = execute_download_actions(m, actions, env=env,
                                                  package_subset=linked_packages)

    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    additional_specs = {}
    for (loc, dist) in pkg_locs_and_dists.values():
        specs = _read_specs_from_package(loc, dist)
        additional_specs = utils.merge_dicts_of_lists(additional_specs,
                                                      _filter_run_exports(specs, ignore_list))
    return additional_specs
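
Examples #1, #2, #4 and #5 delegate the per-package inspection to `_read_specs_from_package(loc, dist)`, whose body is not shown. The final example below inlines the equivalent logic: read `info/run_exports` (one weak spec per line) or the later `info/run_exports.yaml` from the extracted package. A simplified sketch for the extracted-directory case, assuming that file layout:

import os
import yaml

# Simplified sketch of the directory branch that the final example inlines:
# a legacy 'info/run_exports' file holds one weak spec per line, while the
# later 'info/run_exports.yaml' can also carry "strong" run exports.
def read_run_exports_sketch(pkg_dir):
    downstream_file = os.path.join(pkg_dir, 'info', 'run_exports')
    if os.path.isfile(downstream_file):
        with open(downstream_file) as f:
            return {'weak': [line.rstrip() for line in f if line.strip()]}
    if os.path.isfile(downstream_file + '.yaml'):
        with open(downstream_file + '.yaml') as f:
            return yaml.safe_load(f) or {}
    return {}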
Example #6
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    # this attribute is added in the first pass of finalize_outputs_pass
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text())).rstrip()
    raw_specs = {}
    if template_string:
        raw_specs = yaml.safe_load(
            m._get_contents(permit_undefined_jinja=False,
                            template_string=template_string)) or {}

    env_specs = utils.expand_reqs(raw_specs.get('requirements',
                                                {})).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    additional_specs = {}
    for pkg in linked_packages:
        pkg_loc = None
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        for pkgs_dir in _pkgs_dirs:
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')

            if os.path.isdir(pkg_dir):
                pkg_loc = pkg_dir
                break
            elif os.path.isfile(pkg_file):
                pkg_loc = pkg_file
                break

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg], ))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = {}
        if pkg_loc and os.path.isdir(pkg_loc):
            downstream_file = os.path.join(pkg_loc, 'info/run_exports')
            if os.path.isfile(downstream_file):
                with open(downstream_file) as f:
                    specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
            # a later attempt: record more info in the yaml file, to support "strong" run exports
            elif os.path.isfile(downstream_file + '.yaml'):
                with open(downstream_file + '.yaml') as f:
                    specs = yaml.safe_load(f)
        elif pkg_loc and os.path.isfile(pkg_loc):
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            specs_yaml = utils.package_has_file(pkg_loc,
                                                'info/run_exports.yaml')
            if legacy_specs:
                # exclude packages pinning themselves (makes no sense)
                specs = {
                    'weak': [
                        spec.rstrip() for spec in legacy_specs.splitlines()
                        if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                    ]
                }
            elif specs_yaml:
                specs = yaml.safe_load(specs_yaml)

        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs
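
The self-pin exclusion in the tarball branch relies on the name-version-build layout of a dist string: `pkg_dist.rsplit('-', 2)[0]` recovers the package name, so a package that exports a pin on itself is dropped. A worked example with placeholder values:

pkg_dist = 'libfoo-1.2.3-h1234567_0'          # name-version-build string
name = pkg_dist.rsplit('-', 2)[0]
assert name == 'libfoo'

legacy_specs = 'libfoo >=1.2.3,<2.0a0\nzlib >=1.2.11,<1.3.0a0\n'
specs = {'weak': [spec.rstrip() for spec in legacy_specs.splitlines()
                  if not spec.startswith(name)]}
assert specs == {'weak': ['zlib >=1.2.11,<1.3.0a0']}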