Example #1
def get_install_actions(prefix, index, specs, config, retries=0):
    log = utils.get_logger(__name__)
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    actions = {'LINK': []}
    specs = [_ensure_valid_spec(spec) for spec in specs]
    if specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with capture():
            try:
                actions = plan.install_actions(prefix, index, specs)
            except NoPackagesFoundError as exc:
                raise DependencyNeedsBuildingError(exc)
            except (SystemExit, PaddingError, LinkError,
                    DependencyNeedsBuildingError, CondaError,
                    AssertionError) as exc:
                if 'lock' in str(exc):
                    log.warn(
                        "failed to get install actions, retrying.  exception was: %s",
                        str(exc))
                elif ('requires a minimum conda version' in str(exc)
                      or 'link a source that does not' in str(exc)
                      or isinstance(exc, AssertionError)):
                    locks = utils.get_conda_operation_locks(config)
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(
                                pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retries < config.max_env_retry:
                    log.warn(
                        "failed to get install actions, retrying.  exception was: %s",
                        str(exc))
                    actions = get_install_actions(prefix,
                                                  index,
                                                  specs,
                                                  config,
                                                  retries=retries + 1)
                else:
                    log.error(
                        "Failed to get install actions, max retries exceeded.")
                    raise
        if config.disable_pip:
            actions['LINK'] = [
                spec for spec in actions['LINK'] if not spec.startswith('pip-')
                and not spec.startswith('setuptools-')
            ]
    return actions
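
A note on the `capture` selection above: `contextlib.contextmanager(lambda: (yield))` turns a generator lambda into a no-op context manager, so verbose runs leave output untouched while quiet runs swap in `utils.capture`. A minimal standalone sketch of the same trick (not conda-build code):

import contextlib

# The (yield) expression makes the lambda a generator function, which is
# exactly the shape contextlib.contextmanager expects.
noop_capture = contextlib.contextmanager(lambda: (yield))

with noop_capture():
    print("not suppressed")   # output passes through untouched

# On Python 3.7+, contextlib.nullcontext() is the explicit spelling.
with contextlib.nullcontext():
    print("same effect")
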
Example #2
def distribute_variants(metadata, variants, index, permit_unsatisfiable_variants=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True  # default, so the except paths below never see it unbound
    unsatisfiable_variants = []
    packages_needing_building = set()

    for variant in variants:
        mv = metadata.copy()
        # deep copy the sensitive parts to decouple metadata objects
        mv.config = metadata.config.copy()
        mv.config.variant = combine_variants(variant, mv.config.variant)
        mv.final = False

        # TODO: may need to compute new build id, or at least remove any envs before building
        #    another variant

        if 'target_platform' in variant:
            mv.config.host_subdir = variant['target_platform']
        if not need_reparse_in_env:
            try:
                mv.parse_until_resolved()
                need_source_download = (bool(mv.meta.get('source')) and
                                        not mv.needs_source_for_render and
                                        not os.listdir(mv.config.work_dir))
                # this is a bit wasteful.  We don't store the output here - we'll have to recompute
                #    it later.  We don't store it, so that we can have per-subpackage exclusions
                #    from the hash.  Since finalizing brings in *all* build-time packages, not
                #    just the ones from the recipe, it is impossible to remove them in the general
                #    case.  Instead, we just leave the recipe unfinalized until then, so that by
                #    excluding one higher-level package (e.g. python), we also won't include its
                #    deps in the hash
                finalize_metadata(mv, index)
            except DependencyNeedsBuildingError as e:
                unsatisfiable_variants.append(variant)
                packages_needing_building.update(set(e.packages))
                if permit_unsatisfiable_variants:
                    rendered_metadata[mv.dist()] = (mv, need_source_download,
                                                    need_reparse_in_env)
                continue
            except exceptions.UnableToParseMissingSetuptoolsDependencies:
                need_reparse_in_env = True
            except:
                raise

        # computes hashes based on whatever the current specs are - not the final specs
        #    This is a deduplication step.  Any variants that end up identical because a
        #    given variant is not used in a recipe are effectively ignored, though we still pay
        #    the price to parse for that variant.
        rendered_metadata[mv.build_id()] = (mv, need_source_download, need_reparse_in_env)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    return list(rendered_metadata.values())
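
Keying `rendered_metadata` on `mv.build_id()` is what implements the deduplication described in the final comment: two variants that differ only in keys the recipe never uses produce the same build id, and the later entry simply overwrites the earlier one. A toy illustration with a hypothetical `build_id`:

# Hypothetical build ids: variants differing only in unused keys collide.
variants = [
    {"python": "3.9", "numpy": "1.21"},   # numpy unused by this recipe
    {"python": "3.9", "numpy": "1.22"},   # same id as the line above
    {"python": "3.10", "numpy": "1.22"},
]

def build_id(variant):
    # pretend only python feeds the hash for this recipe
    return "pkg-py" + variant["python"].replace(".", "")

rendered_metadata = {}
for v in variants:
    rendered_metadata[build_id(v)] = v    # later duplicates overwrite earlier ones

print(sorted(rendered_metadata))          # ['pkg-py310', 'pkg-py39']
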
Example #3
def distribute_variants(metadata,
                        variants,
                        index,
                        permit_unsatisfiable_variants=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True  # default, so the except paths below never see it unbound
    unsatisfiable_variants = []
    packages_needing_building = set()

    for variant in variants:
        mv = metadata.copy()
        # deep copy the sensitive parts to decouple metadata objects
        mv.config = metadata.config.copy()
        mv.config.variant = combine_variants(variant, mv.config.variant)
        mv.final = False

        # TODO: may need to compute new build id, or at least remove any envs before building
        #    another variant

        if 'target_platform' in variant:
            mv.config.host_subdir = variant['target_platform']
        if not need_reparse_in_env:
            try:
                mv.parse_until_resolved()
                need_source_download = (bool(mv.meta.get('source'))
                                        and not mv.needs_source_for_render
                                        and not os.listdir(mv.config.work_dir))
                mv = finalize_metadata(mv, index)
            except DependencyNeedsBuildingError as e:
                unsatisfiable_variants.append(variant)
                packages_needing_building.update(set(e.packages))
                if permit_unsatisfiable_variants:
                    rendered_metadata[mv.build_id()] = (mv,
                                                        need_source_download,
                                                        need_reparse_in_env)
                continue
            except exceptions.UnableToParseMissingSetuptoolsDependencies:
                need_reparse_in_env = True
            except:
                raise

        # computes hashes based on whatever the current specs are - not the final specs
        #    This is a deduplication step.  Any variants that end up identical because a
        #    given variant is not used in a recipe are effectively ignored, though we still pay
        #    the price to parse for that variant.
        rendered_metadata[mv.build_id()] = (mv, need_source_download,
                                            need_reparse_in_env)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    return list(rendered_metadata.values())
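
The unsatisfiable-variant handling in both versions is a collect-then-raise pattern: each failing variant records the packages that need building and the loop moves on, so a single raise at the end can report every missing package at once (unless `permit_unsatisfiable_variants` lets partial results through). A reduced skeleton of that flow, with stand-in names:

class DependencyNeedsBuildingError(Exception):   # stand-in for the real one
    def __init__(self, packages=()):
        super().__init__(packages)
        self.packages = list(packages)

def finalize(variant):
    if variant == "py27":                        # pretend this one cannot solve
        raise DependencyNeedsBuildingError(packages=["python-2.7"])
    return "finalized-" + variant

def render_all(variants, permit_unsatisfiable_variants=False):
    rendered, needs_building = {}, set()
    for v in variants:
        try:
            rendered[v] = finalize(v)
        except DependencyNeedsBuildingError as e:
            needs_building.update(e.packages)
            continue                             # keep attempting other variants
    if needs_building and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=needs_building)
    return list(rendered.values())

print(render_all(["py36", "py27"], permit_unsatisfiable_variants=True))
# ['finalized-py36']
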
Example #4
def get_env_dependencies(m, env, variant, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    index, index_ts = get_build_index(
        m.config, getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    pass_through_deps = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        elif exclude_pattern.match(spec):
            pass_through_deps.append(spec.split(' ')[0])
    random_string = ''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(10))
    dependencies = list(set(dependencies))
    with TemporaryDirectory(prefix="_", suffix=random_string) as tmpdir:
        try:
            actions = environ.get_install_actions(tmpdir,
                                                  index,
                                                  dependencies,
                                                  m.config,
                                                  timestamp=index_ts)
        except UnsatisfiableError as e:
            # we'll get here if the environment is unsatisfiable
            raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + pass_through_deps, actions
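
The `dash_or_under` comparison treats `-` and `_` as interchangeable when matching variant keys against spec names, since conda package names conventionally use dashes while variant keys use underscores. In isolation:

import re

dash_or_under = re.compile("[-_]")

def names_match(variant_key, spec_name):
    # "scikit_learn" and "scikit-learn" both normalize to "scikitlearn"
    return dash_or_under.sub("", variant_key) == dash_or_under.sub("", spec_name)

print(names_match("scikit_learn", "scikit-learn"))   # True
print(names_match("numpy", "scipy"))                 # False
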
Example #5
def get_env_dependencies(m, env, variant, index=None, exclude_pattern=None):
    dash_or_under = re.compile("[-_]")
    if not index:
        index = get_build_index(m.config,
                                getattr(m.config, "{}_subdir".format(env)))
    specs = [ms.spec for ms in m.ms_depends(env)]
    # replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
    if env == 'build':
        specs = [
            spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
            for spec in specs
        ]
    subpackages = []
    dependencies = []
    # ones that get filtered from actual versioning, to exclude them from the hash calculation
    append_specs = []
    for spec in specs:
        if not exclude_pattern or not exclude_pattern.match(spec):
            is_subpackage = False
            spec_name = spec.split()[0]
            for entry in m.get_section('outputs'):
                name = entry.get('name')
                if name == spec_name:
                    subpackages.append(' '.join((name, m.version())))
                    is_subpackage = True
            if not is_subpackage:
                dependencies.append(spec)
            for key, value in variant.items():
                if dash_or_under.sub("",
                                     key) == dash_or_under.sub("", spec_name):
                    dependencies.append(" ".join((spec_name, value)))
        else:
            append_specs.append(spec)
    prefix = m.config.host_prefix if env == 'host' else m.config.build_prefix
    try:
        actions = environ.get_install_actions(prefix, index, dependencies,
                                              m.config)
    except UnsatisfiableError as e:
        # we'll get here if the environment is unsatisfiable
        raise DependencyNeedsBuildingError(e)

    specs = actions_to_pins(actions)
    return specs + subpackages + append_specs
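
The `x.x` substitution in both versions of `get_env_dependencies` rewrites a legacy `numpy x.x` pin into the variant's concrete version before conda ever sees the spec. In isolation:

variant = {"numpy": "1.16"}
specs = ["numpy x.x", "python >=3.6"]
specs = [spec.replace(' x.x', ' {}'.format(variant.get('numpy', "")))
         for spec in specs]
print(specs)   # ['numpy 1.16', 'python >=3.6']
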
Example #6
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    random_string = ''.join(random.choice(string.ascii_uppercase + string.digits)
                            for _ in range(10))
    with TemporaryDirectory(suffix=random_string) as tmpdir:
        actions = environ.get_install_actions(tmpdir, index, dependencies,
                                              m.config)
        additional_specs = []
        linked_packages = actions['LINK']
        # edit the plan to download all necessary packages
        for key in ('LINK', 'EXTRACT', 'UNLINK'):
            if key in actions:
                del actions[key]
        # this should be just downloading packages.  We don't need to extract them -
        #    we read contents directly
        if actions:
            plan.execute_actions(actions, index, verbose=m.config.debug)

            pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
            for pkg in linked_packages:
                for pkgs_dir in pkgs_dirs:
                    if hasattr(pkg, 'dist_name'):
                        pkg_dist = pkg.dist_name
                    else:
                        pkg = strip_channel(pkg)
                        pkg_dist = pkg.split(' ')[0]

                    pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                    pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                    if os.path.isdir(pkg_dir):
                        downstream_file = os.path.join(pkg_dir, 'info/pin_downstream')
                        if os.path.isfile(downstream_file):
                            additional_specs.extend(open(downstream_file).read().splitlines())
                        break
                    elif os.path.isfile(pkg_file):
                        extra_specs = utils.package_has_file(pkg_file, 'info/pin_downstream')
                        if extra_specs:
                            additional_specs.extend(extra_specs.splitlines())
                        break
                    elif utils.conda_43():
                        # TODO: this is a vile hack reaching into conda's internals. Replace with
                        #    proper conda API when available.
                        try:
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                          index=index)
                            pfe.execute()
                            for pkgs_dir in pkgs_dirs:
                                pkg_file = os.path.join(pkgs_dir, pkg.dist_name + '.tar.bz2')
                                if os.path.isfile(pkg_file):
                                    extra_specs = utils.package_has_file(pkg_file,
                                                                         'info/pin_downstream')
                                    if extra_specs:
                                        additional_specs.extend(extra_specs.splitlines())
                                    break
                            break
                        except KeyError:
                            raise DependencyNeedsBuildingError(packages=[pkg.name])
                else:
                    raise RuntimeError("Didn't find expected package {} in package cache ({})"
                                        .format(pkg_dist, pkgs_dirs))

    return additional_specs
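
The `else:` attached to the inner `for pkgs_dir in pkgs_dirs:` loop is Python's for-else: it runs only when the loop exhausts without hitting `break`, i.e. when no cache directory yielded the package. A stripped-down sketch of the construct:

def find_package(pkg_dist, pkgs_dirs):
    for pkgs_dir in pkgs_dirs:
        if pkg_dist in pkgs_dir:        # stand-in for the isdir/isfile probes
            break                       # found: the else clause is skipped
    else:
        # reached only when the loop finished without a break
        raise RuntimeError("Didn't find expected package {} in package cache ({})"
                           .format(pkg_dist, pkgs_dirs))
    return pkgs_dir

print(find_package("numpy-1.21-py39_0",
                   [["pip-21.0"], ["numpy-1.21-py39_0"]]))
# ['numpy-1.21-py39_0']
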
Example #7
def get_install_actions(prefix,
                        specs,
                        env,
                        retries=0,
                        subdir=None,
                        verbose=True,
                        debug=False,
                        locking=True,
                        bldpkgs_dirs=None,
                        timeout=900,
                        disable_pip=False,
                        max_env_retry=3,
                        output_folder=None,
                        channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir,
                                         list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug,
                                         verbose=verbose,
                                         locking=locking,
                                         timeout=timeout)
    specs = tuple(
        utils.ensure_valid_spec(spec) for spec in specs
        if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions
            and last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls,
                                  disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(
                            locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s. "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            retries=retries + 1,
                            subdir=subdir,
                            verbose=verbose,
                            debug=debug,
                            locking=locking,
                            bldpkgs_dirs=tuple(bldpkgs_dirs),
                            timeout=timeout,
                            disable_pip=disable_pip,
                            max_env_retry=max_env_retry,
                            output_folder=output_folder,
                            channel_urls=tuple(channel_urls))
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(
                        re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep))
                        for dep in specs):
                    actions['LINK'] = [
                        spec for spec in actions['LINK'] if spec.name != pkg
                    ]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls,
                        disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
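
The module-level `cached_actions`/`last_index_ts` pair memoizes solver results on a tuple of hashable inputs and invalidates whenever the build index is newer than the cache; the retry recursion above re-tuples `specs`, `bldpkgs_dirs`, and `channel_urls` for exactly this reason. The cache shape, reduced to essentials (the solver call is a stand-in):

cached_actions = {}
last_index_ts = 0

def solve_cached(specs, env, subdir, index_ts):
    global last_index_ts
    key = (tuple(specs), env, subdir)        # every component must be hashable
    if key in cached_actions and last_index_ts >= index_ts:
        return cached_actions[key].copy()    # hand out copies; callers mutate them
    actions = {"LINK": sorted(specs)}        # stand-in for install_actions(...)
    cached_actions[key] = actions.copy()
    last_index_ts = index_ts
    return actions

print(solve_cached(["python 3.9"], "host", "linux-64", index_ts=1))  # solves
print(solve_cached(["python 3.9"], "host", "linux-64", index_ts=1))  # cache hit
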
Example #8
def get_upstream_pins(m, dependencies, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    dependencies = [strip_channel(dep) for dep in dependencies]
    # Add _tmp here to prevent creating the build_prefix too early. This is because, when
    # dirty is set, we skip calling create_env if the folder already exists.
    actions = environ.get_install_actions(m.config.build_prefix[:-4] + "_tmp",
                                          index, dependencies, m.config)
    additional_specs = []
    linked_packages = actions['LINK']
    # edit the plan to download all necessary packages
    if 'LINK' in actions:
        del actions['LINK']
    if 'EXTRACT' in actions:
        del actions['EXTRACT']
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if actions:
        plan.execute_actions(actions, index, verbose=m.config.debug)

        pkgs_dirs = cc.pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir,
                                                   'info/pin_downstream')
                    if os.path.isfile(downstream_file):
                        additional_specs.extend(
                            open(downstream_file).read().splitlines())
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/pin_downstream')
                    if extra_specs:
                        additional_specs.extend(extra_specs.splitlines())
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with proper
                    #    conda API when available.
                    try:
                        pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                      index=index)
                        pfe.execute()
                        for pkgs_dir in pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/pin_downstream')
                                if extra_specs:
                                    additional_specs.extend(
                                        extra_specs.splitlines())
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, pkgs_dirs))

    return additional_specs
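
`utils.package_has_file` is used throughout these functions to read a single member out of a `.tar.bz2` without unpacking the whole package. A minimal stand-alone equivalent using only the standard library (a sketch assuming the v1 tarball layout; the real helper's return conventions may differ):

import tarfile

def package_has_file(pkg_file, member):
    # Return the member's contents as text, or None if the archive lacks it.
    with tarfile.open(pkg_file, 'r:bz2') as t:
        try:
            return t.extractfile(member).read().decode()
        except KeyError:
            return None
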
Example #9
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)

    if actions:
        execute_actions(actions, index, verbose=m.config.debug)
        ignore_list = utils.ensure_list(
            m.get_value('build/ignore_run_exports'))

        _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in _pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                    if os.path.isfile(downstream_file):
                        specs = open(downstream_file).read().splitlines()
                        additional_specs.extend(
                            _filter_run_exports(specs, ignore_list))
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/run_exports')
                    if extra_specs:
                        # exclude packages pinning themselves (makes no sense)
                        extra_specs = [
                            spec for spec in extra_specs.splitlines()
                            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                        ]
                        additional_specs.extend(
                            _filter_run_exports(extra_specs, ignore_list))
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with
                    #    proper conda API when available.
                    try:
                        try:
                            # the conda 4.4 API uses a single `link_prefs` kwarg
                            # whereas conda 4.3 used `index` and `link_dists` kwargs
                            pfe = ProgressiveFetchExtract(
                                link_prefs=(index[pkg], ))
                        except TypeError:
                            # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                            pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                          index=index)
                        with utils.LoggingContext():
                            pfe.execute()
                        for pkgs_dir in _pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/run_exports')
                                if extra_specs:
                                    specs = extra_specs.splitlines()
                                    additional_specs.extend(
                                        _filter_run_exports(
                                            specs, ignore_list))
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, _pkgs_dirs))
    return additional_specs
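
`_filter_run_exports` is not defined in this excerpt. A plausible minimal version, assuming its only job is to drop specs whose package name appears in the recipe's `build/ignore_run_exports` list:

def _filter_run_exports(specs, ignore_list):
    # Illustrative sketch, not conda-build's actual helper: a spec string
    # looks like "openssl >=1.1.1,<1.1.2", and the package name is the
    # first whitespace-delimited token.
    return [spec for spec in specs
            if spec.split(' ')[0] not in ignore_list]

print(_filter_run_exports(["openssl >=1.1.1", "zlib >=1.2"], ["zlib"]))
# ['openssl >=1.1.1']
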
Example #10
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        stub_subpackages=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True
    unsatisfiable_variants = []
    packages_needing_building = set()

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants

    if variants:
        recipe_requirements = metadata.extract_requirements_text()
        for variant in variants:
            mv = metadata.copy()

            # this determines which variants were used, and thus which ones should be locked for
            #     future rendering
            mv.final = False
            mv.config.variant = {}
            mv.parse_again(permit_undefined_jinja=True, stub_subpackages=True)
            vars_in_recipe = set(mv.undefined_jinja_vars)

            mv.config.variant = variant
            conform_dict = {}
            for key in vars_in_recipe:
                if PY3 and hasattr(recipe_requirements, 'decode'):
                    recipe_requirements = recipe_requirements.decode()
                elif not PY3 and hasattr(recipe_requirements, 'encode'):
                    recipe_requirements = recipe_requirements.encode()
                # We use this variant in the top-level recipe.
                # constrain the stored variants to only this version in the output
                #     variant mapping
                if re.search(r"\s+\{\{\s*%s\s*(?:.*?)?\}\}" % key,
                             recipe_requirements):
                    conform_dict[key] = variant[key]

            compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                          recipe_requirements)
            if compiler_matches:
                from conda_build.jinja_context import native_compiler
                for match in compiler_matches:
                    compiler_key = '{}_compiler'.format(match)
                    conform_dict[compiler_key] = variant.get(
                        compiler_key, native_compiler(match, mv.config))
                    conform_dict['target_platform'] = variant[
                        'target_platform']

            build_reqs = mv.meta.get('requirements', {}).get('build', [])
            if 'python' in build_reqs:
                conform_dict['python'] = variant['python']

            mv.config.variants = conform_variants_to_value(
                mv.config.variants, conform_dict)
            # reset this to our current variant to go ahead
            mv.config.variant = variant

            if 'target_platform' in variant:
                mv.config.host_subdir = variant['target_platform']
            if not need_reparse_in_env:
                try:
                    mv.parse_until_resolved(stub_subpackages=stub_subpackages)
                    need_source_download = (bool(mv.meta.get('source'))
                                            and not mv.needs_source_for_render
                                            and
                                            not os.listdir(mv.config.work_dir))
                    # if python is in the build specs, but doesn't have a specific associated
                    #    version, make sure to add one to newly parsed 'requirements/build'.
                    if build_reqs and 'python' in build_reqs:
                        python_version = 'python {}'.format(
                            mv.config.variant['python'])
                        mv.meta['requirements']['build'] = [
                            python_version
                            if re.match('^python(?:$| .*)', pkg) else pkg
                            for pkg in mv.meta['requirements']['build']
                        ]
                    fm = finalize_metadata(mv)
                    rendered_metadata[fm.dist()] = (fm, need_source_download,
                                                    need_reparse_in_env)
                except DependencyNeedsBuildingError as e:
                    unsatisfiable_variants.append(variant)
                    packages_needing_building.update(set(e.packages))
                    if permit_unsatisfiable_variants:
                        rendered_metadata[mv.dist()] = (mv,
                                                        need_source_download,
                                                        need_reparse_in_env)
                    continue
                except exceptions.UnableToParseMissingSetuptoolsDependencies:
                    need_reparse_in_env = True
                except:
                    raise
            else:
                # computes hashes based on whatever the current specs are - not the final specs
                #    This is a deduplication step.  Any variants that end up identical because a
                #    given variant is not used in a recipe are effectively ignored, though we still
                #    pay the price to parse for that variant.
                rendered_metadata[mv.build_id()] = (mv, need_source_download,
                                                    need_reparse_in_env)
    else:
        rendered_metadata['base_recipe'] = (metadata, need_source_download,
                                            need_reparse_in_env)

    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
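
The `re.search` against `recipe_requirements` above is how a variant key is judged "used": the requirements text must reference it as a Jinja expression such as `{{ python }}`. In isolation:

import re

recipe_requirements = """
requirements:
  build:
    - python {{ python }}
    - numpy {{ numpy }}
"""

for key in ("python", "numpy", "perl"):
    if re.search(r"\s+\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_requirements):
        print(key, "is used in the recipe")    # python, numpy
    else:
        print(key, "is not referenced")        # perl
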
Example #11
def get_install_actions(prefix,
                        index,
                        specs,
                        config,
                        retries=0,
                        timestamp=0,
                        subdir=None):
    global _cached_install_actions
    global _last_timestamp
    log = utils.get_logger(__name__)
    if config.verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    else:
        capture = utils.capture
    actions = {'LINK': []}
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)
    specs = tuple(_ensure_valid_spec(spec) for spec in specs)
    if (specs, subdir, timestamp
        ) in _cached_install_actions and timestamp > _last_timestamp:
        actions = _cached_install_actions[(specs, subdir, timestamp)]
    else:
        if specs:
            # this is hiding output like:
            #    Fetching package metadata ...........
            #    Solving package specifications: ..........
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                    # Experimenting with getting conda to create fewer Resolve objects
                    #   Experiment failed, seemingly due to conda's statefulness.  Packages could
                    #   not be found.
                    # index_timestamp=timestamp)
                except NoPackagesFoundError as exc:
                    # Attempt to skeleton packages it can't find
                    packages = [x.split(" ")[0] for x in exc.pkgs]
                    for pkg in packages:
                        if pkg.startswith("r-"):
                            api.skeletonize([pkg], "cran")
                        else:
                            api.skeletonize([pkg], "pypi")
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(config)
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < config.max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(prefix,
                                                      index,
                                                      specs,
                                                      config,
                                                      retries=retries + 1,
                                                      timestamp=timestamp)
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
            if config.disable_pip:
                actions['LINK'] = [
                    spec for spec in actions['LINK']
                    if not spec.startswith('pip-')
                    and not spec.startswith('setuptools-')
                ]
        utils.trim_empty_keys(actions)
        _cached_install_actions[(specs, subdir, timestamp)] = actions
        _last_timestamp = timestamp
    return actions
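
All three `get_install_actions` variants share the same recovery shape: clean up whatever looked corrupted, then recurse with `retries + 1` until `max_env_retry` is exceeded, at which point the original exception propagates. Reduced to a skeleton with stand-in names:

class TransientSolverError(Exception):
    """Stand-in for the lock/padding/link errors caught above."""

def solver_call(specs):
    raise TransientSolverError("lock contention")   # always fails, for the demo

def fetch_with_retry(specs, retries=0, max_retry=3):
    try:
        return solver_call(specs)
    except TransientSolverError as exc:
        if retries < max_retry:
            print("failed to get install actions, retrying.  exception was:", exc)
            return fetch_with_retry(specs, retries=retries + 1,
                                    max_retry=max_retry)
        raise   # max retries exceeded: let the caller see the failure

# fetch_with_retry(("python",))   # would print three retries, then raise
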
Example #12
def get_upstream_pins(m, actions, index):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""
    additional_specs = []
    linked_packages = actions.get('LINK', [])
    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly
    if actions:
        execute_actions(actions, index, verbose=m.config.debug)

        _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
        for pkg in linked_packages:
            for pkgs_dir in _pkgs_dirs:
                if hasattr(pkg, 'dist_name'):
                    pkg_dist = pkg.dist_name
                else:
                    pkg = strip_channel(pkg)
                    pkg_dist = pkg.split(' ')[0]

                pkg_dir = os.path.join(pkgs_dir, pkg_dist)
                pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')
                if os.path.isdir(pkg_dir):
                    downstream_file = os.path.join(pkg_dir, 'info/run_exports')
                    if os.path.isfile(downstream_file):
                        additional_specs.extend(
                            open(downstream_file).read().splitlines())
                    break
                elif os.path.isfile(pkg_file):
                    extra_specs = utils.package_has_file(
                        pkg_file, 'info/run_exports')
                    if extra_specs:
                        # exclude packages pinning themselves (makes no sense)
                        # splitlines() first: package_has_file returns the
                        #    file contents as a single string, not a list
                        extra_specs = [
                            spec for spec in extra_specs.splitlines()
                            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                        ]
                        additional_specs.extend(extra_specs)
                    break
                elif utils.conda_43():
                    # TODO: this is a vile hack reaching into conda's internals. Replace with
                    #    proper conda API when available.
                    try:
                        pfe = ProgressiveFetchExtract(link_dists=[pkg],
                                                      index=index)
                        pfe.execute()
                        for pkgs_dir in _pkgs_dirs:
                            pkg_file = os.path.join(pkgs_dir,
                                                    pkg.dist_name + '.tar.bz2')
                            if os.path.isfile(pkg_file):
                                extra_specs = utils.package_has_file(
                                    pkg_file, 'info/run_exports')
                                if extra_specs:
                                    additional_specs.extend(
                                        extra_specs.splitlines())
                                break
                        break
                    except KeyError:
                        raise DependencyNeedsBuildingError(packages=[pkg.name])
            else:
                raise RuntimeError(
                    "Didn't find expected package {} in package cache ({})".
                    format(pkg_dist, _pkgs_dirs))

    return additional_specs
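
A detail worth calling out in the self-pin exclusion above: conda dist names have the form `name-version-build`, so `pkg_dist.rsplit('-', 2)[0]` peels off the last two dash-separated fields to recover the package name. For example:

pkg_dist = "openssl-1.0.2n-hb7f436b_0"
print(pkg_dist.rsplit('-', 2)[0])   # 'openssl'

extra_specs = "openssl >=1.0.2\nca-certificates\n"
filtered = [spec for spec in extra_specs.splitlines()
            if not spec.startswith(pkg_dist.rsplit('-', 2)[0])]
print(filtered)                     # ['ca-certificates']
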