Example #1
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
        m, 'build', permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        m.meta['requirements']['host'].extend(
            extra_run_specs_from_build.get('strong', []))
        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(
            m, 'host', permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(
            extra_run_specs_from_host.get('strong', []) +
            extra_run_specs_from_host.get('weak', []) +
            extra_run_specs_from_build.get('strong', []))
    else:
        # redo this, but lump in the host deps too, to catch any run_exports stuff that gets merged
        #    when build platform is same as host
        build_deps, build_actions, build_unsat = get_env_dependencies(
            m,
            'build',
            m.config.variant,
            exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        m.config.build_prefix_override = not m.uses_new_style_compiler_activation
        host_deps = []
        host_unsat = []
        extra_run_specs = set(
            extra_run_specs_from_build.get('strong', []) +
            extra_run_specs_from_build.get('weak', []))

    run_deps = extra_run_specs | set(utils.ensure_list(
        requirements.get('run')))

    requirements.update({
        'build':
        [utils.ensure_valid_spec(spec, warn=True) for spec in build_deps],
        'host':
        [utils.ensure_valid_spec(spec, warn=True) for spec in host_deps],
        'run': [utils.ensure_valid_spec(spec, warn=True) for spec in run_deps]
    })

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
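
A note on the merge in the cross branch above: strong run_exports from the
build environment reach both the host and the run sections, while the build
environment's weak exports are dropped as soon as a host section exists. A toy
rerun of just that set arithmetic, with made-up package names:

# Hypothetical run_exports, for illustration only.
extra_run_specs_from_build = {'strong': ['vc 14.*'], 'weak': ['libgcc-ng >=7']}
extra_run_specs_from_host = {'strong': ['libstdcxx-ng >=7'],
                             'weak': ['zlib >=1.2.11,<1.3.0a0']}

# Same expression as the is_cross branch: host strong + host weak + build strong.
extra_run_specs = set(
    extra_run_specs_from_host.get('strong', []) +
    extra_run_specs_from_host.get('weak', []) +
    extra_run_specs_from_build.get('strong', []))

print(sorted(extra_run_specs))
# ['libstdcxx-ng >=7', 'vc 14.*', 'zlib >=1.2.11,<1.3.0a0']
# the build env's weak export (libgcc-ng) does not propagate to run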
Example #2
def _simplify_to_exact_constraints(metadata):
    """
    For metapackages that are pinned exactly, we want to bypass all dependencies that may
    be less exact.
    """
    requirements = metadata.meta.get('requirements', {})
    # collect deps on a per-section basis
    for section in 'build', 'host', 'run':
        deps = utils.ensure_list(requirements.get(section, []))
        deps_dict = defaultdict(list)
        for dep in deps:
            spec_parts = utils.ensure_valid_spec(dep).split()
            name = spec_parts[0]
            if len(spec_parts) > 1:
                deps_dict[name].append(spec_parts[1:])
            else:
                deps_dict[name].append([])

        deps_list = []
        for name, values in deps_dict.items():
            exact_pins = [dep for dep in values if len(dep) > 1]
            if len(values) == 1 and not any(values):
                deps_list.append(name)
            elif exact_pins:
                if not all(pin == exact_pins[0] for pin in exact_pins):
                    raise ValueError("Conflicting exact pins: {}".format(exact_pins))
                else:
                    deps_list.append(' '.join([name] + exact_pins[0]))
            else:
                deps_list.extend(' '.join([name] + dep) for dep in values if dep)
        if section in requirements and deps_list:
            requirements[section] = deps_list
    metadata.meta['requirements'] = requirements
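
To see what the loop above does to a single section, here is the same
collapsing logic rerun standalone on a plain list. This is a sketch: it skips
the utils.ensure_valid_spec normalization the real function applies to each
spec before splitting it.

from collections import defaultdict

def simplify_section(deps):
    """Collapse looser specs into an exact 'name version build' pin."""
    deps_dict = defaultdict(list)
    for dep in deps:
        name, *constraint = dep.split()
        deps_dict[name].append(constraint)
    simplified = []
    for name, values in deps_dict.items():
        exact_pins = [v for v in values if len(v) > 1]
        if len(values) == 1 and not any(values):
            simplified.append(name)              # bare name, no constraints
        elif exact_pins:
            if not all(pin == exact_pins[0] for pin in exact_pins):
                raise ValueError("Conflicting exact pins: {}".format(exact_pins))
            simplified.append(' '.join([name] + exact_pins[0]))
        else:
            simplified.extend(' '.join([name] + v) for v in values if v)
    return simplified

print(simplify_section(['numpy', 'numpy >=1.11', 'numpy 1.16.4 py37_0']))
# ['numpy 1.16.4 py37_0'] -- the exact pin swallows the looser specs
print(simplify_section(['requests']))
# ['requests']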
Example #3
def test_ensure_valid_spec():
    assert utils.ensure_valid_spec('python') == 'python'
    assert utils.ensure_valid_spec('python 2.7') == 'python 2.7.*'
    assert utils.ensure_valid_spec('python 2.7.2') == 'python 2.7.2.*'
    assert utils.ensure_valid_spec('python 2.7.12 0') == 'python 2.7.12 0'
    assert utils.ensure_valid_spec('python >=2.7,<2.8') == 'python >=2.7,<2.8'
    assert utils.ensure_valid_spec('numpy x.x') == 'numpy x.x'
    assert utils.ensure_valid_spec(utils.MatchSpec('numpy x.x')) == utils.MatchSpec('numpy x.x')
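
The assertions pin down the contract: a bare 'name version' spec with no
operators gets '.*' appended so conda treats it as a prefix match, while exact
'name version build' triples, operator expressions, and 'x.x' placeholders
pass through unchanged. Below is a minimal sketch consistent with those
assertions, assuming a simplified string-only view; it is not the conda-build
implementation, which also normalizes MatchSpec objects and can warn when it
rewrites a spec.

import re

# 'name 1.2.3' and nothing else: a plain dotted version that should become
# a prefix match ('name 1.2.3.*').
_bare_version_re = re.compile(r'^(\S+)\s+((?:\d+\.)*\d+)$')

def ensure_valid_spec_sketch(spec):
    if not isinstance(spec, str):
        return spec                      # e.g. a MatchSpec: leave untouched
    match = _bare_version_re.match(spec)
    if match:
        return '{} {}.*'.format(*match.groups())
    return spec

assert ensure_valid_spec_sketch('python') == 'python'
assert ensure_valid_spec_sketch('python 2.7') == 'python 2.7.*'
assert ensure_valid_spec_sketch('python 2.7.12 0') == 'python 2.7.12 0'
assert ensure_valid_spec_sketch('python >=2.7,<2.8') == 'python >=2.7,<2.8'
assert ensure_valid_spec_sketch('numpy x.x') == 'numpy x.x'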
Example #4
def finalize_metadata(m,
                      parent_metadata=None,
                      permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name,
                                            variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(),
                                                     variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(
            m.name()))
        build_unsat, host_unsat = add_upstream_pins(
            m, permit_unsatisfiable_variants, exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m,
            pinning_env,
            m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {
            dep.split()[0]: " ".join(dep.split()[1:])
            for dep in full_build_deps
        }

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in run_deps
        ]
        versioned_run_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_run_deps
        ]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({
                get_pin_from_build(m, dep, full_build_dep_versions)
                for dep in test_deps
            })
            versioned_test_deps = [
                utils.ensure_valid_spec(spec, warn=True)
                for spec in versioned_test_deps
            ]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = os.path.expanduser(m.meta['source']['path'])
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn(
                "Returning non-final recipe for {}; one or more dependencies "
                "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
        # is_top_level is only bound in this branch, so keep the check here;
        # at the old indent it raised NameError whenever m.skip() was True
        if is_top_level:
            parent_metadata = m
    return m
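
One detail worth calling out in the function above: exclude_pattern anchors
each excluded name at the start of a spec and requires whitespace or
end-of-string right after it, so excluding one output never also excludes a
package that merely shares a name prefix. A quick standalone check, with
hypothetical package names:

import re

output_excludes = {'zlib', 'openssl'}
exclude_pattern = re.compile(r'|'.join(
    r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))

assert exclude_pattern.match('zlib')                        # bare name
assert exclude_pattern.match('zlib >=1.2.11,<1.3.0a0')      # name + constraint
assert exclude_pattern.match('zlib-devel 1.2.11') is None   # prefix only: kept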
Example #5
def get_install_actions(prefix,
                        specs,
                        env,
                        retries=0,
                        subdir=None,
                        verbose=True,
                        debug=False,
                        locking=True,
                        bldpkgs_dirs=None,
                        timeout=900,
                        disable_pip=False,
                        max_env_retry=3,
                        output_folder=None,
                        channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir,
                                         list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug,
                                         verbose=verbose,
                                         locking=locking,
                                         timeout=timeout)
    specs = tuple(
        utils.ensure_valid_spec(spec) for spec in specs
        if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions
            and last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls,
                                  disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(
                            locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s. "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            retries=retries + 1,
                            subdir=subdir,
                            verbose=verbose,
                            debug=debug,
                            locking=locking,
                            bldpkgs_dirs=tuple(bldpkgs_dirs),
                            timeout=timeout,
                            disable_pip=disable_pip,
                            max_env_retry=max_env_retry,
                            output_folder=output_folder,
                            channel_urls=tuple(channel_urls))
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(
                        re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep))
                        for dep in specs):
                    actions['LINK'] = [
                        spec for spec in actions['LINK'] if spec.name != pkg
                    ]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls,
                        disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
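
The module-level cached_actions / last_index_ts pair gives get_install_actions
a simple memoization scheme: a solve is reused only while the cached snapshot
is at least as new as the channel index timestamp, and PREFIX is patched on
every hit because the target prefix is deliberately left out of the cache key.
A reduced model of that scheme, with toy names rather than the conda
internals:

cached_actions = {}
last_index_ts = 0

def solve_cached(key, prefix, index_ts, solve):
    """Memoize solver results; invalidate when the index moves forward."""
    global last_index_ts
    if key in cached_actions and last_index_ts >= index_ts:
        actions = cached_actions[key].copy()
        actions['PREFIX'] = prefix   # key omits the prefix, so re-point it
        return actions
    actions = solve()                # the expensive dependency solve
    cached_actions[key] = actions.copy()
    last_index_ts = index_ts
    return actions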
Example #6
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))

    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)

    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name
                              for (name, variant) in m.other_outputs.keys())

    if excludes or output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes
                                              | output_excludes))

    # extract the topmost section where variables are defined, and put it on top of the
    #     requirements for a particular output
    # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text()))

    requirements = (yaml.safe_load(
        m._get_contents(permit_undefined_jinja=False,
                        template_string=template_string))
                    or {}).get('requirements', {})
    requirements = utils.expand_reqs(requirements)

    if isfile(m.requirements_path) and not requirements.get('run'):
        requirements['run'] = specs_from_url(m.requirements_path)

    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'build')
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'host')

    build_unsat, host_unsat = add_upstream_pins(rendered_metadata,
                                                permit_unsatisfiable_variants,
                                                exclude_pattern)

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    if output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'

    build_reqs = requirements.get(pinning_env, [])
    # if python is in the build specs, but doesn't have a specific associated
    #    version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
        rendered_metadata.meta['requirements'][pinning_env] = build_reqs

    full_build_deps, _, _ = get_env_dependencies(
        rendered_metadata,
        pinning_env,
        rendered_metadata.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {
        dep.split()[0]: " ".join(dep.split()[1:])
        for dep in full_build_deps
    }

    run_deps = rendered_metadata.meta.get('requirements', {}).get('run', [])

    versioned_run_deps = [
        get_pin_from_build(rendered_metadata, dep, full_build_dep_versions)
        for dep in run_deps
    ]
    versioned_run_deps = [
        utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps
    ]

    requirements = rendered_metadata.meta.get('requirements', {})
    requirements['run'] = versioned_run_deps

    rendered_metadata.meta['requirements'] = requirements

    # append other requirements, such as python.app, appropriately
    rendered_metadata.append_requirements()

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements'][
            'run'] = environ.get_pinned_deps(rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in test_deps
        })
        versioned_test_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_test_deps
        ]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    rendered_metadata.meta['extra'][
        'copy_test_source_files'] = m.config.copy_test_source_files

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
Example #7
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))

    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)

    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name
                              for (name, variant) in m.other_outputs.keys())

    if excludes or output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in excludes
                                              | output_excludes))

    build_reqs = m.meta.get('requirements', {}).get('build', [])
    # if python is in the build specs, but doesn't have a specific associated
    #    version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
        m.meta['requirements']['build'] = build_reqs

    # if we have host deps, they're more important than the build deps.
    build_deps, build_actions, build_unsat = get_env_dependencies(
        m,
        'build',
        m.config.variant,
        exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)

    extra_run_specs_from_build = get_upstream_pins(m, build_actions, 'build')

    # is there a 'host' section?
    if m.is_cross:
        host_reqs = m.get_value('requirements/host')
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if host_reqs:
            if 'python' in host_reqs:
                host_reqs.append('python {}'.format(
                    m.config.variant['python']))
            host_reqs.extend(extra_run_specs_from_build.get('strong', []))
            m.meta['requirements']['host'] = [
                utils.ensure_valid_spec(spec) for spec in host_reqs
            ]
        host_deps, host_actions, host_unsat = get_env_dependencies(
            m,
            'host',
            m.config.variant,
            exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        # extend host deps with strong build run exports.  This is important for things like
        #    vc feature activation to work correctly in the host env.
        extra_run_specs_from_host = get_upstream_pins(m, host_actions, 'host')
        extra_run_specs = set(
            extra_run_specs_from_host.get('strong', []) +
            extra_run_specs_from_host.get('weak', []) +
            extra_run_specs_from_build.get('strong', []))
    else:
        m.config.build_prefix_override = not m.uses_new_style_compiler_activation
        host_deps = []
        host_unsat = None
        extra_run_specs = (extra_run_specs_from_build.get('strong', []) +
                           extra_run_specs_from_build.get('weak', []))

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    requirements = m.meta.get('requirements', {})
    run_deps = requirements.get('run', [])
    if output_excludes:
        exclude_pattern = re.compile('|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                              for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'
    full_build_deps, _, _ = get_env_dependencies(
        m,
        pinning_env,
        m.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {
        dep.split()[0]: " ".join(dep.split()[1:])
        for dep in full_build_deps
    }
    versioned_run_deps = [
        get_pin_from_build(m, dep, full_build_dep_versions) for dep in run_deps
    ]
    versioned_run_deps.extend(extra_run_specs)
    versioned_run_deps = [
        utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps
    ]

    for _env, values in (('build', build_deps), ('host', host_deps),
                         ('run', versioned_run_deps)):
        if values:
            requirements[_env] = list({strip_channel(dep) for dep in values})
    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements'][
            'run'] = environ.get_pinned_deps(rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in test_deps
        })
        versioned_test_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_test_deps
        ]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    rendered_metadata.meta['extra'][
        'copy_test_source_files'] = m.config.copy_test_source_files

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
Example #8
def get_install_actions(prefix, specs, env, retries=0, subdir=None,
                        verbose=True, debug=False, locking=True,
                        bldpkgs_dirs=None, timeout=90, disable_pip=False,
                        max_env_retry=3, output_folder=None, channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if verbose:
        capture = contextlib.contextmanager(lambda: (yield))
    elif debug:
        capture = contextlib.contextmanager(lambda: (yield))
        conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder,
                                      channel_urls=channel_urls, debug=debug, verbose=verbose,
                                      locking=locking, timeout=timeout)
    specs = tuple(utils.ensure_valid_spec(spec) for spec in specs)

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and
            last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except NoPackagesFoundError as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError,
                        CondaError, AssertionError) as exc:
                    if 'lock' in str(exc):
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                    elif ('requires a minimum conda version' in str(exc) or
                            'link a source that does not' in str(exc) or
                            isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn("I think conda ended up with a partial extraction for %s. "
                                        "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn("failed to get install actions, retrying.  exception was: %s",
                                str(exc))
                        actions = get_install_actions(prefix, tuple(specs), env,
                                                      retries=retries + 1,
                                                      subdir=subdir,
                                                      verbose=verbose,
                                                      debug=debug,
                                                      locking=locking,
                                                      bldpkgs_dirs=tuple(bldpkgs_dirs),
                                                      timeout=timeout,
                                                      disable_pip=disable_pip,
                                                      max_env_retry=max_env_retry,
                                                      output_folder=output_folder,
                                                      channel_urls=tuple(channel_urls))
                    else:
                        log.error("Failed to get install actions, max retries exceeded.")
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(re.match('^%s(?:$| .*)' % pkg, str(dep)) for dep in specs):
                    actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
Example #9
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        # mark m itself final; the function returns m, so setting the flag
        # on a throwaway copy here would be lost
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # this is hacky, but it gets the jinja2 things like pin_compatible from the rendered output
        # rerendered_output = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        # run_reqs = utils.expand_reqs(rerendered_output.meta.get('requirements', {}))
        # run_reqs = run_reqs.get('run', [])
        # if run_reqs:
        #     requirements['run'] = run_reqs
        # m.meta['requirements'] = requirements
        # m.meta['build'] = rerendered_output.meta.get('build', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                        m.config.variant,
                                        exclude_pattern=exclude_pattern,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                            for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = os.path.expanduser(m.meta['source']['path'])
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                    "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m