Example #1
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                                    permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        # ensure host_reqs is present, so in-place modification below is actually in-place
        requirements = m.meta.setdefault('requirements', {})
        requirements['host'] = host_reqs

        if not host_reqs:
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # extend host_reqs in place, so the change is reflected in m.meta
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                                    permit_unsatisfiable_variants, exclude_pattern)
        if m.noarch or m.noarch_python:
            extra_run_specs = set(extra_run_specs_from_host.get('noarch', []))
        else:
            extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                                  extra_run_specs_from_host.get('weak', []) +
                                  extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        if m.noarch or m.noarch_python:
            if m.build_is_host:
                extra_run_specs = set(extra_run_specs_from_build.get('noarch', []))
                # build in two steps: set.update() mutates in place and returns None
                build_deps = set(build_deps or [])
                build_deps.update(extra_run_specs_from_build.get('noarch', []))
            else:
                extra_run_specs = set()
                build_deps = set(build_deps or [])
        else:
            extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
            if m.build_is_host:
                extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
                # build in two steps: set.update() mutates in place and returns None
                build_deps = set(build_deps or [])
                build_deps.update(extra_run_specs_from_build.get('weak', []))
            else:
                host_deps = set(extra_run_specs_from_build.get('strong', []))

    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
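The cross-compiling branch above is easiest to follow with concrete data. Below is a minimal, self-contained sketch (the run_exports dicts are hypothetical inputs, not conda-build API calls) of how strong exports from build deps reach the host section, while the run section collects host strong, host weak, and build strong exports:

# Hypothetical run_exports gathered from build and host dependencies.
extra_run_specs_from_build = {'strong': ['vc >=14'], 'weak': ['libgcc-ng >=7']}
extra_run_specs_from_host = {'strong': ['openssl >=1.1'], 'weak': ['zlib >=1.2']}

# Strong exports from build deps are appended to host requirements...
host_reqs = ['python']
host_reqs.extend(extra_run_specs_from_build.get('strong', []))
print(host_reqs)  # ['python', 'vc >=14']

# ...and run requirements collect host strong + host weak + build strong.
extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                      extra_run_specs_from_host.get('weak', []) +
                      extra_run_specs_from_build.get('strong', []))
print(sorted(extra_run_specs))  # ['openssl >=1.1', 'vc >=14', 'zlib >=1.2']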
Example #2
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
        m, 'build', permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        if m.meta.get('requirements', {}).get('host'):
            host_reqs = m.meta['requirements']['host']
        else:
            # default to an empty list so the extend() below cannot hit an
            # unbound name when no matching output exists
            host_reqs = []
            matching_output = [
                out for out in m.meta.get('outputs', [])
                if out.get('name') == m.name()
            ]
            if matching_output:
                requirements = utils.expand_reqs(
                    matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # extend host_reqs in place, so the change is reflected in m.meta
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(
            m, 'host', permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(
            extra_run_specs_from_host.get('strong', []) +
            extra_run_specs_from_host.get('weak', []) +
            extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if not m.uses_new_style_compiler_activation:
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))

    run_deps = extra_run_specs | set(utils.ensure_list(
        requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps),
                          ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
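The "in-place" comment above is doing real work: host_reqs must alias the very list stored in the metadata, or the extend() is lost. A minimal sketch of that aliasing, using plain dicts rather than conda-build objects:

# setdefault returns the same list object stored in the dict, so mutating
# it through the alias is visible in the metadata.
meta = {'requirements': {}}
host_reqs = meta['requirements'].setdefault('host', [])
host_reqs.extend(['vc >=14'])
print(meta)  # {'requirements': {'host': ['vc >=14']}}

# Rebinding the name instead creates a new list; the metadata is untouched.
host_reqs = host_reqs + ['zlib >=1.2']
print(meta)  # still {'requirements': {'host': ['vc >=14']}}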
Example #3
def add_upstream_pins(m, permit_unsatisfiable_variants, exclude_pattern):
    """Applies run_exports from any build deps to host and run sections"""
    # if we have host deps, they're more important than the build deps.
    requirements = m.meta.get('requirements', {})
    build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(m, 'build',
                                                    permit_unsatisfiable_variants, exclude_pattern)

    # is there a 'host' section?
    if m.is_cross:
        # this must come before we read upstream pins, because it will enforce things
        #      like vc version from the compiler.
        host_reqs = utils.ensure_list(m.get_value('requirements/host'))
        # ensure host_reqs is present, so in-place modification below is actually in-place
        requirements = m.meta.setdefault('requirements', {})
        requirements['host'] = host_reqs

        if not host_reqs:
            matching_output = [out for out in m.meta.get('outputs', []) if
                               out.get('name') == m.name()]
            if matching_output:
                requirements = utils.expand_reqs(matching_output[0].get('requirements', {}))
                matching_output[0]['requirements'] = requirements
                host_reqs = requirements.setdefault('host', [])
        # extend host_reqs in place, so the change is reflected in m.meta
        host_reqs.extend(extra_run_specs_from_build.get('strong', []))

        host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(m, 'host',
                                                    permit_unsatisfiable_variants, exclude_pattern)
        extra_run_specs = set(extra_run_specs_from_host.get('strong', []) +
                              extra_run_specs_from_host.get('weak', []) +
                              extra_run_specs_from_build.get('strong', []))
    else:
        host_deps = []
        host_unsat = []
        extra_run_specs = set(extra_run_specs_from_build.get('strong', []))
        if m.build_is_host:
            extra_run_specs.update(extra_run_specs_from_build.get('weak', []))
            # build in two steps: set.update() mutates in place and returns None
            build_deps = set(build_deps or [])
            build_deps.update(extra_run_specs_from_build.get('weak', []))
        else:
            host_deps = set(extra_run_specs_from_build.get('strong', []))

    run_deps = extra_run_specs | set(utils.ensure_list(requirements.get('run')))

    for section, deps in (('build', build_deps), ('host', host_deps), ('run', run_deps)):
        if deps:
            requirements[section] = list(deps)

    m.meta['requirements'] = requirements
    return build_unsat, host_unsat
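The build_deps fixes in Examples #1 and #3 come down to one Python pitfall: set.update() mutates in place and returns None, so chaining it off the constructor throws the set away. A short demonstration:

# Chaining update() off the constructor binds None, not the merged set.
build_deps = set(['python']).update(['zlib'])
print(build_deps)  # None

# The two-step form keeps the set.
build_deps = set(['python'])
build_deps.update(['zlib'])
print(sorted(build_deps))  # ['python', 'zlib']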
Example #4
def finalize_metadata(m,
                      parent_metadata=None,
                      permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    # initialize here: the check at the bottom runs even when the recipe is skipped
    is_top_level = True
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name,
                                            variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(),
                                                     variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(
            m.name()))
        build_unsat, host_unsat = add_upstream_pins(
            m, permit_unsatisfiable_variants, exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m,
            pinning_env,
            m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {
            dep.split()[0]: " ".join(dep.split()[1:])
            for dep in full_build_deps
        }

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in run_deps
        ]
        versioned_run_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_run_deps
        ]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({
                get_pin_from_build(m, dep, full_build_dep_versions)
                for dep in test_deps
            })
            versioned_test_deps = [
                utils.ensure_valid_spec(spec, warn=True)
                for spec in versioned_test_deps
            ]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = os.path.expanduser(m.meta['source']['path'])
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn(
                "Returning non-final recipe for {}; one or more dependencies "
                "were unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    if is_top_level:
        parent_metadata = m
    return m
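The exclude_pattern built above matches a spec only when the excluded name is the entire first token, so excluding 'python' does not accidentally catch 'python-dateutil'. A small standalone check (the package names here are hypothetical):

import re

excludes = {'python', 'numpy'}
exclude_pattern = re.compile(r'|'.join(
    r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in excludes))

for spec in ('python 3.8', 'python-dateutil', 'numpy', 'scipy 1.5'):
    print(spec, '->', bool(exclude_pattern.match(spec)))
# python 3.8 -> True
# python-dateutil -> False
# numpy -> True
# scipy 1.5 -> False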
Example #5
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))

    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)

    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name
                              for (name, variant) in m.other_outputs.keys())

    if excludes or output_excludes:
        exclude_pattern = re.compile(r'|'.join(
            r'(?:^{}(?:\s|$|\Z))'.format(exc)
            for exc in excludes | output_excludes))

    # extract the topmost section where variables are defined, and put it on top of the
    #     requirements for a particular output
    # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text()))

    requirements = (yaml.safe_load(
        m._get_contents(permit_undefined_jinja=False,
                        template_string=template_string))
                    or {}).get('requirements', {})
    requirements = utils.expand_reqs(requirements)

    if isfile(m.requirements_path) and not requirements.get('run'):
        requirements['run'] = specs_from_url(m.requirements_path)

    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'build')
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'host')

    build_unsat, host_unsat = add_upstream_pins(rendered_metadata,
                                                permit_unsatisfiable_variants,
                                                exclude_pattern)

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    if output_excludes:
        exclude_pattern = re.compile(r'|'.join(
            r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'

    build_reqs = requirements.get(pinning_env, [])
    # if python is in the build specs, but doesn't have a specific associated
    #    version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
        rendered_metadata.meta['requirements'][pinning_env] = build_reqs

    full_build_deps, _, _ = get_env_dependencies(
        rendered_metadata,
        pinning_env,
        rendered_metadata.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {
        dep.split()[0]: " ".join(dep.split()[1:])
        for dep in full_build_deps
    }

    run_deps = rendered_metadata.meta.get('requirements', {}).get('run', [])

    versioned_run_deps = [
        get_pin_from_build(rendered_metadata, dep, full_build_dep_versions)
        for dep in run_deps
    ]
    versioned_run_deps = [
        utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps
    ]

    requirements = rendered_metadata.meta.get('requirements', {})
    requirements['run'] = versioned_run_deps

    rendered_metadata.meta['requirements'] = requirements

    # append other requirements, such as python.app, appropriately
    rendered_metadata.append_requirements()

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements'][
            'run'] = environ.get_pinned_deps(rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({
            get_pin_from_build(rendered_metadata, dep, full_build_dep_versions)
            for dep in test_deps
        })
        versioned_test_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_test_deps
        ]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    extra = rendered_metadata.meta.get('extra', {})
    extra['copy_test_source_files'] = m.config.copy_test_source_files
    rendered_metadata.meta['extra'] = extra

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
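The run-dep pinning step maps each run spec through the exact versions resolved for the build (or host) environment. get_pin_from_build itself applies the variant's pin_run_as_build rules; the helper below is a simplified, hypothetical stand-in that only pins bare names, just to show the shape of the data flowing through full_build_dep_versions:

# Exact resolved build deps come back as 'name version build_string'.
full_build_deps = ['python 3.8.5 h12345_0', 'zlib 1.2.11 0', 'make 4.3 0']
full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                           for dep in full_build_deps}

def pin_from_build(dep, versions):
    # Hypothetical helper: leave already-constrained specs alone, pin bare
    # names that were present at build time to their build-time version.
    name = dep.split()[0]
    if dep != name or name not in versions:
        return dep
    return '{} {}'.format(name, versions[name].split()[0])

print([pin_from_build(d, full_build_dep_versions)
       for d in ['python', 'zlib >=1.2', 'requests']])
# ['python 3.8.5', 'zlib >=1.2', 'requests']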
Example #6
def get_upstream_pins(m, actions, env):
    """Download packages from specs, then inspect each downloaded package for additional
    downstream dependency specs.  Return these additional specs."""

    # this attribute is added in the first pass of finalize_outputs_pass
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text())).rstrip()
    raw_specs = {}
    if template_string:
        raw_specs = yaml.safe_load(
            m._get_contents(permit_undefined_jinja=False,
                            template_string=template_string)) or {}

    env_specs = utils.expand_reqs(raw_specs.get('requirements',
                                                {})).get(env, [])
    explicit_specs = [req.split(' ')[0]
                      for req in env_specs] if env_specs else []
    linked_packages = actions.get('LINK', [])
    linked_packages = [
        pkg for pkg in linked_packages if pkg.name in explicit_specs
    ]

    # edit the plan to download all necessary packages
    for key in ('LINK', 'EXTRACT', 'UNLINK'):
        if key in actions:
            del actions[key]
    # this should be just downloading packages.  We don't need to extract them -
    #    we read contents directly

    index, index_ts = get_build_index(getattr(m.config,
                                              '{}_subdir'.format(env)),
                                      bldpkgs_dir=m.config.bldpkgs_dir,
                                      output_folder=m.config.output_folder,
                                      channel_urls=m.config.channel_urls,
                                      debug=m.config.debug,
                                      verbose=m.config.verbose,
                                      locking=m.config.locking,
                                      timeout=m.config.timeout)
    if 'FETCH' in actions or 'EXTRACT' in actions:
        # this is to force the download
        execute_actions(actions, index, verbose=m.config.debug)
    ignore_list = utils.ensure_list(m.get_value('build/ignore_run_exports'))

    _pkgs_dirs = pkgs_dirs + list(m.config.bldpkgs_dirs)
    additional_specs = {}
    for pkg in linked_packages:
        pkg_loc = None
        if hasattr(pkg, 'dist_name'):
            pkg_dist = pkg.dist_name
        else:
            pkg = strip_channel(pkg)
            pkg_dist = pkg.split(' ')[0]
        for pkgs_dir in _pkgs_dirs:
            pkg_dir = os.path.join(pkgs_dir, pkg_dist)
            pkg_file = os.path.join(pkgs_dir, pkg_dist + '.tar.bz2')

            if os.path.isdir(pkg_dir):
                pkg_loc = pkg_dir
                break
            elif os.path.isfile(pkg_file):
                pkg_loc = pkg_file
                break

        # ran through all pkgs_dirs, and did not find package or folder.  Download it.
        # TODO: this is a vile hack reaching into conda's internals. Replace with
        #    proper conda API when available.
        if not pkg_loc and conda_43:
            try:
                # the conda 4.4 API uses a single `link_prefs` kwarg
                # whereas conda 4.3 used `index` and `link_dists` kwargs
                pfe = ProgressiveFetchExtract(link_prefs=(index[pkg], ))
            except TypeError:
                # TypeError: __init__() got an unexpected keyword argument 'link_prefs'
                pfe = ProgressiveFetchExtract(link_dists=[pkg], index=index)
            with utils.LoggingContext():
                pfe.execute()
            for pkg_dir in pkgs_dirs:
                _loc = os.path.join(pkg_dir, index[pkg].fn)
                if os.path.isfile(_loc):
                    pkg_loc = _loc
                    break

        specs = {}
        # pkg_loc can still be None if the package was never found or downloaded
        if pkg_loc and os.path.isdir(pkg_loc):
            downstream_file = os.path.join(pkg_loc, 'info/run_exports')
            if os.path.isfile(downstream_file):
                with open(downstream_file) as f:
                    specs = {'weak': [spec.rstrip() for spec in f.readlines()]}
            # a later attempt: record more info in the yaml file, to support "strong" run exports
            elif os.path.isfile(downstream_file + '.yaml'):
                with open(downstream_file + '.yaml') as f:
                    specs = yaml.safe_load(f)
        elif pkg_loc and os.path.isfile(pkg_loc):
            legacy_specs = utils.package_has_file(pkg_loc, 'info/run_exports')
            specs_yaml = utils.package_has_file(pkg_loc,
                                                'info/run_exports.yaml')
            if legacy_specs:
                # exclude packages pinning themselves (makes no sense)
                specs = {
                    'weak': [
                        spec.rstrip() for spec in legacy_specs.splitlines()
                        if not spec.startswith(pkg_dist.rsplit('-', 2)[0])
                    ]
                }
            elif specs_yaml:
                specs = yaml.safe_load(specs_yaml)

        additional_specs = utils.merge_dicts_of_lists(
            additional_specs, _filter_run_exports(specs, ignore_list))
    return additional_specs
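get_upstream_pins handles two on-disk formats: the legacy plain-text info/run_exports (one weak spec per line) and the newer info/run_exports.yaml, which can carry separate 'strong' and 'weak' lists. A sketch of both parses, with hypothetical file contents:

import yaml

# Legacy format: every line is a weak run export.
legacy_text = "zlib >=1.2.11,<1.3\nlibpng >=1.6\n"
specs = {'weak': [spec.rstrip() for spec in legacy_text.splitlines()]}
print(specs)  # {'weak': ['zlib >=1.2.11,<1.3', 'libpng >=1.6']}

# YAML format: sections distinguish strong from weak exports.
yaml_text = "strong:\n  - vc >=14\nweak:\n  - zlib >=1.2.11\n"
specs = yaml.safe_load(yaml_text)
print(specs)  # {'strong': ['vc >=14'], 'weak': ['zlib >=1.2.11']}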
Example #7
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        # mark m itself final; the function returns m, not a copy
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # this is hacky, but it gets the jinja2 things like pin_compatible from the rendered output
        # rerendered_output = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        # run_reqs = utils.expand_reqs(rerendered_output.meta.get('requirements', {}))
        # run_reqs = run_reqs.get('run', [])
        # if run_reqs:
        #     requirements['run'] = run_reqs
        # m.meta['requirements'] = requirements
        # m.meta['build'] = rerendered_output.meta.get('build', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                        m.config.variant,
                                        exclude_pattern=exclude_pattern,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                              for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                   for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = os.path.expanduser(m.meta['source']['path'])
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
            elif ('git_url' in m.meta['source'] and not (
                    # absolute paths are not relative paths
                    os.path.isabs(m.meta['source']['git_url']) or
                    # real urls are not relative paths
                    ":" in m.meta['source']['git_url'])):
                m.meta['source']['git_url'] = os.path.normpath(
                    os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                    "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m
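One detail shared by Examples #4, #5, and #7: if 'python' appears in the pinned environment as a bare name, a variant-pinned spec is appended so resolution cannot drift across Python versions. A minimal sketch, where variant is a hypothetical stand-in for m.config.variant:

variant = {'python': '3.8'}
build_reqs = ['python', 'pip']
# Only a bare 'python' entry triggers the append; 'python 3.8' would not,
# since the membership test is against the exact string 'python'.
if build_reqs and 'python' in build_reqs:
    build_reqs.append('python {}'.format(variant['python']))
print(build_reqs)  # ['python', 'pip', 'python 3.8']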