Example #1
def test_insert_variant_versions(testing_metadata):
    testing_metadata.meta['requirements']['build'] = ['python', 'numpy 1.13']
    testing_metadata.config.variant = {'python': '2.7', 'numpy': '1.11'}
    utils.insert_variant_versions(testing_metadata, 'build')
    # this one gets inserted
    assert 'python 2.7.*' in testing_metadata.meta['requirements']['build']
    # this one should not be altered
    assert 'numpy 1.13' in testing_metadata.meta['requirements']['build']
    # the overall length does not change
    assert len(testing_metadata.meta['requirements']['build']) == 2
Example #2
def test_insert_variant_versions(testing_metadata):
    testing_metadata.meta['requirements']['build'] = ['python', 'numpy 1.13']
    testing_metadata.config.variant = {'python': '2.7', 'numpy': '1.11'}
    utils.insert_variant_versions(testing_metadata.meta.get('requirements', {}),
                                  testing_metadata.config.variant, 'build')
    # this one gets inserted
    assert 'python 2.7.*' in testing_metadata.meta['requirements']['build']
    # this one should not be altered
    assert 'numpy 1.13' in testing_metadata.meta['requirements']['build']
    # the overall length does not change
    assert len(testing_metadata.meta['requirements']['build']) == 2
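Examples #1 and #2 exercise the same test against what appear to be two revisions of utils.insert_variant_versions: the older call takes the metadata object plus an environment name, while the newer one is passed the requirements mapping and the variant explicitly. The sketch below is a hypothetical minimal re-implementation of just the contract the assertions pin down, assuming the newer three-argument shape; insert_variant_versions_sketch is not the conda_build function.

def insert_variant_versions_sketch(requirements, variant, env):
    # Illustration only; the real conda_build.utils.insert_variant_versions
    # handles more cases (dash/underscore key normalization, regex specs, etc.)
    reqs = requirements.get(env, [])
    for i, spec in enumerate(reqs):
        parts = spec.split()
        name = parts[0]
        # only bare names (no version attached) pick up the variant pin
        if len(parts) == 1 and name in variant:
            reqs[i] = '{} {}.*'.format(name, variant[name])
    requirements[env] = reqs

requirements = {'build': ['python', 'numpy 1.13']}
insert_variant_versions_sketch(
    requirements, {'python': '2.7', 'numpy': '1.11'}, 'build')
assert requirements['build'] == ['python 2.7.*', 'numpy 1.13']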
Example #3
def finalize_metadata(m,
                      parent_metadata=None,
                      permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    # initialized before the skip check so the is_top_level test at the
    #     bottom of this function is always defined
    is_top_level = True
    if m.skip():
        m.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name,
                                            variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc)
                for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        output = parent_metadata.get_rendered_output(m.name(),
                                                     variant=m.config.variant)

        is_top_level = True
        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}
            else:
                is_top_level = False

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(
            m.name()))
        build_unsat, host_unsat = add_upstream_pins(
            m, permit_unsatisfiable_variants, exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(
                r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(
            m,
            pinning_env,
            m.config.variant,
            exclude_pattern=exclude_pattern,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {
            dep.split()[0]: " ".join(dep.split()[1:])
            for dep in full_build_deps
        }
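        # e.g. 'numpy 1.11.3 py27_0' -> {'numpy': '1.11.3 py27_0'};
        #     a bare spec such as 'pip' maps to an empty version string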

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in run_deps
        ]
        versioned_run_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_run_deps
        ]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({
                get_pin_from_build(m, dep, full_build_dep_versions)
                for dep in test_deps
            })
            versioned_test_deps = [
                utils.ensure_valid_spec(spec, warn=True)
                for spec in versioned_test_deps
            ]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
                elif ('git_url' in m.meta['source'] and not (
                        # absolute paths are not relative paths
                        os.path.isabs(m.meta['source']['git_url']) or
                        # real urls are not relative paths
                        ":" in m.meta['source']['git_url'])):
                    m.meta['source']['git_url'] = os.path.normpath(
                        os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn(
                "Returning non-final recipe for {}; one or more dependencies "
                "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    if is_top_level:
        parent_metadata = m
    return m
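The exclude_pattern built near the top of Example #3 only matches a spec whose first token is exactly one of the excluded names; the (?:\s|$|\Z) tail keeps prefix collisions from matching. A small standalone check of that regex, with made-up values:

import re

excludes = {'python', 'numpy'}
exclude_pattern = re.compile(r'|'.join(
    r'(?:^{}(?:\s|$|\Z))'.format(exc) for exc in excludes))

assert exclude_pattern.match('python 2.7.*')    # name plus a constraint
assert exclude_pattern.match('numpy')           # bare name
assert not exclude_pattern.match('numpy-base')  # prefix only, no match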
Example #4
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()
    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should be locked for
        #     future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True,
                       allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            if re.search(r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_text):
                if key in variant:
                    variant_index = squished_variants[key].index(variant[key])
                    zipped_keys = [key]
                    if 'zip_keys' in variant:
                        zip_key_groups = variant['zip_keys']
                        if zip_key_groups and not isinstance(
                                zip_key_groups[0], list):
                            zip_key_groups = [zip_key_groups]
                        for group in zip_key_groups:
                            if key in group:
                                zipped_keys = group
                                break
                    for zipped_key in zipped_keys:
                        conform_dict[zipped_key] = squished_variants[
                            zipped_key][variant_index]

        conform_dict.update({
            key: val
            for key, val in variant.items()
            if key in mv.meta.get('requirements', {}).get('build', []) +
            mv.meta.get('requirements', {}).get('host', [])
        })

        compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                      recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))
                conform_dict['target_platform'] = variant['target_platform']

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        mv.config.variants = conform_variants_to_value(mv.config.variants,
                                                       conform_dict)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (bool(mv.meta.get('source'))
                                and not mv.needs_source_for_render
                                and not os.listdir(mv.config.work_dir))
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv, env)
        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[fm.dist()] = (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
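list_of_dicts_to_dict_of_lists near the top of Example #4 transposes the variant list so that squished_variants[key].index(...) can locate the position of the current variant's value. A sketch of that transposition; squish_sketch is a hypothetical stand-in, not the conda_build helper.

def squish_sketch(variants):
    # transpose a list of variant dicts into one dict of value lists
    squished = {}
    for v in variants:
        for key, val in v.items():
            squished.setdefault(key, []).append(val)
    return squished

variants = [{'python': '2.7', 'numpy': '1.11'},
            {'python': '3.6', 'numpy': '1.16'}]
squished = squish_sketch(variants)
# {'python': ['2.7', '3.6'], 'numpy': ['1.11', '1.16']}
assert squished['python'].index('3.6') == 1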
Example #5
def finalize_metadata(m, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    exclude_pattern = None
    excludes = set(m.config.variant.get('ignore_version', []))

    for key in m.config.variant.get('pin_run_as_build', {}).keys():
        if key in excludes:
            excludes.remove(key)

    output_excludes = set()
    if hasattr(m, 'other_outputs'):
        output_excludes = set(name
                              for (name, variant) in m.other_outputs.keys())

    if excludes or output_excludes:
        exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                               for exc in excludes
                                               | output_excludes))

    # extract the topmost section where variables are defined, and put it on top of the
    #     requirements for a particular output
    # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
    extract_pattern = r'(.*)package:'
    template_string = '\n'.join((
        m.get_recipe_text(extract_pattern=extract_pattern,
                          force_top_level=True),
        # second item: the requirements text for this particular metadata
        #    object (might be output)
        m.extract_requirements_text()))

    requirements = (yaml.safe_load(
        m._get_contents(permit_undefined_jinja=False,
                        template_string=template_string))
                    or {}).get('requirements', {})
    requirements = utils.expand_reqs(requirements)

    if isfile(m.requirements_path) and not requirements.get('run'):
        requirements['run'] = specs_from_url(m.requirements_path)

    rendered_metadata = m.copy()
    rendered_metadata.meta['requirements'] = requirements
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'build')
    utils.insert_variant_versions(rendered_metadata.meta['requirements'],
                                  rendered_metadata.config.variant, 'host')

    build_unsat, host_unsat = add_upstream_pins(rendered_metadata,
                                                permit_unsatisfiable_variants,
                                                exclude_pattern)

    # here's where we pin run dependencies to their build time versions.  This happens based
    #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
    #     names to have this behavior.
    if output_excludes:
        exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                               for exc in output_excludes))
    pinning_env = 'host' if m.is_cross else 'build'

    build_reqs = requirements.get(pinning_env, [])
    # if python is in the build specs, but doesn't have a specific associated
    #    version, make sure to add one
    if build_reqs and 'python' in build_reqs:
        build_reqs.append('python {}'.format(m.config.variant['python']))
        rendered_metadata.meta['requirements'][pinning_env] = build_reqs

    full_build_deps, _, _ = get_env_dependencies(
        rendered_metadata,
        pinning_env,
        rendered_metadata.config.variant,
        exclude_pattern=exclude_pattern,
        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
    full_build_dep_versions = {
        dep.split()[0]: " ".join(dep.split()[1:])
        for dep in full_build_deps
    }

    run_deps = rendered_metadata.meta.get('requirements', {}).get('run', [])

    versioned_run_deps = [
        get_pin_from_build(rendered_metadata, dep, full_build_dep_versions)
        for dep in run_deps
    ]
    versioned_run_deps = [
        utils.ensure_valid_spec(spec, warn=True) for spec in versioned_run_deps
    ]

    requirements = rendered_metadata.meta.get('requirements', {})
    requirements['run'] = versioned_run_deps

    rendered_metadata.meta['requirements'] = requirements

    # append other requirements, such as python.app, appropriately
    rendered_metadata.append_requirements()

    if rendered_metadata.pin_depends == 'strict':
        rendered_metadata.meta['requirements'][
            'run'] = environ.get_pinned_deps(rendered_metadata, 'run')
    test_deps = rendered_metadata.get_value('test/requires')
    if test_deps:
        versioned_test_deps = list({
            get_pin_from_build(m, dep, full_build_dep_versions)
            for dep in test_deps
        })
        versioned_test_deps = [
            utils.ensure_valid_spec(spec, warn=True)
            for spec in versioned_test_deps
        ]
        rendered_metadata.meta['test']['requires'] = versioned_test_deps
    rendered_metadata.meta['extra'][
        'copy_test_source_files'] = m.config.copy_test_source_files

    # if source/path is relative, then the output package makes no sense at all.  The next
    #   best thing is to hard-code the absolute path.  This probably won't exist on any
    #   system other than the original build machine, but at least it will work there.
    if m.meta.get('source'):
        if 'path' in m.meta['source'] and not os.path.isabs(
                m.meta['source']['path']):
            rendered_metadata.meta['source']['path'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['path']))
        elif ('git_url' in m.meta['source'] and not (
                # absolute paths are not relative paths
                os.path.isabs(m.meta['source']['git_url']) or
                # real urls are not relative paths
                ":" in m.meta['source']['git_url'])):
            rendered_metadata.meta['source']['git_url'] = os.path.normpath(
                os.path.join(m.path, m.meta['source']['git_url']))

    if not rendered_metadata.meta.get('build'):
        rendered_metadata.meta['build'] = {}

    if build_unsat or host_unsat:
        rendered_metadata.final = False
        log = utils.get_logger(__name__)
        log.warn("Returning non-final recipe for {}; one or more dependencies "
                 "was unsatisfiable:\nBuild: {}\nHost: {}".format(
                     rendered_metadata.dist(), build_unsat, host_unsat))
    else:
        rendered_metadata.final = True
    return rendered_metadata
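pin_run_as_build entries such as {'min_pin': 'x.x', 'max_pin': 'x.x'} tell the pinning step how many version fields to keep when a run dependency is locked to its build-time version. A rough sketch of that expansion under those assumptions; apply_pin_sketch is hypothetical and simplifies the real get_pin_from_build logic (which also handles epochs, prerelease suffixes, and exact lower bounds).

def apply_pin_sketch(version, min_pin='x.x', max_pin='x.x'):
    # each 'x' keeps one version field; the upper bound bumps the last
    # kept field by one
    parts = version.split('.')
    lower = '.'.join(parts[:min_pin.count('x')])
    upper_parts = parts[:max_pin.count('x')]
    upper_parts[-1] = str(int(upper_parts[-1]) + 1)
    return '>={},<{}'.format(lower, '.'.join(upper_parts))

assert apply_pin_sketch('1.11.3') == '>=1.11,<1.12'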
Example #6
def finalize_metadata(m, parent_metadata=None, permit_unsatisfiable_variants=False):
    """Fully render a recipe.  Fill in versions for build/host dependencies."""
    if not parent_metadata:
        parent_metadata = m
    if m.skip():
        rendered_metadata = m.copy()
        rendered_metadata.final = True
    else:
        exclude_pattern = None
        excludes = set(m.config.variant.get('ignore_version', []))

        for key in m.config.variant.get('pin_run_as_build', {}).keys():
            if key in excludes:
                excludes.remove(key)

        output_excludes = set()
        if hasattr(m, 'other_outputs'):
            output_excludes = set(name for (name, variant) in m.other_outputs.keys())

        if excludes or output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in excludes | output_excludes))

        parent_recipe = m.meta.get('extra', {}).get('parent_recipe', {})

        # extract the topmost section where variables are defined, and put it on top of the
        #     requirements for a particular output
        # Re-parse the output from the original recipe, so that we re-consider any jinja2 stuff
        parent_metadata = parent_metadata.copy()
        parent_metadata.config.variant = m.config.variant
        output = parent_metadata.get_rendered_output(m.name())

        if output:
            if 'package' in output or 'name' not in output:
                # it's just a top-level recipe
                output = {'name': m.name()}

            if not parent_recipe or parent_recipe['name'] == m.name():
                combine_top_level_metadata_with_output(m, output)
            requirements = utils.expand_reqs(output.get('requirements', {}))
            m.meta['requirements'] = requirements

        if m.meta.get('requirements'):
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'build')
            utils.insert_variant_versions(m.meta['requirements'],
                                          m.config.variant, 'host')

        m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        build_unsat, host_unsat = add_upstream_pins(m,
                                                    permit_unsatisfiable_variants,
                                                    exclude_pattern)
        # getting this AFTER add_upstream_pins is important, because that function adds deps
        #     to the metadata.
        requirements = m.meta.get('requirements', {})

        # this is hacky, but it gets the jinja2 things like pin_compatible from the rendered output
        # rerendered_output = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
        # run_reqs = utils.expand_reqs(rerendered_output.meta.get('requirements', {}))
        # run_reqs = run_reqs.get('run', [])
        # if run_reqs:
        #     requirements['run'] = run_reqs
        # m.meta['requirements'] = requirements
        # m.meta['build'] = rerendered_output.meta.get('build', {})

        # here's where we pin run dependencies to their build time versions.  This happens based
        #     on the keys in the 'pin_run_as_build' key in the variant, which is a list of package
        #     names to have this behavior.
        if output_excludes:
            exclude_pattern = re.compile(r'|'.join(r'(?:^{}(?:\s|$|\Z))'.format(exc)
                                            for exc in output_excludes))
        pinning_env = 'host' if m.is_cross else 'build'

        build_reqs = requirements.get(pinning_env, [])
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one
        if build_reqs and 'python' in build_reqs:
            build_reqs.append('python {}'.format(m.config.variant['python']))
            m.meta['requirements'][pinning_env] = build_reqs

        full_build_deps, _, _ = get_env_dependencies(m, pinning_env,
                                        m.config.variant,
                                        exclude_pattern=exclude_pattern,
                                        permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        full_build_dep_versions = {dep.split()[0]: " ".join(dep.split()[1:])
                                   for dep in full_build_deps}

        if isfile(m.requirements_path) and not requirements.get('run'):
            requirements['run'] = specs_from_url(m.requirements_path)
        run_deps = requirements.get('run', [])

        versioned_run_deps = [get_pin_from_build(m, dep, full_build_dep_versions)
                            for dep in run_deps]
        versioned_run_deps = [utils.ensure_valid_spec(spec, warn=True)
                              for spec in versioned_run_deps]
        requirements[pinning_env] = full_build_deps
        requirements['run'] = versioned_run_deps

        m.meta['requirements'] = requirements

        # append other requirements, such as python.app, appropriately
        m.append_requirements()

        if m.pin_depends == 'strict':
            m.meta['requirements']['run'] = environ.get_pinned_deps(
                m, 'run')
        test_deps = m.get_value('test/requires')
        if test_deps:
            versioned_test_deps = list({get_pin_from_build(m, dep, full_build_dep_versions)
                                        for dep in test_deps})
            versioned_test_deps = [utils.ensure_valid_spec(spec, warn=True)
                                for spec in versioned_test_deps]
            m.meta['test']['requires'] = versioned_test_deps
        extra = m.meta.get('extra', {})
        extra['copy_test_source_files'] = m.config.copy_test_source_files
        m.meta['extra'] = extra

        # if source/path is relative, then the output package makes no sense at all.  The next
        #   best thing is to hard-code the absolute path.  This probably won't exist on any
        #   system other than the original build machine, but at least it will work there.
        if m.meta.get('source'):
            if 'path' in m.meta['source']:
                source_path = m.meta['source']['path']
                source_path = os.path.expanduser(source_path)
                if not os.path.isabs(source_path):
                    m.meta['source']['path'] = os.path.normpath(
                        os.path.join(m.path, source_path))
                elif ('git_url' in m.meta['source'] and not (
                        # absolute paths are not relative paths
                        os.path.isabs(m.meta['source']['git_url']) or
                        # real urls are not relative paths
                        ":" in m.meta['source']['git_url'])):
                    m.meta['source']['git_url'] = os.path.normpath(
                        os.path.join(m.path, m.meta['source']['git_url']))

        if not m.meta.get('build'):
            m.meta['build'] = {}

        _simplify_to_exact_constraints(m)

        if build_unsat or host_unsat:
            m.final = False
            log = utils.get_logger(__name__)
            log.warn("Returning non-final recipe for {}; one or more dependencies "
                    "was unsatisfiable:".format(m.dist()))
            if build_unsat:
                log.warn("Build: {}".format(build_unsat))
            if host_unsat:
                log.warn("Host: {}".format(host_unsat))
        else:
            m.final = True
    return m
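The run-dependency pinning in the middle of Example #6 hinges on full_build_dep_versions, a name-to-version map derived from the resolved build (or host) environment. Below is a self-contained sketch of the pass that get_pin_from_build performs over bare run specs; pin_run_deps_sketch is a hypothetical stand-in, not conda_build's function.

def pin_run_deps_sketch(run_deps, build_dep_versions):
    # bare run specs inherit the version resolved at build time;
    # specs that already carry a constraint pass through untouched
    pinned = []
    for dep in run_deps:
        parts = dep.split()
        version = build_dep_versions.get(parts[0], '')
        if len(parts) == 1 and version:
            pinned.append('{} {}'.format(parts[0], version.split()[0]))
        else:
            pinned.append(dep)
    return pinned

versions = {'numpy': '1.11.3 py27_0', 'pip': ''}
assert pin_run_deps_sketch(['numpy', 'six >=1.10'], versions) == \
    ['numpy 1.11.3', 'six >=1.10']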
Example #7
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python',
                                       variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    for variant in variants:
        mv = metadata.copy()
        mv.config.variant = variant
        used_variables = mv.get_used_loop_vars()
        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        # handle grouping from zip_keys for everything in conform_dict
        if 'zip_keys' in variant:
            zip_key_groups = variant['zip_keys']
            if zip_key_groups and not isinstance(zip_key_groups[0], list):
                zip_key_groups = [zip_key_groups]
            for key in list(conform_dict.keys()):
                zipped_keys = None
                for group in zip_key_groups:
                    if key in group:
                        zipped_keys = group
                    if zipped_keys:
                        # here we zip the values of the keys, so that we can match the combination
                        zipped_values = list(
                            zip(*[
                                squished_variants[key] for key in zipped_keys
                            ]))
                        variant_index = zipped_values.index(
                            tuple(variant[key] for key in zipped_keys))
                        for zipped_key in zipped_keys:
                            conform_dict[zipped_key] = squished_variants[
                                zipped_key][variant_index]

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']
        if 'r-base' in build_reqs or 'r-base' in host_reqs:
            conform_dict['r_base'] = variant['r_base']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv.meta.get('requirements', {}),
                                          mv.config.variant, env)

        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[(fm.dist(),
                           fm.config.variant.get('target_platform', fm.config.subdir),
                           tuple((var, fm.config.variant[var])
                                 for var in fm.get_used_loop_vars()))] = \
                                    (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
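The zip_keys handling in Example #7 transposes the zipped columns and finds the row index that matches the current variant, so that every key in a zip group is conformed together. Walking through that index lookup with toy values:

squished_variants = {'python': ['2.7', '3.6'], 'numpy': ['1.11', '1.16']}
zipped_keys = ['python', 'numpy']
variant = {'python': '3.6', 'numpy': '1.16'}

# pair up the columns row by row: [('2.7', '1.11'), ('3.6', '1.16')]
zipped_values = list(zip(*[squished_variants[k] for k in zipped_keys]))
variant_index = zipped_values.index(
    tuple(variant[k] for k in zipped_keys))
assert variant_index == 1  # both keys get conformed at this row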