Example #1
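# Renders one copy of the recipe metadata per variant and returns a list of
# (metadata, need_source_download, need_reparse_in_env) tuples.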
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        stub_subpackages=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True
    unsatisfiable_variants = []
    packages_needing_building = set()

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants

    if variants:
        recipe_requirements = metadata.extract_requirements_text()
        for variant in variants:
            mv = metadata.copy()

            # this determines which variants were used, and thus which ones should be locked for
            #     future rendering
            mv.final = False
            mv.config.variant = {}
            mv.parse_again(permit_undefined_jinja=True, stub_subpackages=True)
            vars_in_recipe = set(mv.undefined_jinja_vars)

            mv.config.variant = variant
            conform_dict = {}
            for key in vars_in_recipe:
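                # normalize recipe_requirements to the native str type so
                # the regex below behaves the same on Python 2 and 3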
                if PY3 and hasattr(recipe_requirements, 'decode'):
                    recipe_requirements = recipe_requirements.decode()
                elif not PY3 and hasattr(recipe_requirements, 'encode'):
                    recipe_requirements = recipe_requirements.encode()
                # We use this variant in the top-level recipe.
                # constrain the stored variants to only this version in the output
                #     variant mapping
                if re.search(r"\s+\{\{\s*%s\s*(?:.*?)?\}\}" % key,
                             recipe_requirements):
                    conform_dict[key] = variant[key]

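            # find languages referenced via {{ compiler('<lang>') }} so the
            # matching <lang>_compiler and target_platform keys get pinned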
            compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                          recipe_requirements)
            if compiler_matches:
                from conda_build.jinja_context import native_compiler
                for match in compiler_matches:
                    compiler_key = '{}_compiler'.format(match)
                    conform_dict[compiler_key] = variant.get(
                        compiler_key, native_compiler(match, mv.config))
                    conform_dict['target_platform'] = variant[
                        'target_platform']

            build_reqs = mv.meta.get('requirements', {}).get('build', [])
            if 'python' in build_reqs:
                conform_dict['python'] = variant['python']

            mv.config.variants = conform_variants_to_value(
                mv.config.variants, conform_dict)
            # reset this to our current variant to go ahead
            mv.config.variant = variant

            if 'target_platform' in variant:
                mv.config.host_subdir = variant['target_platform']
            if not need_reparse_in_env:
                try:
                    mv.parse_until_resolved(stub_subpackages=stub_subpackages)
                    need_source_download = (
                        bool(mv.meta.get('source'))
                        and not mv.needs_source_for_render
                        and not os.listdir(mv.config.work_dir))
                    # if python is in the build specs, but doesn't have a specific associated
                    #    version, make sure to add one to newly parsed 'requirements/build'.
                    if build_reqs and 'python' in build_reqs:
                        python_version = 'python {}'.format(
                            mv.config.variant['python'])
                        mv.meta['requirements']['build'] = [
                            python_version
                            if re.match('^python(?:$| .*)', pkg) else pkg
                            for pkg in mv.meta['requirements']['build']
                        ]
                    fm = finalize_metadata(mv)
                    rendered_metadata[fm.dist()] = (fm, need_source_download,
                                                    need_reparse_in_env)
                except DependencyNeedsBuildingError as e:
                    unsatisfiable_variants.append(variant)
                    packages_needing_building.update(set(e.packages))
                    if permit_unsatisfiable_variants:
                        rendered_metadata[mv.dist()] = (mv,
                                                        need_source_download,
                                                        need_reparse_in_env)
                    continue
                except exceptions.UnableToParseMissingSetuptoolsDependencies:
                    need_reparse_in_env = True
                except:
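                    # anything else is unexpected; re-raise it unchanged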
                    raise
            else:
                # computes hashes based on whatever the current specs are - not the final specs
                #    This is a deduplication step.  Any variants that end up identical because a
                #    given variant is not used in a recipe are effectively ignored, though we still
                #    pay the price to parse for that variant.
                rendered_metadata[mv.build_id()] = (mv, need_source_download,
                                                    need_reparse_in_env)
    else:
        rendered_metadata['base_recipe'] = (metadata, need_source_download,
                                            need_reparse_in_env)

    if unsatisfiable_variants and not permit_unsatisfiable_variants:
        raise DependencyNeedsBuildingError(packages=packages_needing_building)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
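
A minimal usage sketch for this function, assuming conda-build's MetaData and
get_package_variants helpers (import paths and exact signatures vary between
conda-build versions, so treat these calls as assumptions, not a fixed API):

# Hypothetical driver code; not part of distribute_variants itself.
from conda_build.metadata import MetaData                 # assumed location
from conda_build.variants import get_package_variants     # assumed location

metadata = MetaData('path/to/recipe')
variants = get_package_variants(metadata)
for meta, need_download, need_reparse in distribute_variants(
        metadata, variants, permit_unsatisfiable_variants=True):
    print(meta.dist(), need_download, need_reparse)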
Example #2
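# A later revision of the same function: it conforms zipped keys together
# (zip_keys), applies pin_run_as_build/numpy_xx pinning, and re-renders
# after fetching source when the variant is used inside the source section.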
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
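    # normalize recipe_text to the native str type once, up front, instead
    # of per-key inside the loop as in the earlier revision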
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()
    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should be locked for
        #     future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True,
                       allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            if re.search(r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_text):
                if key in variant:
                    variant_index = squished_variants[key].index(variant[key])
                    zipped_keys = [key]
                    if 'zip_keys' in variant:
                        zip_key_groups = variant['zip_keys']
                        if zip_key_groups and not isinstance(
                                zip_key_groups[0], list):
                            zip_key_groups = [zip_key_groups]
                        for group in zip_key_groups:
                            if key in group:
                                zipped_keys = group
                                break
                    for zipped_key in zipped_keys:
                        conform_dict[zipped_key] = squished_variants[
                            zipped_key][variant_index]

        conform_dict.update({
            key: val
            for key, val in variant.items()
            if key in mv.meta.get('requirements', {}).get('build', []) +
            mv.meta.get('requirements', {}).get('host', [])
        })

        compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                      recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))
                conform_dict['target_platform'] = variant['target_platform']

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']

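        # numpy_xx means the recipe builds against a pinned numpy x.x, so
        # record an equivalent pin_run_as_build entry if one isn't given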
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        mv.config.variants = conform_variants_to_value(mv.config.variants,
                                                       conform_dict)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (bool(mv.meta.get('source'))
                                and not mv.needs_source_for_render
                                and not os.listdir(mv.config.work_dir))
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv, env)
        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[fm.dist()] = (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
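
A standalone sketch of the zip_keys handling above, on toy data: keys zipped
into one group move in lockstep, so conforming one key must also conform its
partners at the same index in the squished (dict-of-lists) variant mapping.

squished = {'python': ['2.7', '3.6'], 'numpy': ['1.11', '1.14']}
variant = {'python': '3.6', 'numpy': '1.14',
           'zip_keys': [['python', 'numpy']]}
key = 'python'
variant_index = squished[key].index(variant[key])          # -> 1
zip_key_groups = variant['zip_keys']
if zip_key_groups and not isinstance(zip_key_groups[0], list):
    zip_key_groups = [zip_key_groups]                      # normalize nesting
zipped_keys = next((group for group in zip_key_groups if key in group), [key])
conform_dict = {k: squished[k][variant_index] for k in zipped_keys}
print(conform_dict)  # {'python': '3.6', 'numpy': '1.14'}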
Example #3
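# A slimmer revision without the zip_keys and pin_run_as_build handling.
# Note that need_reparse_in_env is initialized but never set to True here,
# so the guard around parse_until_resolved always takes the parse branch.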
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_reparse_in_env = False
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should be locked for
        #     future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True,
                       allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            if PY3 and hasattr(recipe_requirements, 'decode'):
                recipe_requirements = recipe_requirements.decode()
            elif not PY3 and hasattr(recipe_requirements, 'encode'):
                recipe_requirements = recipe_requirements.encode()
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            if re.search(r"\s+\{\{\s*%s\s*(?:.*?)?\}\}" % key,
                         recipe_requirements):
                conform_dict[key] = variant[key]

        compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                      recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))
                conform_dict['target_platform'] = variant['target_platform']

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']

        mv.config.variants = conform_variants_to_value(mv.config.variants,
                                                       conform_dict)

        if not need_reparse_in_env:
            mv.parse_until_resolved(
                allow_no_other_outputs=allow_no_other_outputs,
                bypass_env_check=bypass_env_check)
            need_source_download = (bool(mv.meta.get('source'))
                                    and not mv.needs_source_for_render
                                    and not os.listdir(mv.config.work_dir))
            # if python is in the build specs, but doesn't have a specific associated
            #    version, make sure to add one to newly parsed 'requirements/build'.
            for env in ('build', 'host'):
                insert_python_version(mv, env)
            fm = mv.copy()
            # HACK: trick conda-build into thinking this is final, and computing a hash based
            #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
            #     our ability to differentiate configurations
            fm.final = True
            rendered_metadata[fm.dist()] = (mv, need_source_download,
                                            need_reparse_in_env)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
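
A self-contained sketch of the two regex checks used in all three revisions,
on a made-up requirements snippet: the first detects which variant keys are
actually referenced as Jinja expressions, the second extracts the languages
named in compiler() calls.

import re

recipe_requirements = """
requirements:
  build:
    - {{ compiler('c') }}
    - python {{ python }}
    - numpy {{ numpy }}
"""
variant = {'python': '3.6', 'numpy': '1.14', 'openssl': '1.0.2'}

conform_dict = {}
for key in ('python', 'numpy', 'openssl'):
    # only keys that appear as {{ key ... }} in the recipe text are kept
    if re.search(r"\s+\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_requirements):
        conform_dict[key] = variant[key]
print(conform_dict)   # {'python': '3.6', 'numpy': '1.14'}

# languages referenced via {{ compiler('<lang>') }}
print(re.findall(r"compiler\([\'\"](.*)[\'\"].*\)", recipe_requirements))
# ['c']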