Example 1: distribute_variants
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package, and figure out
    # which python version we prefer. `noarch_python_build_age` can be used to tweak which
    # python gets used here.
    if metadata.noarch or metadata.noarch_python:
        from .conda_interface import VersionOrder
        age = int(
            metadata.get_value('build/noarch_python_build_age',
                               metadata.config.noarch_python_build_age))
        versions = []
        for variant in variants:
            if 'python' in variant:
                vo = variant['python']
                if vo not in versions:
                    versions.append(vo)
        version_indices = sorted(
            range(len(versions)),
            key=lambda k: VersionOrder(versions[k].split(' ')[0]))
        if age < 0:
            age = 0
        elif age > len(versions) - 1:
            age = len(versions) - 1
        build_ver = versions[version_indices[len(versions) - 1 - age]]
        variants = filter_by_key_value(variants, 'python', build_ver,
                                       'noarch_python_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        from conda_build.build import get_all_replacements
        get_all_replacements(variant)
        mv = metadata.copy()
        mv.config.variant = variant

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        get_all_replacements(mv.config.variants)
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        try:
            mv.parse_until_resolved(
                allow_no_other_outputs=allow_no_other_outputs,
                bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
                                     (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
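
A minimal usage sketch for this example. The import paths and the recipe
location below are assumptions based on conda-build's module layout, not
something the excerpt shows, so verify them against the conda-build version
you are reading:

# Hypothetical driver code; './recipe' is a placeholder for a directory
# containing meta.yaml.
from conda_build.metadata import MetaData
from conda_build.variants import get_package_variants

metadata = MetaData('./recipe')
variants = get_package_variants(metadata)
rendered = distribute_variants(metadata, variants,
                               allow_no_other_outputs=True,
                               bypass_env_check=True)
# Each entry is a (metadata, need_download, need_reparse_in_env) tuple.
for mv, need_download, need_reparse in rendered:
    print(mv.dist(), need_download, need_reparse)
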
Example 2: distribute_variants
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()
    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should be locked for
        #     future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True,
                       allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            if re.search(r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_text):
                if key in variant:
                    variant_index = squished_variants[key].index(variant[key])
                    zipped_keys = [key]
                    if 'zip_keys' in variant:
                        zip_key_groups = variant['zip_keys']
                        if zip_key_groups and not isinstance(
                                zip_key_groups[0], list):
                            zip_key_groups = [zip_key_groups]
                        for group in zip_key_groups:
                            if key in group:
                                zipped_keys = group
                                break
                    for zipped_key in zipped_keys:
                        conform_dict[zipped_key] = squished_variants[
                            zipped_key][variant_index]

        conform_dict.update({
            key: val
            for key, val in variant.items()
            if key in mv.meta.get('requirements', {}).get('build', []) +
            mv.meta.get('requirements', {}).get('host', [])
        })

        compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                      recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))
                conform_dict['target_platform'] = variant['target_platform']

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        mv.config.variants = conform_variants_to_value(mv.config.variants,
                                                       conform_dict)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (bool(mv.meta.get('source'))
                                and not mv.needs_source_for_render
                                and not os.listdir(mv.config.work_dir))
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv, env)
        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[fm.dist()] = (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
Example 3: distribute_variants
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python',
                                       variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    for variant in variants:
        mv = metadata.copy()
        mv.config.variant = variant
        used_variables = mv.get_used_loop_vars()
        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])

        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']
        if 'r-base' in build_reqs or 'r-base' in host_reqs:
            conform_dict['r_base'] = variant['r_base']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant[var])
                                 for var in mv.get_used_vars()))] = \
                                    (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
Example 4: compute_builds
def compute_builds(path,
                   base_name,
                   git_rev=None,
                   stop_rev=None,
                   folders=None,
                   matrix_base_dir=None,
                   steps=0,
                   max_downstream=5,
                   test=False,
                   public=True,
                   output_dir='../output',
                   output_folder_label='git',
                   config_overrides=None,
                   **kw):
    if not git_rev and not folders:
        raise ValueError(
            "Either git_rev or a folders list is required to know what to compute"
        )
    checkout_rev = stop_rev or git_rev
    path = path.replace('"', '')
    if not folders:
        folders = git_changed_recipes(git_rev, stop_rev, git_root=path)
    if not folders:
        print(
            "No folders specified to build, and nothing changed in git.  Exiting."
        )
        return
    matrix_base_dir = matrix_base_dir or path
    # clean up quoting from concourse template evaluation
    matrix_base_dir = matrix_base_dir.replace('"', '')

    repo_commit = ''
    git_identifier = ''
    if checkout_rev:
        with checkout_git_rev(checkout_rev, path):
            git_identifier = _get_current_git_rev(path)
            task_graph = collect_tasks(path,
                                       folders=folders,
                                       steps=steps,
                                       max_downstream=max_downstream,
                                       test=test,
                                       matrix_base_dir=matrix_base_dir)
            try:
                repo_commit = _get_current_git_rev(path)
            except subprocess.CalledProcessError:
                repo_commit = 'master'
    else:
        task_graph = collect_tasks(path,
                                   folders=folders,
                                   steps=steps,
                                   max_downstream=max_downstream,
                                   test=test,
                                   matrix_base_dir=matrix_base_dir)

    with open(os.path.join(matrix_base_dir, 'config.yml')) as src:
        data = yaml.load(src)
    data['recipe-repo-commit'] = repo_commit

    if config_overrides:
        data.update(config_overrides)

    plan = graph_to_plan_with_jobs(os.path.abspath(path),
                                   task_graph,
                                   commit_id=repo_commit,
                                   matrix_base_dir=matrix_base_dir,
                                   config_vars=data,
                                   public=public)

    output_dir = output_dir.format(base_name=base_name,
                                   git_identifier=git_identifier)

    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    with open(os.path.join(output_dir, 'plan.yml'), 'w') as f:
        yaml.dump(plan, f, default_flow_style=False)

    # expand folders to include any dependency builds or tests
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(os.getcwd(), path))
    for fn in glob.glob(os.path.join(output_dir, 'output_order*')):
        os.remove(fn)
    for node in nx.topological_sort(task_graph, reverse=True):
        meta = task_graph.node[node]['meta']
        if meta.meta_path:
            recipe = os.path.dirname(meta.meta_path)
        else:
            recipe = meta.get('extra', {}).get('parent_recipe', {})
        assert recipe, ("no parent recipe set, and no path associated "
                        "with this metadata")
        # make recipe path relative
        recipe = recipe.replace(path + '/', '')
        # copy base recipe into a folder named for this node
        out_folder = os.path.join(output_dir, node)
        if os.path.isdir(out_folder):
            shutil.rmtree(out_folder)
        shutil.copytree(os.path.join(path, recipe), out_folder)
        # write the conda_build_config.yml for this particular metadata into that recipe
        #   This should sit alongside meta.yaml, where conda-build will be able to find it
        squished_variants = list_of_dicts_to_dict_of_lists(
            meta.config.variants)
        with open(os.path.join(out_folder, 'conda_build_config.yaml'),
                  'w') as f:
            yaml.dump(squished_variants, f, default_flow_style=False)
        order_fn = 'output_order_' + task_graph.node[node]['worker']['label']
        with open(os.path.join(output_dir, order_fn), 'a') as f:
            f.write(node + '\n')
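
A hedged invocation sketch for compute_builds. The repository path, base
name, revisions, and matrix_base_dir below are placeholders; the output_dir
template mirrors the .format() call inside the function:

# Hypothetical call; all paths and revisions below are placeholders.
compute_builds(path='./recipe-repo',
               base_name='example',
               git_rev='HEAD~1',
               stop_rev='HEAD',
               matrix_base_dir='./build-config',  # must contain config.yml
               output_dir='../output/{base_name}-{git_identifier}')
# Afterwards output_dir holds plan.yml, one copied recipe folder per graph
# node, and an output_order_<worker label> file per worker.
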
Example 5: distribute_variants
def distribute_variants(metadata, variants, permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False, bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python', variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        mv = metadata.copy()
        mv.config.variant = variant

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(mv.config.variants, key, values,
                                                      'distribute_variants_reduction') or
                                  mv.config.variants)

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        try:
            mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                    bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render or not mv.source_provided)

        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
                                    (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
Example 6: distribute_variants
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        variants = filter_by_key_value(variants, 'python',
                                       variants[0]['python'],
                                       'noarch_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    for variant in variants:
        mv = metadata.copy()
        mv.config.variant = variant
        used_variables = mv.get_used_loop_vars()
        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        # handle grouping from zip_keys for everything in conform_dict
        if 'zip_keys' in variant:
            zip_key_groups = variant['zip_keys']
            if zip_key_groups and not isinstance(zip_key_groups[0], list):
                zip_key_groups = [zip_key_groups]
            for key in list(conform_dict.keys()):
                zipped_keys = None
                for group in zip_key_groups:
                    if key in group:
                        zipped_keys = group
                    if zipped_keys:
                        # here we zip the values of the keys, so that we can match the combination
                        zipped_values = list(
                            zip(*[
                                squished_variants[key] for key in zipped_keys
                            ]))
                        variant_index = zipped_values.index(
                            tuple(variant[key] for key in zipped_keys))
                        for zipped_key in zipped_keys:
                            conform_dict[zipped_key] = squished_variants[
                                zipped_key][variant_index]

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']
        if 'r-base' in build_reqs or 'r-base' in host_reqs:
            conform_dict['r_base'] = variant['r_base']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv.meta.get('requirements', {}),
                                          mv.config.variant, env)

        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[(fm.dist(),
                           fm.config.variant.get('target_platform', fm.config.subdir),
                           tuple((var, fm.config.variant[var])
                                 for var in fm.get_used_loop_vars()))] = \
                                    (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())