Code example #1
# Imports assumed from the surrounding project: MetaData and HashableDict are
# conda-build's; package_key is a helper defined alongside this function.
# Note: graph.node is the networkx<2.4 attribute API (graph.nodes in newer releases).
from conda_build.metadata import MetaData
from conda_build.utils import HashableDict

def collapse_subpackage_nodes(graph):
    """Collapse all subpackage nodes into their parent recipe node

    We get one node per output, but a given recipe can have multiple outputs.  It's important
    for dependency ordering in the graph that the outputs exist independently, but once those
    dependencies are established, we need to collapse subpackages down to a single job for the
    top-level recipe."""
    # group nodes by their recipe path first, then within those groups by their variant
    node_groups = {}
    for node in graph.nodes():
        if 'meta' in graph.node[node]:
            meta = graph.node[node]['meta']
            meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path']
            master = False

            master_meta = MetaData(meta_path, config=meta.config)
            if master_meta.name() == meta.name():
                master = True
            group = node_groups.get(meta_path, {})
            subgroup = group.get(HashableDict(meta.config.variant), {})
            if master:
                if 'master' in subgroup:
                    raise ValueError("tried to set more than one node in a group as master")
                subgroup['master'] = node
            else:
                sps = subgroup.get('subpackages', [])
                sps.append(node)
                subgroup['subpackages'] = sps
            group[HashableDict(meta.config.variant)] = subgroup
            node_groups[meta_path] = group

    for recipe_path, group in node_groups.items():
        for variant, subgroup in group.items():
            # if no node is the top-level recipe (only outputs, no top-level output), we need to
            #     obtain the package name from the recipe at the common recipe path.
            subpackages = subgroup.get('subpackages')
            if 'master' not in subgroup:
                sp0 = graph.node[subpackages[0]]
                master_meta = MetaData(recipe_path, config=sp0['meta'].config)
                worker = sp0['worker']
                master_key = package_key(master_meta, worker['label'])
                graph.add_node(master_key, meta=master_meta, worker=worker)
                master = graph.node[master_key]
            else:
                master = subgroup['master']
                master_key = package_key(graph.node[master]['meta'],
                                         graph.node[master]['worker']['label'])
            # fold in dependencies for all of the other subpackages within a group: every edge
            #     that points into any subpackage gets redirected to the "master" node.
            if subpackages:
                remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages]
                for edge in remap_edges:
                    # make sure not to add references to yourself
                    if edge[0] != master_key:
                        graph.add_edge(edge[0], master_key)
                    graph.remove_edge(*edge)

                # remove nodes that have been folded into master nodes
                for subnode in subpackages:
                    graph.remove_node(subnode)
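
A minimal sketch of the edge-remapping step above in isolation (hypothetical node names, plain networkx): edges into subpackage nodes are redirected to the master node, then the subpackage nodes are removed.

import networkx as nx

g = nx.DiGraph()
g.add_edges_from([('consumer', 'pkg-out1'), ('consumer', 'pkg-out2')])
master_key, subpackages = 'pkg', ['pkg-out1', 'pkg-out2']
g.add_node(master_key)

remap_edges = [edge for edge in g.edges() if edge[1] in subpackages]
for edge in remap_edges:
    if edge[0] != master_key:   # make sure not to add references to yourself
        g.add_edge(edge[0], master_key)
    g.remove_edge(*edge)
for subnode in subpackages:
    g.remove_node(subnode)

assert list(g.edges()) == [('consumer', 'pkg')]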
Code example #2
File: variants.py  Project: gabm/conda-build
from conda_build.utils import HashableDict

def conform_variants_to_value(list_of_dicts, dict_of_values):
    """We want to remove some variability sometimes.  For example, when Python is used by the
    top-level recipe, we do not want a further matrix for the outputs.  This function reduces
    the variability of the variant set."""
    for d in list_of_dicts:
        for k, v in dict_of_values.items():
            d[k] = v
    return list(set([HashableDict(d) for d in list_of_dicts]))
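
A quick illustration of the collapse (a sketch; the values are hypothetical, and it assumes the function above is importable together with conda_build.utils.HashableDict):

variants = [{'python': '2.7', 'numpy': '1.11'},
            {'python': '3.6', 'numpy': '1.11'}]
reduced = conform_variants_to_value(variants, {'python': '3.6'})
# both entries now pin python to 3.6, become identical, and collapse to one
assert reduced == [HashableDict({'python': '3.6', 'numpy': '1.11'})]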
Code example #3
# assumes the testing_metadata pytest fixture, plus:
#   from conda_build import jinja_context
#   from conda_build.utils import HashableDict
def test_pin_subpackage_expression(testing_metadata):
    output_dict = {'name': 'a'}
    testing_metadata.meta['outputs'] = [output_dict]
    fm = testing_metadata.get_output_metadata(output_dict)
    testing_metadata.other_outputs = {('a', HashableDict(testing_metadata.config.variant)):
                                      (output_dict, fm)}
    pin = jinja_context.pin_subpackage(testing_metadata, 'a')
    assert len(pin.split()) == 2
Code example #4
def test_pin_subpackage_exact(testing_metadata):
    name = testing_metadata.name()
    output_dict = {'name': name}
    testing_metadata.meta['outputs'] = [output_dict]
    fm = testing_metadata.get_output_metadata(output_dict)
    testing_metadata.other_outputs = {(name, HashableDict(testing_metadata.config.variant)):
                                      (output_dict, fm)}
    pin = jinja_context.pin_subpackage(testing_metadata, name, exact=True)
    assert len(pin.split()) == 3
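
The two tests above check the shape of the rendered pin expression: a non-exact pin is "name version-range" (two tokens), while an exact pin is "name version build-string" (three tokens). A sketch with hypothetical values:

loose_pin = 'a >=1.0,<2.0a0'     # pin_subpackage(m, 'a') -> name + version range
exact_pin = 'a 1.0 h1234abc_0'   # pin_subpackage(m, 'a', exact=True) -> name + version + build string
assert len(loose_pin.split()) == 2
assert len(exact_pin.split()) == 3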
Code example #5
    # assumes module-level: import copy, os, re, plus conda_build helpers
    #   (utils, HashableDict, filter_files, trim_empty_keys)
    def _get_hash_contents(self):
        sections = ['source', 'requirements', 'build']
        # make a copy of values, so that no sorting occurs in place
        composite = HashableDict({section: copy.copy(self.get_section(section))
                                  for section in sections})
        outputs = self.get_section('outputs')
        if outputs:
            outs = []
            for out in outputs:
                out = copy.copy(out)
                # files are dynamically determined, and there's no way to match them at render time.
                #    we need to exclude them from the hash.
                if 'files' in out:
                    del out['files']
                outs.append(out)
            composite.update({'outputs': [HashableDict(out) for out in outs]})

        # filter build requirements for ones that should not be in the hash
        requirements = composite.get('requirements', {})
        build_reqs = requirements.get('build', [])
        excludes = self.config.variant.get('exclude_from_build_hash', [])
        if excludes:
            exclude_pattern = re.compile('|'.join(r'{}[\s$]?.*'.format(exc) for exc in excludes))
            build_reqs = [req for req in build_reqs if not exclude_pattern.match(req)]
        requirements['build'] = build_reqs
        composite['requirements'] = requirements

        # remove the build number from the hash, so that we can bump it without changing the hash
        if 'number' in composite['build']:
            del composite['build']['number']
        # remove the build string, so that hashes don't affect themselves
        if 'string' in composite['build']:
            del composite['build']['string']
        if not composite['build']:
            del composite['build']
        for key in 'build', 'run':
            if key in composite['requirements'] and not composite['requirements'].get(key):
                del composite['requirements'][key]
        trim_empty_keys(composite)
        file_paths = []

        if self.path:
            recorded_input_files = os.path.join(self.path, '..', 'hash_input_files')
            if os.path.exists(recorded_input_files):
                with open(recorded_input_files) as f:
                    file_paths = f.read().splitlines()
            else:
                files = utils.rec_glob(self.path, "*")
                file_paths = sorted([f.replace(self.path + os.sep, '') for f in files])
                # exclude meta.yaml and meta.yaml.template, because the json dictionary captures
                #    their content
                file_paths = [f for f in file_paths if not f.startswith('meta.yaml')]
                file_paths = sorted(filter_files(file_paths, self.path))

        return composite, file_paths
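
A sketch of how a caller might consume the return value (this is not conda-build's exact scheme; the serialization and digest choices here are assumptions):

import hashlib
import json
import os

def hypothetical_build_hash(metadata):
    composite, file_paths = metadata._get_hash_contents()
    # HashableDict is a dict subclass, so json can serialize it directly
    hasher = hashlib.sha1(json.dumps(composite, sort_keys=True).encode())
    for path in file_paths:
        with open(os.path.join(metadata.path, path), 'rb') as f:
            hasher.update(f.read())
    return 'h' + hasher.hexdigest()[:7]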