def _get_base_folders(base_dir, changed_files):
    """Map changed file paths to the top-level folders that contain recipes.

    Args:
        base_dir: directory that the entries of ``changed_files`` are
            relative to.
        changed_files: iterable of repo-relative file paths (e.g. from a
            git diff).

    Returns:
        List of unique top-level folder names (order of first appearance
        preserved) for which ``find_recipe`` succeeds.  Fix over the
        previous version: several changed files inside the same folder no
        longer produce duplicate entries.
    """
    recipe_dirs = []
    seen = set()
    for f in changed_files:
        # Reduce a nested path like "pkg/meta.yaml" to its top-level
        # folder "pkg"; a bare root-level filename is tried as-is.
        if '/' in f:
            f = f.split('/')[0]
        if f in seen:
            # Multiple changed files in one folder yield that folder once.
            continue
        try:
            # Keep only folders that actually contain a recognizable recipe;
            # find_recipe raises IOError otherwise (project helper).
            find_recipe(os.path.join(base_dir, f))
        except IOError:
            # Not a recipe folder (or not a folder at all) — skip silently,
            # matching the original best-effort behavior.
            continue
        seen.add(f)
        recipe_dirs.append(f)
    return recipe_dirs
Example #2
0
def _get_base_folders(base_dir, changed_files):
    """Return the top-level folders (relative to *base_dir*) that contain a
    recipe, derived from a list of changed file paths."""
    folders = []
    for path in changed_files:
        # A path with a separator is reduced to its top-level directory;
        # a bare root-level name is used as-is.
        top = path.split('/')[0] if '/' in path else path
        try:
            find_recipe(os.path.join(base_dir, top))
        except IOError:
            # no recipe in this folder — ignore the entry
            continue
        folders.append(top)
    return folders
def expand_run(graph, conda_resolve, worker, run, steps=0, max_downstream=5,
               recipes_dir=None, matrix_base_dir=None, finalize=False):
    """Apply the build label to any nodes that need (re)building or testing.

    "need rebuilding" means both packages that our target package depends on,
    but are not yet built, as well as packages that depend on our target
    package. For the latter, you can specify how many dependencies deep (steps)
    to follow that chain, since it can be quite large.

    If steps is -1, all downstream dependencies are rebuilt or retested

    Args:
        graph: task graph to expand; mutated in place.
        conda_resolve: resolver forwarded verbatim to add_recipe_to_graph /
            construct_graph (semantics defined elsewhere in the project).
        worker: worker/platform description forwarded to the graph builders.
        run: label to apply; forwarded verbatim to add_recipe_to_graph.
        steps: number of downstream expansion passes; 0 means no expansion,
            negative means expand until a fixed point is reached.
        max_downstream: cap on downstream nodes added beyond the initial
            graph size; negative disables the cap.
        recipes_dir: directory of recipe folders; required when steps != 0.
        matrix_base_dir: forwarded verbatim to construct_graph.
        finalize: forwarded verbatim to add_recipe_to_graph.

    Returns:
        None; ``graph`` is mutated in place.
    """
    downstream = 0
    initial_nodes = len(graph.nodes())

    # for build, we get test automatically.  Give people the max_downstream in terms
    #   of packages, not tasks
    # if run == 'build':
    #     max_downstream *= 2

    def expand_step(task_graph, full_graph, downstream):
        # One expansion pass: for every node already in task_graph, add the
        # recipes of its predecessors in full_graph (packages that depend on
        # it), unless the downstream cap has been exhausted.
        for node in task_graph.nodes():
            for predecessor in full_graph.predecessors(node):
                if max_downstream < 0 or (downstream - initial_nodes) < max_downstream:
                    # NOTE(review): `.node` is the networkx 1.x attribute API
                    # (renamed to `.nodes[...]` in networkx 2.0) — confirm the
                    # pinned networkx version.
                    add_recipe_to_graph(
                        os.path.dirname(full_graph.node[predecessor]['meta'].meta_path),
                        task_graph, run=run, worker=worker, conda_resolve=conda_resolve,
                        recipes_dir=recipes_dir, finalize=finalize)
                    # The local increment gates further additions within this
                    # pass; the caller then resets its counter from the return
                    # value (the graph's total node count).
                    downstream += 1
        return len(graph.nodes())

    # starting from our initial collection of dirty nodes, trace the tree down to packages
    #   that depend on the dirty nodes.  These packages may need to be rebuilt, or perhaps
    #   just tested.  The 'run' argument determines which.

    if steps != 0:
        if not recipes_dir:
            raise ValueError("recipes_dir is necessary if steps != 0.  "
                             "Please pass it as an argument.")
        # here we need to fully populate a graph that has the right build or run/test deps.
        #    We don't create this elsewhere because it is unnecessary and costly.

        # get all immediate subdirectories
        other_top_dirs = [d for d in os.listdir(recipes_dir)
                        if os.path.isdir(os.path.join(recipes_dir, d)) and
                        not d.startswith('.')]
        recipe_dirs = []
        for recipe_dir in other_top_dirs:
            try:
                find_recipe(os.path.join(recipes_dir, recipe_dir))
                recipe_dirs.append(recipe_dir)
            except IOError:
                # not a recipe folder; skip it
                pass

        # constructing the graph for build will automatically also include the test deps
        full_graph = construct_graph(recipes_dir, worker, 'build', folders=recipe_dirs,
                                     matrix_base_dir=matrix_base_dir, conda_resolve=conda_resolve)

        if steps >= 0:
            # fixed number of expansion passes
            for step in range(steps):
                downstream = expand_step(graph, full_graph, downstream)
        else:
            # negative steps: expand until a pass adds no new nodes
            while True:
                nodes = graph.nodes()
                downstream = expand_step(graph, full_graph, downstream)
                # NOTE(review): this termination test assumes nodes() returns
                # a snapshot (a list, as in networkx 1.x); a live networkx 2.x
                # NodeView would always compare equal to itself — confirm.
                if nodes == graph.nodes():
                    break
Example #4
0
def expand_run(graph,
               conda_resolve,
               worker,
               run,
               steps=0,
               max_downstream=5,
               recipes_dir=None,
               matrix_base_dir=None,
               finalize=False):
    """Apply the build label to any nodes that need (re)building or testing.

    "need rebuilding" covers both packages our target depends on that are not
    yet built, and packages that depend on our target.  For the latter, the
    ``steps`` argument controls how many dependency levels deep to follow the
    chain, since it can be quite large.

    If steps is -1, all downstream dependencies are rebuilt or retested.
    """
    downstream = 0
    initial_nodes = len(graph.nodes())

    # A 'build' run implies its tests, so the max_downstream cap is counted
    # in packages rather than tasks.

    def one_pass(task_graph, full_graph, downstream):
        # Add the recipe of every package that depends on a node already in
        # task_graph, as long as the downstream cap has not been exhausted.
        for node in task_graph.nodes():
            for upstream in full_graph.predecessors(node):
                within_cap = (max_downstream < 0 or
                              downstream - initial_nodes < max_downstream)
                if not within_cap:
                    continue
                recipe_folder = os.path.dirname(
                    full_graph.node[upstream]['meta'].meta_path)
                add_recipe_to_graph(recipe_folder,
                                    task_graph,
                                    run=run,
                                    worker=worker,
                                    conda_resolve=conda_resolve,
                                    recipes_dir=recipes_dir,
                                    finalize=finalize)
                downstream += 1
        return len(graph.nodes())

    # Starting from the initial collection of dirty nodes, trace down to the
    # packages that depend on them; those may need rebuilding or just testing,
    # as determined by the 'run' argument.

    if steps == 0:
        # nothing to expand
        return

    if not recipes_dir:
        raise ValueError("recipes_dir is necessary if steps != 0.  "
                         "Please pass it as an argument.")

    # Fully populate a graph with the right build or run/test deps.  This is
    # done only here because it is costly and unnecessary elsewhere.

    # collect the immediate, non-hidden subdirectories
    candidates = [entry for entry in os.listdir(recipes_dir)
                  if not entry.startswith('.') and
                  os.path.isdir(os.path.join(recipes_dir, entry))]
    recipe_dirs = []
    for candidate in candidates:
        try:
            find_recipe(os.path.join(recipes_dir, candidate))
        except IOError:
            continue
        recipe_dirs.append(candidate)

    # a 'build' graph automatically includes the test dependencies as well
    full_graph = construct_graph(recipes_dir,
                                 worker,
                                 'build',
                                 folders=recipe_dirs,
                                 matrix_base_dir=matrix_base_dir,
                                 conda_resolve=conda_resolve)

    if steps < 0:
        # keep expanding until a pass adds no new nodes
        while True:
            before = graph.nodes()
            downstream = one_pass(graph, full_graph, downstream)
            if before == graph.nodes():
                break
    else:
        for _ in range(steps):
            downstream = one_pass(graph, full_graph, downstream)
def construct_graph(directory,
                    platform,
                    bits,
                    folders=(),
                    deps_type='build',
                    git_rev=None,
                    stop_rev=None):
    '''
    Construct a directed graph of dependencies from a directory of recipes

    deps_type: whether to use build or run/test requirements for the graph.  Avoids cycles.
          values: 'build' or 'test'.  Actually, only 'build' matters - otherwise, it's
                   run/test for any other value.

    directory: folder whose immediate subdirectories are recipe folders;
          made absolute relative to the current working directory if needed.
    platform, bits: forwarded verbatim to conda-build's api.render.
    folders: recipe folder names to mark for building/testing; when empty,
          derived from git history via git_changed_recipes.
    git_rev, stop_rev: git revision range used only when folders is empty;
          git_rev defaults to 'HEAD'.

    Returns a networkx DiGraph with one node per package name; edges point
    from a package to each of its dependencies.
    '''
    g = nx.DiGraph()
    if not os.path.isabs(directory):
        directory = os.path.normpath(os.path.join(os.getcwd(), directory))
    assert os.path.isdir(directory)

    # get all immediate subdirectories
    other_top_dirs = [
        d for d in os.listdir(directory)
        if os.path.isdir(os.path.join(directory, d)) and not d.startswith('.')
    ]
    recipe_dirs = []
    for recipe_dir in other_top_dirs:
        try:
            # keep only folders that contain a recognizable recipe
            find_recipe(os.path.join(directory, recipe_dir))
            recipe_dirs.append(recipe_dir)
        except IOError:
            pass

    if not folders:
        # No explicit folder list: fall back to what changed in git.
        if not git_rev:
            git_rev = 'HEAD'
        # NOTE(review): if git_changed_recipes can return None, the
        # `rd in folders` test below would raise TypeError — confirm it
        # always returns an iterable.
        folders = git_changed_recipes(git_rev,
                                      stop_rev=stop_rev,
                                      git_root=directory)

    for rd in recipe_dirs:
        recipe_dir = os.path.join(directory, rd)
        # render the recipe into package metadata (conda-build API)
        pkg, _, _ = api.render(recipe_dir, platform=platform, bits=bits)
        name = pkg.name()

        # Per-node flags describing what should happen to the package.
        run_dict = {
            'build': False,  # will be built and tested
            'test': False,  # must be installable; will be tested
            'install':
            False,  # must be installable, but is not necessarily tested
        }
        if rd in folders:
            # this recipe changed: mark it for the requested action
            run_dict[deps_type] = True
        if not pkg.skip():
            # since we have no dependency ordering without a graph, it is conceivable that we add
            #    recipe information after we've already added package info as just a dependency.
            #    This first clause is if we encounter a recipe for the first time.  Its else clause
            #    is when we encounter a recipe after we've already added a node based on a
            #    dependency that can (presumably) be downloaded.
            # NOTE(review): `g.node[...]` is the networkx 1.x attribute API
            # (networkx 2.x uses `g.nodes[...]`) — confirm the pinned version.
            if name not in g.nodes():
                g.add_node(name,
                           meta=describe_meta(pkg),
                           recipe=recipe_dir,
                           **run_dict)
            else:
                g.node[name]['meta'] = describe_meta(pkg)
                g.node[name]['recipe'] = recipe_dir
                g.node[name].update(run_dict)
        deps = get_build_deps(
            pkg) if deps_type == 'build' else get_run_test_deps(pkg)
        for dep, version in deps.items():
            if dep not in g.nodes():
                # we fill in the rest of the metadata later, when (and if) we
                #    encounter this dependency's own recipe in the loop above
                g.add_node(dep,
                           meta={
                               'build': 0,
                               'run_test_depends': {},
                               'build_depends': {},
                               'version': version
                           })
            # every dependency must at least be installable
            g.node[dep]['install'] = True
            # edge points from the package to its dependency
            g.add_edge(name, dep)
    return g