Example 1
    def test_add_dependencies_for_same_node_incrementally(self):
        # Test same node multiple times
        ts = graphlib.TopologicalSorter()
        ts.add(1, 2)
        ts.add(1, 3)
        ts.add(1, 4)
        ts.add(1, 5)

        ts2 = graphlib.TopologicalSorter({1: {2, 3, 4, 5}})
        self.assertEqual([*ts.static_order()], [*ts2.static_order()])
Example 2
    def _test_graph(self, graph, expected):
        def static_order_with_groups(ts):
            ts.prepare()
            while ts.is_active():
                nodes = ts.get_ready()
                for node in nodes:
                    ts.done(node)
                yield nodes

        ts = graphlib.TopologicalSorter(graph)
        self.assertEqual(list(static_order_with_groups(ts)), list(expected))

        ts = graphlib.TopologicalSorter(graph)
        self.assertEqual(list(ts.static_order()), list(chain(*expected)))
Example 3
def get_dependency_map(
    depman: DependencyManager,
    mods: LilacMods,
) -> Dict[str, Set[Dependency]]:
    '''Compute ordered, complete dependency relations between pkgbases (the directory names).

    This function does not make use of pkgname because they may be the same for
    different pkgdirs. Those are carried by Dependency and used elsewhere.
    '''
    map: Dict[str, Set[Dependency]] = defaultdict(set)
    pkgdir_map: Dict[str, Set[str]] = defaultdict(set)
    rmap: Dict[str, Set[str]] = defaultdict(set)

    for pkgbase, mod in mods.items():
        depends = getattr(mod, 'repo_depends', ())

        ds = [depman.get(d) for d in depends]
        if ds:
            for d in ds:
                pkgdir_map[pkgbase].add(d.pkgdir.name)
                rmap[d.pkgdir.name].add(pkgbase)
            map[pkgbase].update(ds)

    dep_order = graphlib.TopologicalSorter(pkgdir_map).static_order()
    for pkgbase in dep_order:
        if pkgbase in rmap:
            deps = map[pkgbase]
            dependers = rmap[pkgbase]
            for dd in dependers:
                map[dd].update(deps)

    return map
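Note on the propagation step above: because static_order() yields a package only after all of its dependencies, each package can simply absorb the already-complete sets of its direct dependencies. A minimal sketch of that transitive-closure idea, written in the forward direction rather than through the reverse map rmap, with made-up package names:

import graphlib
from collections import defaultdict

# Illustrative graph: "app" depends on "lib", which depends on "base".
pkgdir_map = {"app": {"lib"}, "lib": {"base"}, "base": set()}
all_deps = defaultdict(set, {k: set(v) for k, v in pkgdir_map.items()})

# Walk in topological order so a package's dependency set is complete
# before any package that depends on it is visited.
for pkg in graphlib.TopologicalSorter(pkgdir_map).static_order():
    for direct in pkgdir_map[pkg]:
        all_deps[pkg] |= all_deps[direct]

print(sorted(all_deps["app"]))  # ['base', 'lib']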
Example 4
def iter_sorted(components: typing.Iterable[cm.Component], /) \
-> typing.Generator[cm.Component, None, None]:
    '''
    returns a generator yielding the given components, honouring their dependencies, starting
    with "leaf" components (i.e. components w/o dependencies), also known as topologically sorted.
    '''
    components = (to_component(c) for c in components)
    components_by_id = {c.identity(): c for c in components}

    toposorter = graphlib.TopologicalSorter()

    def ref_to_comp_id(
            component_ref: cm.ComponentReference) -> cm.ComponentIdentity:
        return cm.ComponentIdentity(
            name=component_ref.componentName,
            version=component_ref.version,
        )

    for component_id, component in components_by_id.items():
        depended_on_comp_ids = (ref_to_comp_id(cref)
                                for cref in component.componentReferences)
        toposorter.add(component_id, *depended_on_comp_ids)

    for component_id in toposorter.static_order():
        if component_id not in components_by_id:
            # XXX: ignore component-references not contained in passed components for now
            continue

        yield components_by_id[component_id]
Example 5
File: tree.py Project: elis4265/bp1
    def _build_topo(self) -> list[int]:
        """
        Build topological order from spaces, or raise CycleError.

        Parameters
        ----------
        None

        Returns
        -------
        *topo.static_order() : list[int]
            list of integers that represent a cycle-free ordering

        """
        topo = graphlib.TopologicalSorter()
        for space in self.vs:
            predecessors = []
            for subspace_name in chain(space.subspaces_add, space.subspaces_sub):
                subspace_id = self.space(subspace_name, create=False)
                if subspace_id is None:
                    print(f"Virtual world {subspace_name} doesn't exist",
                          file=sys.stderr)
                    continue
                else:
                    predecessors.append(subspace_id)
            topo.add(space.index, *predecessors)
 
        return [*topo.static_order()]
Example 6
    def _apply_traits(self, pipeline_def):
        transformers = [trait.transformer() for trait in pipeline_def._traits_dict.values()]
        transformers_dict = {t.name: t for t in transformers}
        transformer_names = set(transformers_dict.keys())

        for transformer in transformers:
            if not set(transformer.dependencies()).issubset(transformer_names):
                missing = set(transformer.dependencies()) - transformer_names
                raise ModelValidationError(
                    f'{pipeline_def}: trait requires missing traits: ' + ', '.join(missing)
                )

        # order transformers according to dependencies
        toposorter = graphlib.TopologicalSorter()
        for transformer in transformers:
            dependencies = transformer.order_dependencies() & transformer_names
            toposorter.add(transformer.name, *dependencies)

        ordered_transformers = [
            transformers_dict[name] for name in toposorter.static_order()
        ]

        # hardcode meta trait transformer
        ordered_transformers.append(MetaTraitTransformer())

        # inject new steps
        for transformer in ordered_transformers:
            for step in transformer.inject_steps():
                pipeline_def.add_step(step)

        # do remaining processing
        for transformer in ordered_transformers:
            transformer.process_pipeline_args(pipeline_def)
Example 7
def day19(inp, no_part_2=False):
    rules_block, messages = inp.split('\n\n')

    # parse rules into a dict first
    # also toposort the rules
    rules = {}
    toposorter = graphlib.TopologicalSorter()
    for rule in rules_block.splitlines():
        ind, rest = rule.split(': ')
        index = int(ind)
        rules[index] = rest

        # parse rules to pick out integer dependencies for building the dependency graph
        potential_dependencies = rest.replace('|', ' ').split()
        dependencies = [int(dep) for dep in potential_dependencies if dep.isdigit()]
        toposorter.add(index, *dependencies)

    sort_order = list(toposorter.static_order())

    # build a regex from the rules using the toposort order
    mega_regex = regexify(rules, sort_order)
    part1 = sum(1 for msg in messages.splitlines() if mega_regex.fullmatch(msg))

    if no_part_2:
        return part1

    # part 2: hack the regex during construction
    mega_regex_part2 = regexify(rules, sort_order, part2=True)
    part2 = sum(1 for msg in messages.splitlines() if mega_regex_part2.fullmatch(msg))

    return part1, part2
Example 8
    def test_calls_before_prepare(self):
        ts = graphlib.TopologicalSorter()

        with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"):
            ts.get_ready()
        with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"):
            ts.done(3)
        with self.assertRaisesRegex(ValueError, r"prepare\(\) must be called first"):
            ts.is_active()
Example 9
    def combined_table_headers_default(self):
        hide_doc = True  # Docs must be the first statement to show up
        graph = graphlib.TopologicalSorter()
        table_edges = list(
            zip(self.val_table_headers[::2], self.val_table_headers[1::2]))
        table_edges += list(
            zip(self.group_table_headers[::2], self.group_table_headers[1::2]))
        for (start, end) in table_edges:
            graph.add(end, start)
        return list(graph.static_order())
Example 10
    def test_the_node_multiple_times(self):
        # Test same node multiple times in dependencies
        self._test_graph({1: {2}, 3: {4}, 0: [2, 4, 4, 4, 4, 4]}, [(2, 4), (1, 3, 0)])

        # Test adding the same dependency multiple times
        ts = graphlib.TopologicalSorter()
        ts.add(1, 2)
        ts.add(1, 2)
        ts.add(1, 2)
        self.assertEqual([*ts.static_order()], [2, 1])
Example 11
    def ordered_steps(self) -> typing.Generator[tuple[str], None, None]:
        dependencies = {step.name: step.depends() for step in self.steps()}
        # add dependencies on trait-defined steps
        for step in self.steps():
            dependencies[step.name] |= {
                s.name
                for s in self.steps()
                if s.injecting_trait_name() in step.trait_depends()
            }

        def iter_results(toposorter: graphlib.TopologicalSorter):
            while toposorter.is_active():
                ready_tasks = tuple(toposorter.get_ready())
                toposorter.done(*ready_tasks)
                yield ready_tasks

        try:
            toposorter = graphlib.TopologicalSorter(graph=dependencies)
            toposorter.prepare()
            return iter_results(toposorter=toposorter)
        except graphlib.CycleError as ce:
            cycle_steps = ce.args[1]  # contains a list of circular steps
            dependencies = self._find_and_resolve_publish_trait_circular_dependencies(
                dependencies,
                cycle_info=cycle_steps,
            )
            try:
                # check whether resolving the dependency between the publish trait has already
                # fixed the issue
                toposorter = graphlib.TopologicalSorter(graph=dependencies)
                toposorter.prepare()
                return iter_results(toposorter=toposorter)
            except graphlib.CycleError as ce:
                cycle_steps = ce.args[1]  # contains a list of circular steps
                dependencies = self._find_and_resolve_release_trait_circular_dependencies(
                    dependencies,
                    cycle_info=cycle_steps,
                )
                # try again - if there is still a cyclic dependency, this is probably caused
                # by a user error - so let it propagate
                toposorter = graphlib.TopologicalSorter(graph=dependencies)
                toposorter.prepare()
                return iter_results(toposorter=toposorter)
Example 12
    def _assert_cycle(self, graph, cycle):
        ts = graphlib.TopologicalSorter()
        for node, dependson in graph.items():
            ts.add(node, *dependson)
        try:
            ts.prepare()
        except graphlib.CycleError as e:
            msg, seq = e.args
            self.assertIn(" ".join(map(str, cycle)), " ".join(map(str, seq * 2)))
        else:
            raise
Example 13
    def test_invalid_nodes_in_done(self):
        ts = graphlib.TopologicalSorter()
        ts.add(1, 2, 3, 4)
        ts.add(2, 3, 4)
        ts.prepare()
        ts.get_ready()

        with self.assertRaisesRegex(ValueError, "node 2 was not passed out"):
            ts.done(2)
        with self.assertRaisesRegex(ValueError, r"node 24 was not added using add\(\)"):
            ts.done(24)
Example 14
    def _test_graph(self, graph, expected):
        def static_order_with_groups(ts):
            ts.prepare()
            while ts.is_active():
                nodes = ts.get_ready()
                for node in nodes:
                    ts.done(node)
                yield tuple(sorted(nodes))

        ts = graphlib.TopologicalSorter(graph)
        self.assertEqual(list(static_order_with_groups(ts)), list(expected))

        ts = graphlib.TopologicalSorter(graph)
        # need to be a bit careful comparing the result of ts.static_order and
        # expected, because the order within a group is dependent on set
        # iteration order
        it = iter(ts.static_order())
        for group in expected:
            tsgroup = {next(it) for element in group}
            self.assertEqual(set(group), tsgroup)
Example 15
def topological_sort(names: tuple[str]) -> tuple[Category]:
    visited = set()
    to_visit = list(as_categories(names))
    sorter = graphlib.TopologicalSorter()
    while to_visit:
        category = to_visit.pop()
        prerequisites = as_categories(category.prerequisites, default=())
        visited.add(category)
        sorter.add(category, *prerequisites)
        to_visit.extend(prerequisite for prerequisite in prerequisites
                        if prerequisite not in visited)
    return tuple(sorter.static_order())
Example 16
def try_stock(new, old):
    return_list = []
    # Driver Code
    graph = defaultdict(list)
    for i in range(len(old)):
        graph[old[i]].append(new[i])
    ts = graphlib.TopologicalSorter(graph)
    out_list = list(ts.static_order())[::-1]
    for stock in out_list:
        if len(graph[stock]) != 0:
            return_list.append([graph[stock][0], stock])
    return return_list
Example 17
    def test_order_of_insertion_does_not_matter_between_groups(self):
        def get_groups(ts):
            ts.prepare()
            while ts.is_active():
                nodes = ts.get_ready()
                ts.done(*nodes)
                yield set(nodes)

        ts = graphlib.TopologicalSorter()
        ts.add(3, 2, 1)
        ts.add(1, 0)
        ts.add(4, 5)
        ts.add(6, 7)
        ts.add(4, 7)

        ts2 = graphlib.TopologicalSorter()
        ts2.add(1, 0)
        ts2.add(3, 2, 1)
        ts2.add(4, 7)
        ts2.add(6, 7)
        ts2.add(4, 5)

        self.assertEqual(list(get_groups(ts)), list(get_groups(ts2)))
Example 18
    def test_is_active(self):
        ts = graphlib.TopologicalSorter()
        ts.add(1, 2)
        ts.prepare()

        self.assertTrue(ts.is_active())
        self.assertEqual(ts.get_ready(), (2, ))
        self.assertTrue(ts.is_active())
        ts.done(2)
        self.assertTrue(ts.is_active())
        self.assertEqual(ts.get_ready(), (1, ))
        self.assertTrue(ts.is_active())
        ts.done(1)
        self.assertFalse(ts.is_active())
Example 19
def _sort_plugins(console, plugin_data: dict[str, PluginData]) -> list:
    console.bot('Sorting plugins according to their dependency tree...')

    # Ensure admin plugin is always listed first
    sorted_plugin_list = [plugin_data.pop('admin')]
    plugin_graph = {
        name: set(
            plugin.clazz.requiresPlugins +
            [z for z in plugin.clazz.loadAfterPlugins if z in plugin_data]
        )
        for name, plugin in plugin_data.items()
    }
    sorted_plugin_list += [
        plugin_data[x]
        for x in graphlib.TopologicalSorter(plugin_graph).static_order()
    ]
    return sorted_plugin_list
Example 20
    def __init__(self, *structure, prior_count: int = None):

        self.prior_count = prior_count

        def coerce_list(obj):
            if isinstance(obj, list):
                return obj
            return [obj]

        # The structure is made up of nodes (scalars) and edges (tuples)
        edges = (e for e in structure if isinstance(e, tuple))
        nodes = set(e for e in structure if not isinstance(e, tuple))

        # Convert edges into children and parent connections
        self.parents = collections.defaultdict(set)
        self.children = collections.defaultdict(set)

        for parents, children in edges:
            for parent, child in itertools.product(coerce_list(parents),
                                                   coerce_list(children)):
                self.parents[child].add(parent)
                self.children[parent].add(child)

        # collections.defaultdict(set) -> dict(list)
        self.parents = {
            node: list(sorted(parents))
            for node, parents in self.parents.items()
        }
        self.children = {
            node: list(sorted(children))
            for node, children in self.children.items()
        }

        # The nodes are sorted in topological order. Nodes of the same level are sorted in
        # lexicographic order.
        ts = graphlib.TopologicalSorter()
        for node in sorted(
            {*self.parents.keys(), *self.children.keys(), *nodes}):
            ts.add(node, *self.parents.get(node, []))
        self.nodes = list(ts.static_order())

        self.P = {}
        self._P_sizes = {}
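The ordering comment above relies on graphlib handing out ready nodes in the order they were added, so inserting nodes via sorted() makes nodes of the same level come out lexicographically. A small sketch of that tie-breaking behaviour (this is how current CPython behaves, not a documented guarantee; the node names are made up):

import graphlib

parents = {"c": ["a"], "b": ["a"], "d": ["b", "c"]}

ts = graphlib.TopologicalSorter()
for node in sorted({"a", *parents}):       # insert nodes in lexicographic order
    ts.add(node, *parents.get(node, []))

# "b" and "c" sit on the same level; they come out in insertion order.
print(list(ts.static_order()))  # ['a', 'b', 'c', 'd']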
Example 21
    def test_done(self):
        ts = graphlib.TopologicalSorter()
        ts.add(1, 2, 3, 4)
        ts.add(2, 3)
        ts.prepare()

        self.assertEqual(ts.get_ready(), (3, 4))
        # If we don't mark anything as done, get_ready() returns nothing
        self.assertEqual(ts.get_ready(), ())
        ts.done(3)
        # Now 2 becomes available as 3 is done
        self.assertEqual(ts.get_ready(), (2, ))
        self.assertEqual(ts.get_ready(), ())
        ts.done(4)
        ts.done(2)
        # Only 1 is missing
        self.assertEqual(ts.get_ready(), (1, ))
        self.assertEqual(ts.get_ready(), ())
        ts.done(1)
        self.assertEqual(ts.get_ready(), ())
        self.assertFalse(ts.is_active())
Example 22
    def handle(self) -> int:
        pkgname = self.argument("name")
        keepwork = self.option("keepwork")
        destination = self.option("dest")
        generic = self.option("generic")
        libc = self.option("libc")
        build_source = self.option("build-source")
        build_debug = self.option("build-debug")
        src_ref = self.option("source-ref")
        version = self.option("pkg-version")
        revision = self.option("pkg-revision")
        subdist = self.option("pkg-subdist")
        is_release = self.option("release")
        extra_opt = self.option("extra-optimizations")
        jobs = self.option("jobs")

        target = targets.detect_target(self.io, portable=generic, libc=libc)
        target.prepare()

        modname, _, clsname = pkgname.rpartition(":")

        mod = importlib.import_module(modname)
        pkgcls = getattr(mod, clsname)
        if src_ref:
            if "extras" not in pkgcls.sources[0]:
                pkgcls.sources[0]["extras"] = {}
            pkgcls.sources[0]["extras"]["version"] = src_ref
        root_pkg = pkgcls.resolve(
            self.io,
            version=version,
            revision=revision,
            is_release=is_release,
            target=target,
        )

        sources = root_pkg.get_sources()

        if len(sources) != 1:
            self.error("Only single-source git packages are supported")
            return 1

        source = sources[0]
        if not isinstance(source, af_sources.GitSource):
            self.error("Only single-source git packages are supported")
            return 1

        root = project_package.ProjectPackage("__root__", "1")
        root.python_versions = af_python.python_dependency.pretty_constraint
        root.add_dependency(
            poetry_dep.Dependency(root_pkg.name, root_pkg.version))
        af_repo.bundle_repo.add_package(root)

        target_capabilities = target.get_capabilities()
        extras = [f"capability-{c}" for c in target_capabilities]

        repo_pool = af_repo.Pool()
        repo_pool.add_repository(target.get_package_repository())
        repo_pool.add_repository(af_repo.bundle_repo, secondary=True)

        item_repo = root_pkg.get_package_repository(target, io=self.io)
        if item_repo is not None:
            repo_pool.add_repository(item_repo, secondary=True)

        provider = af_repo.Provider(root, repo_pool, self.io, extras=extras)
        resolution = poetry_solver.resolve_version(root, provider)

        env = poetry_env.SystemEnv(pathlib.Path(sys.executable))
        pkg_map = {}
        graph = {}
        for dep_package in resolution.packages:
            pkg_map[dep_package.name] = dep_package.package
            package = dep_package.package
            if env.is_valid_for_marker(dep_package.dependency.marker):
                deps = {
                    req.name
                    for req in package.requires
                    if env.is_valid_for_marker(req.marker)
                }
                graph[package.name] = deps
        sorter = graphlib.TopologicalSorter(graph)
        packages = [pkg_map[pn] for pn in sorter.static_order()]

        # Build a separate package list for build deps.
        build_root = project_package.ProjectPackage("__build_root__", "1")
        build_root.python_versions = (
            af_python.python_dependency.pretty_constraint)
        build_root.add_dependency(
            poetry_dep.Dependency(root_pkg.name, root_pkg.version))
        build_root.build_requires = []
        provider = af_repo.Provider(build_root,
                                    repo_pool,
                                    self.io,
                                    include_build_reqs=True)
        resolution = poetry_solver.resolve_version(build_root, provider)

        pkg_map = {}
        graph = {}
        for dep_package in resolution.packages:
            pkg_map[dep_package.name] = dep_package.package
            package = dep_package.package
            if env.is_valid_for_marker(dep_package.dependency.marker):
                reqs = set(package.requires) | set(
                    getattr(package, "build_requires", []))
                deps = {
                    req.name
                    for req in reqs if req.is_activated()
                    and env.is_valid_for_marker(req.marker)
                }
                graph[package.name] = deps

        # Workaround cycles in build/runtime dependencies between
        # packages.  This requires the depending package to explicitly
        # declare its cyclic runtime dependencies in get_cyclic_runtime_deps()
        # and then the cyclic dependency must take care to inject itself
        # into the dependent's context to build itself (e.g. by manipulating
        # PYTHONPATH at build time.)  An example of such cycle is
        # flit-core -> tomli -> flit-core.
        cyclic_runtime_deps = collections.defaultdict(list)
        last_cycle = None
        current_cycle = None
        while True:
            sorter = graphlib.TopologicalSorter(graph)

            try:
                build_pkgs = [pkg_map[pn] for pn in sorter.static_order()]
            except graphlib.CycleError as e:
                cycle = e.args[1]
                if len(cycle) > 3 or cycle == last_cycle:
                    raise

                dep = pkg_map[cycle[-1]]
                pkg_with_dep = pkg_map[cycle[-2]]
                if dep.name not in pkg_with_dep.get_cyclic_runtime_deps():
                    dep, pkg_with_dep = pkg_with_dep, dep
                    if dep.name not in pkg_with_dep.get_cyclic_runtime_deps():
                        raise

                last_cycle = current_cycle
                current_cycle = cycle
                cyclic_runtime_deps[pkg_with_dep].append(dep)
                graph[pkg_with_dep.name].remove(dep.name)
            else:
                break

        for pkg_with_cr_deps, cr_deps in cyclic_runtime_deps.items():
            for i, build_pkg in enumerate(build_pkgs):
                if build_pkg == pkg_with_cr_deps:
                    build_pkgs[i + 1:i + 1] = cr_deps
                    break

        if keepwork:
            workdir = tempfile.mkdtemp(prefix="metapkg.")
        else:
            tempdir = tempfile.TemporaryDirectory(prefix="metapkg.")
            workdir = tempdir.name

        os.chmod(workdir, 0o755)

        try:
            target.build(
                targets.BuildRequest(
                    io=self.io,
                    env=env,
                    root_pkg=root_pkg,
                    deps=packages,
                    build_deps=build_pkgs,
                    workdir=workdir,
                    outputdir=destination,
                    build_source=build_source,
                    build_debug=build_debug,
                    revision=revision or "1",
                    subdist=subdist,
                    extra_opt=extra_opt,
                    jobs=jobs or 0,
                ), )
        finally:
            if not keepwork:
                tempdir.cleanup()

        return 0
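The cycle workaround described in the comments above boils down to: sort, catch graphlib.CycleError, drop one edge of the reported cycle, and sort again. Per the graphlib documentation, CycleError.args[1] is a list of nodes in which each node is an immediate predecessor of the next one, with the first and last node equal. A stripped-down sketch of that retry pattern (the graph below is made up, echoing the flit-core/tomli cycle mentioned in the comment):

import graphlib

# Each value is the set of predecessors (dependencies) of its key.
deps = {"flit-core": {"tomli"}, "tomli": {"flit-core"}, "wheel": set()}

while True:
    try:
        order = list(graphlib.TopologicalSorter(deps).static_order())
        break
    except graphlib.CycleError as e:
        cycle = e.args[1]                  # e.g. ['tomli', 'flit-core', 'tomli']
        # cycle[0] is a predecessor of cycle[1]: drop that edge and retry.
        deps[cycle[1]].discard(cycle[0])

print(order)  # cycle-free order after pruning one edge per failed attempt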
Example 23
    def decode(self, problem: Problem, rotations: Optional[List] = None) -> Floorplan:
        """
        Decode:
            Based on the sequence pair and the problem with rotations information, calculate a floorplan
            (bounding box, area, and rectangle positions).
        """

        if not isinstance(problem, Problem):
            raise TypeError("Invalid argument: 'problem' must be an instance of Problem.")

        if problem.n != self.n:
            raise ValueError("'problem.n' must be the same as the sequence-pair length.")

        if rotations is not None:
            if len(rotations) != self.n:
                raise ValueError("'rotations' length must be the same as the sequence-pair length.")

        coords = self.oblique_grid.coordinates

        # Width and height dealing with rotations
        width_wrot = []
        height_wrot = []
        for i in range(self.n):
            if (rotations is None) or (rotations[i] % 2 == 0):
                # no rotation
                width_wrot.append(problem.rectangles[i]["width"])
                height_wrot.append(problem.rectangles[i]["height"])
            else:
                # with rotation
                assert problem.rectangles[i]["rotatable"]
                width_wrot.append(problem.rectangles[i]["height"])
                height_wrot.append(problem.rectangles[i]["width"])

        # Calculate the longest path in the "Horizontal Constraint Graph" (G_h)
        # The time complexity is O(n^2); this may be optimized...
        graph_h: Dict[int, List] = {i: [] for i in range(self.n)}
        for i in range(self.n):
            for j in range(self.n):
                # When j is right of i, set an edge from j to i
                if (coords[i]["a"] < coords[j]["a"]) and (coords[i]["b"] < coords[j]["b"]):
                    graph_h[j].append(i)

        # Topological order of DAG (G_h)
        topo_h = graphlib.TopologicalSorter(graph_h)
        torder_h = list(topo_h.static_order())

        # Calculate W (bounding box width) from G_h
        dist_h = [width_wrot[i] for i in range(self.n)]
        for i in torder_h:
            dist_h[i] += max([dist_h[e] for e in graph_h[i]], default=0)
        bb_width = max(dist_h)

        # Calculate the longest path in the "Vertical Constraint Graph" (G_v)
        # The time complexity is O(n^2); this may be optimized...
        graph_v: Dict[int, List] = {i: [] for i in range(self.n)}
        for i in range(self.n):
            for j in range(self.n):
                # When j is above i, set an edge from j to i
                if (coords[i]["a"] > coords[j]["a"]) and (coords[i]["b"] < coords[j]["b"]):
                    graph_v[j].append(i)

        # Topological order of DAG (G_v)
        topo_v = graphlib.TopologicalSorter(graph_v)
        torder_v = list(topo_v.static_order())

        # Calculate H (bounding box height) from G_v
        dist_v = [height_wrot[i] for i in range(self.n)]
        for i in torder_v:
            dist_v[i] += max([dist_v[e] for e in graph_v[i]], default=0)
        bb_height = max(dist_v)

        # Calculate bottom-left positions
        positions = []
        for i in range(self.n):
            positions.append(
                {
                    "id": i,
                    "x": dist_h[i] - width_wrot[i],  # distance from left edge
                    "y": dist_v[i] - height_wrot[i],  # distande from bottom edge
                    "width": width_wrot[i],
                    "height": height_wrot[i],
                }
            )

        return Floorplan(bounding_box=(bb_width, bb_height), positions=positions)
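Both bounding-box dimensions above are longest paths in a DAG, obtained by relaxing node distances in topological order. A self-contained sketch of that sub-step with illustrative weights and constraint edges:

import graphlib

# Node weights (think rectangle widths) and, per node, the nodes that must
# lie to its left (its predecessors in the horizontal constraint graph).
weight = {0: 4, 1: 3, 2: 5}
left_of = {0: [], 1: [0], 2: [0, 1]}   # 2 is right of both 0 and 1

dist = dict(weight)
for i in graphlib.TopologicalSorter(left_of).static_order():
    # longest path ending at i = own weight + best path among predecessors
    dist[i] = weight[i] + max((dist[j] for j in left_of[i]), default=0)

print(max(dist.values()))  # 12 == 4 + 3 + 5, the width of the full chain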
Example 24
def parse_param_exprs(
        params, descs,
        silent_errors=False,
        silent_warnings=False,
        throw_on_errors=True,
        throw_on_warnings=False,
        invalid_keys_syntax=None,
        invalid_keys_unknown=None,
        invalid_keys_repeated=None,
        invalid_keys_bad_scalar=None,
        invalid_keys_bad_vector=None,
        invalid_exprs_bad_value=None,
        invalid_exprs_bad_syntax=None,
        invalid_exprs_bad_scalar=None,
        invalid_exprs_bad_vector=None):

    if invalid_keys_syntax is None:
        invalid_keys_syntax = []
    if invalid_keys_unknown is None:
        invalid_keys_unknown = []
    if invalid_keys_repeated is None:
        invalid_keys_repeated = {}
    if invalid_keys_bad_scalar is None:
        invalid_keys_bad_scalar = []
    if invalid_keys_bad_vector is None:
        invalid_keys_bad_vector = {}

    if invalid_exprs_bad_value is None:
        invalid_exprs_bad_value = []
    if invalid_exprs_bad_syntax is None:
        invalid_exprs_bad_syntax = []
    if invalid_exprs_bad_scalar is None:
        invalid_exprs_bad_scalar = {}
    if invalid_exprs_bad_vector is None:
        invalid_exprs_bad_vector = {}

    keys, values, param_names, param_indices = parse_param_keys(
        params, descs,
        silent_errors,
        silent_warnings,
        throw_on_errors,
        throw_on_warnings,
        invalid_keys_syntax,
        invalid_keys_unknown,
        invalid_keys_repeated,
        invalid_keys_bad_scalar,
        invalid_keys_bad_vector)

    keys_2 = []
    values_2 = []
    param_names_2 = []
    param_indices_2 = []
    ast_root_nodes = []
    eparams_to_keys = {}
    expr_param_symbols = []

    for key, value, name, indices in zip(
            keys, values, param_names, param_indices):
        # Skip invalid expressions
        if not _is_param_value_expr(value, True):
            invalid_exprs_bad_value.append(key)
            continue
        # All expressions can be converted to str
        value = str(value)
        # Parse expression and retrieve AST root node
        try:
            ast_root = ast.parse(value)
        except SyntaxError:
            invalid_exprs_bad_syntax.append(key)
            continue
        # Discover all symbols in expression
        visitor = _ParamExprVisitor(descs)
        visitor.visit(ast_root)
        # Keep track of invalid symbols
        if visitor.invalid_scalars():
            invalid_exprs_bad_scalar[key] = visitor.invalid_scalars()
        if visitor.invalid_vectors():
            invalid_exprs_bad_vector[key] = visitor.invalid_vectors()
        # Skip expressions with invalid symbols
        if visitor.invalid_scalars() or visitor.invalid_vectors():
            continue
        # Keep track of the parent key for each exploded param
        eparams_to_keys.update(
            {eparam: key for eparam in explode_param_name_from_indices(name, indices)})
        # This is a valid key-value pair
        keys_2.append(key)
        values_2.append(value)
        param_names_2.append(name)
        param_indices_2.append(indices)
        ast_root_nodes.append(ast_root)
        expr_param_symbols.append(visitor.symbols())

    if invalid_exprs_bad_value:
        _log_msg(
            logging.ERROR,
            silent_errors,
            throw_on_errors,
            f"the value of the following parameter keys "
            f"cannot be converted to an expression: "
            f"{str(invalid_exprs_bad_value)}")
    if invalid_exprs_bad_syntax:
        _log_msg(
            logging.ERROR,
            silent_errors,
            throw_on_errors,
            f"the value of the following parameter keys "
            f"contains an expression with syntax errors: "
            f"{str(invalid_exprs_bad_syntax)}")
    if invalid_exprs_bad_scalar:
        _log_msg(
            logging.ERROR,
            silent_errors,
            throw_on_errors,
            f"the value of the following parameter keys "
            f"contains an expression with invalid scalar symbols: "
            f"{str(invalid_exprs_bad_scalar)}")
    if invalid_exprs_bad_vector:
        _log_msg(
            logging.ERROR,
            silent_errors,
            throw_on_errors,
            f"the value of the following parameter keys "
            f"contains an expression with invalid vector symbols: "
            f"{str(invalid_exprs_bad_vector)}")

    # Build dependency graph of all exploded parameters
    graph = graphlib.TopologicalSorter()
    for name, indices, symbols in zip(
            param_names_2, param_indices_2, expr_param_symbols):
        print(f"name: {name}, symbols: {dict(symbols)}, values: {list(symbols.values())}")
        eparams_lhs = explode_param_name_from_indices(name, indices)
        eparams_rhs = explode_param_names_from_indices(symbols.keys(), symbols.values())
        for pair in itertools.product(eparams_lhs, eparams_rhs):
            graph.add(pair[1], pair[0])

    # Perform topological sorting on the graph
    try:
        sorted_eparams = list(reversed(list(graph.static_order())))
    except graphlib.CycleError as e:
        raise RuntimeError(
            f"circular dependencies found between param expressions; "
            f"have a look at the following cycle: {e.args[1]}") from e

    # Using the sorted exploded parameters derive a list of keys
    # The order of keys reflects the expression evaluation order
    sorted_keys = []
    for eparam in sorted_eparams:
        key = eparams_to_keys.get(eparam)
        if key and key not in sorted_keys:
            sorted_keys.append(key)
    for key in keys_2:
        if key not in sorted_keys:
            sorted_keys.append(key)

    # Apply the correct order to the output arrays
    order = [keys_2.index(key) for key in sorted_keys]
    keys_2 = [keys_2[i] for i in order]
    values_2 = [values_2[i] for i in order]
    param_names_2 = [param_names_2[i] for i in order]
    param_indices_2 = [param_indices_2[i] for i in order]
    ast_root_nodes = [ast_root_nodes[i] for i in order]

    return keys_2, values_2, param_names_2, param_indices_2, ast_root_nodes
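A note on edge direction in the graph-building loop above: graph.add(pair[1], pair[0]) registers each used symbol as a node whose predecessor is the parameter being defined, so the raw static order lists an expression before the symbols it uses; reversing it yields the evaluation order (symbols before the expressions that use them). A tiny sketch of that equivalence with made-up names:

import graphlib

# "y = x + 1": the definition of y uses x.
g = graphlib.TopologicalSorter()
g.add("x", "y")   # edge recorded the "flipped" way: node "x", predecessor "y"
print(list(reversed(list(g.static_order()))))  # ['x', 'y'] - x evaluated first

g2 = graphlib.TopologicalSorter()
g2.add("y", "x")  # the straightforward encoding gives the same order directly
print(list(g2.static_order()))                 # ['x', 'y']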
Example 25
    def topological_sort(self) -> list[T]:
        return list(graphlib.TopologicalSorter(self._graph).static_order())
Example 26
    def test_not_hashable_nodes(self):
        ts = graphlib.TopologicalSorter()
        self.assertRaises(TypeError, ts.add, dict(), 1)
        self.assertRaises(TypeError, ts.add, 1, dict())
        self.assertRaises(TypeError, ts.add, dict(), dict())
Example 27
def topological_sort(dependency_graph):
    return list(graphlib.TopologicalSorter(dependency_graph).static_order())
Example 28
    def test_prepare_multiple_times(self):
        ts = graphlib.TopologicalSorter()
        ts.prepare()
        with self.assertRaisesRegex(ValueError,
                                    r"cannot prepare\(\) more than once"):
            ts.prepare()
Example 29
    def test_graph_with_iterables(self):
        dependson = (2 * x + 1 for x in range(5))
        ts = graphlib.TopologicalSorter({0: dependson})
        self.assertEqual(list(ts.static_order()), [1, 3, 5, 7, 9, 0])
Example 30
#!/usr/bin/env python3
#
# Python 3.9: New module: graphlib
#

import graphlib

#
#           C --> F
#           ^      \
#          / \      v
#   A --> B   D --> G
#          \ /
#           v
#           E
#
graph = {
    'A': [],
    'B': ['A'],
    'C': ['B', 'D'],
    'D': [],
    'E': ['B', 'D'],
    'F': ['C'],
    'G': ['D', 'F'],
}
ts = graphlib.TopologicalSorter(graph)
print(list(ts.static_order()))  # ['A', 'D', 'B', 'C', 'E', 'F', 'G']
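For completeness, the same graph can also be consumed step by step through the prepare()/get_ready()/done() protocol used by several of the test examples above; this short continuation of the demo script prints one ready group per iteration:

ts = graphlib.TopologicalSorter(graph)
ts.prepare()
while ts.is_active():
    ready = ts.get_ready()   # every node whose predecessors are all done
    print(ready)             # first iteration: ('A', 'D')
    ts.done(*ready)          # marking nodes done releases their successors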