from typing import Generator, Optional, Set, Tuple

import networkx as nx
from networkx import (
    condensation,
    descendants,
    has_path,
    is_directed_acyclic_graph,
)

# node IDs in the input graphs are plain ints (see the docstrings below)
nxGraphNodeID = int


def outer_in_graph_iter(
    g: nx.DiGraph,
    c: Optional[nx.DiGraph] = None
) -> Generator[Tuple[Set[nxGraphNodeID], Set[nxGraphNodeID]], None, None]:
    """For a directed graph with unique node IDs with type int, iterates
    from outer / leafmost / least depended upon nodes to inner nodes
    yielding sets of node IDs. Optionally, takes a precomputed condensed
    DAG of g.

    Properties:

    * yields each node ID once
    * successive node ID sets only depend on/point to previously visited
    nodes or other nodes within their set
    """
    if len(g.nodes) == 0:
        # raising StopIteration inside a generator is a RuntimeError under
        # PEP 479; an empty graph simply produces an empty iteration
        return

    # > C – The condensation graph C of G. The node labels are integers
    # > corresponding to the index of the component in the list of strongly
    # > connected components of G. C has a graph attribute named ‘mapping’ with
    # > a dictionary mapping the original nodes to the nodes in C to which they
    # > belong. Each node in C also has a node attribute ‘members’ with the set
    # > of original nodes in G that form the SCC that the node in C represents.
    #
    # https://networkx.github.io/documentation/stable/reference/algorithms/generated/networkx.algorithms.components.condensation.html#networkx.algorithms.components.condensation
    if c is None:
        c = condensation(g)
    assert is_directed_acyclic_graph(c)
    for scc_ids in outer_in_dag_iter(c):
        # all condensation nodes reachable from this set of SCCs, i.e.
        # everything the set transitively depends on (already yielded in
        # earlier iterations)
        descendant_scc_ids: Set[int] = set()
        for scc_id in scc_ids:
            descendant_scc_ids.update(descendants(c, scc_id))
        yield (
            scc_ids_to_graph_node_ids(c, scc_ids),
            scc_ids_to_graph_node_ids(c, descendant_scc_ids),
        )
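

# The two helpers used above are not part of this snippet. Below is a minimal
# sketch of what they could look like, assuming only the documented 'members'
# node attribute of the condensation graph and the iteration order promised by
# the docstring; the original implementations may differ.


def scc_ids_to_graph_node_ids(
    c: nx.DiGraph, scc_ids: Set[int]
) -> Set[nxGraphNodeID]:
    """Maps condensation node IDs back to original graph node IDs via the
    'members' node attribute of the condensation graph."""
    node_ids: Set[nxGraphNodeID] = set()
    for scc_id in scc_ids:
        node_ids.update(c.nodes[scc_id]["members"])
    return node_ids


def outer_in_dag_iter(dag: nx.DiGraph) -> Generator[Set[int], None, None]:
    """Yields sets of DAG node IDs such that each yielded set only points
    to previously yielded nodes, starting from nodes with no successors."""
    remaining = set(dag.nodes)
    while remaining:
        # nodes whose successors (dependencies) have all been yielded already
        next_set = {
            n
            for n in remaining
            if all(succ not in remaining for succ in dag.successors(n))
        }
        assert next_set, "graph is not a DAG"
        yield next_set
        remaining -= next_set
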
def node_dep_ids_iter(
    g: nx.DiGraph,
    c: Optional[nx.DiGraph] = None
) -> Generator[Tuple[nxGraphNodeID, Set[nxGraphNodeID], Set[nxGraphNodeID]],
               None, None]:
    """For a directed graph with unique node IDs with type int and
    optional precomputed condensed DAG of g, iterates over sets of
    strongly connected components from outer / leafmost / least depended
    upon nodes to inner nodes.

    For each set of strongly connected components, yield each node by
    decreasing ID with its sets of immediate or direct dependencies
    (path length one) and transitive or indirect dependencies (path
    length greater than one).

    Properties:

    * yields each node ID once
    * successive node IDs only depend on/point to previously visited
    nodes or other nodes within their set?
    """
    if c is None:
        c = condensation(g)

    for node_ids, indirect_node_ids in outer_in_graph_iter(g, c):
        for node_id in sorted(node_ids, reverse=True):
            direct_dep_ids: Set[int] = set(g.successors(node_id))
            # indirect deps: other nodes in this SCC group reachable from
            # node_id plus everything the group transitively depends on,
            # minus the direct deps and the node itself
            indirect_dep_ids: Set[int] = (
                {
                    dest_id
                    for dest_id in node_ids
                    if has_path(g, node_id, dest_id)
                }
                | indirect_node_ids
            ) - direct_dep_ids - {node_id}

            yield node_id, direct_dep_ids, indirect_dep_ids
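

# A minimal usage sketch (a hypothetical three-node graph; the printed order
# assumes the helper sketches above): edges point from a node to its
# dependencies, and nodes 1 and 2 form a strongly connected component.
if __name__ == "__main__":
    demo_g = nx.DiGraph([(0, 1), (1, 2), (2, 1)])
    for demo_id, direct_ids, indirect_ids in node_dep_ids_iter(demo_g):
        print(demo_id, direct_ids, indirect_ids)
    # the SCC members 2 and 1 are yielded before 0; node 0 has direct
    # dependencies {1} and indirect dependencies {2}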
Example #3
    def check_content(self, r_exprs: List[RExpr], verbose: bool = False) -> \
            Tuple[List[CodeGen], Set[str], List[IRTerm], List[ParseError]]:
        errors = []
        ir_terms = []
        (other_forms, record_names, types, funcs, code_gens), type_errors = extract_type(r_exprs)
        # types, ctors, type_errors = self.check_types(type_forms)

        if len(type_errors) > 0:
            errors.extend(type_errors)
            return code_gens, record_names, ir_terms, errors

        # infer_sys = InferSys()
        infer_sys = self.infer_sys
        type_env = TypeEnv.default()

        if self.verbose:
            for type_name, t in types.items():
                print("defined type {} :: {}".format(type_name, t))

            for name, t in funcs.items():
                print("get func {} :: {}".format(name, t))

        schemas = []
        schemas.extend(((TVar(name), infer_sys.generalize(t)) for name, t in types.items()))
        schemas.extend(((TVar(name), infer_sys.generalize(t)) for name, t in funcs.items()))

        type_env = type_env.extend(schemas)

        define_forms = [f for f in other_forms if TypeChecker.is_define_form(f)]
        expr_forms = [f for f in other_forms if not TypeChecker.is_define_form(f)]

        all_def = dict()
        all_def_form = dict()

        for define_form in define_forms:
            define, errs = parse_define(define_form)
            ir_terms.append(define)
            errors.extend(errs)

            all_def[define.get_name()] = define
            all_def_form[define.get_name()] = define_form

        dep_graph = DiGraph()

        # every definition becomes a node in the dependency graph
        for k in all_def.keys():
            dep_graph.add_node(k)

        def_names = set(all_def.keys())

        # add an edge from each referenced definition to the definition that
        # uses it, so references come first in a topological order
        for k, v in all_def.items():
            refs = v.has_ref(def_names)
            for ref in refs:
                dep_graph.add_edge(ref, k)

        comps = list(scc(dep_graph))
        if self.verbose:
            print('comps:', comps)
        shrink = condensation(dep_graph, comps)
        part_indexes = list(topological_sort(shrink))

        for part_index in part_indexes:
            def_group = comps[part_index]
            if len(def_group) == 1:
                def_name = def_group.pop()
                define = all_def[def_name]
                expr = all_def_form[def_name]

                if isinstance(define, IRDefine):
                    t, msg = infer_sys.solve_ir_define(type_env, define)
                else:
                    t, msg = infer_sys.solve_var_define(type_env, define)
                if msg is not None:
                    errors.append(
                        ParseError(
                            expr.span,
                            'type error, unification error: {}'.format(msg),
                        )
                    )
                    continue

                s, match_errors = self.report_define_infer(t, define, expr)
                errors.extend(match_errors)
                type_env = type_env.add(define.sym, s)
            else:
                def_names = list(def_group)
                defs = [all_def[name] for name in def_names]
                exprs = [all_def_form[name] for name in def_names]
                types, msg = infer_sys.solve_ir_many_def(type_env, defs)
                if msg is not None:
                    errors.append(
                        ParseError(
                            exprs[0].span,
                            'type error, unification error: {}'.format(msg),
                        )
                    )
                    continue
                for (t, _def, expr) in zip(types, defs, exprs):
                    s, match_errors = self.report_define_infer(t, _def, expr)
                    errors.extend(match_errors)
                    type_env = type_env.add(_def.sym, s)

        # definitions are all type-checked; now check the top-level expressions

        for expr_form in expr_forms:
            ir_expr, errs = parse_ir_expr(expr_form)
            ir_terms.append(ir_expr)
            errors.extend(errs)
            if len(errors) > 0:
                continue
            t, msg = infer_sys.solve_ir_expr(type_env, ir_expr)
            if msg is not None:
                msg = "type error, unification error {}".format(msg)
                errors.append(ParseError(expr_form.span, msg))
                continue
            if self.verbose:
                print('expr: {} :: {}'.format(ir_expr.to_raw(), t))

        return code_gens, record_names, ir_terms, errors
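
The ordering logic in the middle of check_content mirrors the first example:
build a dependency graph of the definitions, condense its strongly connected
components, and type-check the groups in topological order, so that mutually
recursive definitions are solved together and every other definition is
checked after the definitions it refers to. Below is a self-contained sketch
of just that pattern using plain networkx calls; the defs_refs data and the
variable names are illustrative, not taken from the original code.

import networkx as nx

# each definition maps to the names it refers to
defs_refs = {
    "even": {"odd"},   # "even" and "odd" are mutually recursive
    "odd": {"even"},
    "main": {"even"},  # "main" depends on the recursive pair
}

dep_graph = nx.DiGraph()
dep_graph.add_nodes_from(defs_refs)
for name, refs in defs_refs.items():
    for ref in refs:
        # edge from the referenced definition to its user, so references
        # come earlier in a topological order of the condensation
        dep_graph.add_edge(ref, name)

comps = list(nx.strongly_connected_components(dep_graph))
shrink = nx.condensation(dep_graph, comps)
for part_index in nx.topological_sort(shrink):
    group = comps[part_index]
    # a single-element group is an ordinary definition; a larger group is a
    # set of mutually recursive definitions that must be inferred together
    print(sorted(group))  # prints ['even', 'odd'] then ['main']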