Example no. 1
def spotAtm_to_automaton(atm:Union[spot.twa, spot.twa_graph],
                         states_prefix:str,
                         signal_by_name:Dict[str, Signal],
                         atm_name:str) -> Automaton:
    """Mirror a spot automaton as an Automaton over the given signals."""
    def node_name(s):
        return states_prefix + str(s)

    myState_by_spotState = dict()  # type: Dict[int, Node]

    # BFS over the spot states, mirroring each one as a Node and copying its edges
    queue = {atm.get_init_state_number()}  # type: Set[int]
    processed = set()                      # type: Set[int]
    while queue:
        state_num = queue.pop()
        processed.add(state_num)

        src = myState_by_spotState.setdefault(state_num, Node(node_name(state_num)))
        for e in atm.out(state_num):  # type: spot.twa_graph_edge_storage
            if e.dst not in processed:
                queue.add(e.dst)
            dst_node = myState_by_spotState.setdefault(e.dst, Node(node_name(e.dst)))
            labels = parse_bdd(e.cond, atm.get_dict(), signal_by_name)
            for l in labels:
                src.add_transition(l, [(dst_node, e.acc.count() != 0)])

    return Automaton({myState_by_spotState[atm.get_init_state_number()]},
                     myState_by_spotState.values(),
                     atm_name)
Example no. 2
    def test_is_not_absorbing(self):
        node = Node('node')

        true_label = Label({})
        node.add_transition(true_label, [(Node('dst1'), True)])

        assert not is_final_sink(node)
Example no. 3
def k_reduce(atm: Automaton, k: int, uniform: bool = True) -> Automaton:
    """
    When `uniform` is set, we reset the counter on crossing the border between SCCs.
    When it is not set, k limits the total number of visits to bad states.
    """
    assert k >= 0, k

    finSCC_by_node = build_state_to_final_scc(atm)

    # accepting sink into which a run falls once its counter is exhausted
    dead_node = Node('dead')
    dead_node.add_transition(LABEL_TRUE, {(dead_node, True)})
    dead_node.k = 0

    new_by_old_k = dict()  # type: Dict[Pair[Node, int], Node]

    def _get_add_node(old_n: Node, k: int) -> Node:
        # Return the copy of `old_n` that carries counter value `k`, creating it on demand;
        # an exhausted counter (k < 0) maps to the dead sink.
        if k < 0:
            return dead_node
        new_node = new_by_old_k[(old_n, k)] = new_by_old_k.get(
            (old_n, k), Node(old_n.name + 'k' + str(k)))
        new_node.k = k
        return new_node

    old_by_new = dict()  # type: Dict[Node, Node]

    nodes_to_process = set()  # type: Set[Node]
    for n in atm.init_nodes:
        new_n = _get_add_node(n, k)
        old_by_new[new_n] = n
        nodes_to_process.add(new_n)

    processed_nodes = set()  # type: Set[Node]
    processed_nodes.add(dead_node)
    while nodes_to_process:
        new_src = nodes_to_process.pop()
        processed_nodes.add(new_src)
        old_src = old_by_new[new_src]
        for lbl, node_flag_pairs in old_src.transitions.items():  # type: (Label, Set[Pair[Node, bool]])
            for old_dst, is_fin in node_flag_pairs:
                if is_final_sink(old_dst):
                    new_dst = dead_node
                else:
                    new_dst_k = (new_src.k - is_fin
                                 if not uniform or _within_same_finSCC(old_src, old_dst, finSCC_by_node)
                                 else k)
                    new_dst = _get_add_node(old_dst, new_dst_k)
                # For "into dead" transitions (lbl, dead) it is possible
                # that it is already present, so we check
                if lbl not in new_src.transitions or (
                        new_dst, False) not in new_src.transitions[lbl]:
                    new_src.add_transition(lbl, {(new_dst, False)})
                else:
                    assert new_dst == dead_node, "I know only the case of repetitions of transitions into dead"
                old_by_new[new_dst] = old_dst
                if new_dst not in processed_nodes:
                    nodes_to_process.add(new_dst)

    return Automaton({_get_add_node(next(iter(atm.init_nodes)), k)},
                     processed_nodes)
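A hedged usage sketch: `ucw` stands for some Automaton built elsewhere, and the bound 2 is purely illustrative.

reduced_atm = k_reduce(ucw, 2)                 # reset the counter between final SCCs
strict_atm = k_reduce(ucw, 2, uniform=False)   # bound the total number of visits

Note that the reduction marks every copied transition as non-accepting, so the only accepting transition left is the self-loop of the 'dead' node: exceeding the bound is the same as reaching 'dead'.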
Example no. 5
    def _create_automaton(self, node_names, init_node_name, transitions_dict):
        name_to_node = {}

        for name in node_names:
            name_to_node[name] = Node(name)

        for trans_desc, is_acc in transitions_dict.items():
            src_node, dst_node = (name_to_node[name]
                                  for name in trans_desc.split('->'))

            src_node.add_transition(LABEL_TRUE, {(dst_node, is_acc)})

        return Automaton({name_to_node[init_node_name]}, set(name_to_node.values()))
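A hedged example of how this helper might be called from a test; the node names and acceptance flags are illustrative.

atm = self._create_automaton(node_names=['a', 'b'],
                             init_node_name='a',
                             transitions_dict={'a->b': False,
                                               'b->b': True})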
Example no. 6
    def test_is_absorbing(self):
        node = Node('node')

        true_label = Label({})
        node.add_transition(true_label, [(node, True)])
        node.add_transition(Label({Signal('r'): True}), [(node, True)])

        assert is_final_sink(node)
Example no. 7
def normalize_nbw_transitions(node:NBWNode,
                              transitions:Dict[Label, Set[Tuple[bool, NBWNode]]])\
        -> Dict[Label, Set[Tuple[bool,NBWNode]]]:
    while True:
        # pick two intersecting 'transitions':
        all_intersecting_label_pairs = lfilter(
            lambda l_l: common_label(l_l[0], l_l[1]) is not None,
            combinations(transitions.keys(), 2))
        if not all_intersecting_label_pairs:
            break
        l1, l2 = all_intersecting_label_pairs[0]

        t_split = []  # type: List[Tuple[Label, Set[Tuple[bool, NBWNode]]]]

        t_split.append((common_label(l1, l2),
                        transitions[l1] | transitions[l2]))

        nl2_labels = negate_label(l2)
        for nl2 in nl2_labels:
            l1_nl2 = common_label(l1, nl2)
            if l1_nl2 is not None:
                t_split.append((l1_nl2, transitions[l1]))

        nl1_labels = negate_label(l1)
        for nl1 in nl1_labels:
            nl1_l2 = common_label(nl1, l2)
            if nl1_l2 is not None:
                t_split.append((nl1_l2, transitions[l2]))

        # NB: we can remove the transitions under l1 and l2, since the newly generated ones cover them
        del transitions[l1]
        del transitions[l2]
        # Careful, we may have other transitions with exactly the same label!
        # => we do not replace but rather 'update'
        for (new_lbl, new_transitions) in t_split:
            if new_lbl in transitions:
                transitions[new_lbl].update(new_transitions)
            else:
                transitions[new_lbl] = new_transitions
        # NB: a plain `transitions.update(t_split)` would be wrong here, since it
        # would overwrite the successor sets of labels that already exist.

        node._transitions = transitions  # FIXME: fix access to the private member

    return transitions
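An illustrative trace (hedged: `r` and `g` stand for Signal objects and Label for a partial assignment over them, as in the tests below). Two overlapping transitions are split into disjoint ones in a single pass of the loop:

# before: Label({r: True})           -> S1
#         Label({r: True, g: True})  -> S2
# after:  Label({r: True, g: True})  -> S1 | S2
#         Label({r: True, g: False}) -> S1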
Example no. 8
    def test_get_next_states(self):
        state = Node('init')

        sig_r, sig_g = Signal('r'), Signal('g')
        node_r = Node('r')
        node_rg = Node('rg')
        node_nr_g = Node('!rg')

        _ = False
        edge_to_r = {(node_r, _)}
        edge_to_rg = {(node_rg, _)}
        edge_to_not_r_g = {(node_nr_g, _)}

        state.add_transition({sig_r: True}, edge_to_r)
        state.add_transition({sig_r: True, sig_g: True}, edge_to_rg)
        state.add_transition({sig_r: False, sig_g: True}, edge_to_not_r_g)

        next_states = get_next_states(state, Label({sig_r:False, sig_g:False}))
        assert len(next_states) == 0

        next_states = get_next_states(state, Label({sig_r:False, sig_g:True}))
        assert len(next_states) == 1
        self._are_equal_sequences({node_nr_g}, next_states)

        next_states = get_next_states(state, Label({sig_r:True, sig_g:True}))
        assert len(next_states) == 2
        self._are_equal_sequences({node_r, node_rg}, next_states)