Example #1
def train_and_extract_tomita(tomita_grammar,
                             acc_stop=1.,
                             loss_stop=0.005,
                             load=False):
    tomita_alphabet = ["0", "1"]

    if not load:
        rnn = train_RNN_on_tomita_grammar(tomita_grammar,
                                          acc_stop=acc_stop,
                                          loss_stop=loss_stop)
    else:
        rnn = train_RNN_on_tomita_grammar(tomita_grammar, train=False)
        rnn.load(f"RNN_Models/tomita_{tomita_grammar}.model")

    # wrap the trained RNN as a system under learning (SUL) for AALpy
    sul = RnnBinarySUL(rnn)
    alphabet = tomita_alphabet

    # equivalence oracle: random walks of length 5 starting from every hypothesis state
    state_eq_oracle = StatePrefixEqOracle(alphabet,
                                          sul,
                                          walks_per_state=1000,
                                          walk_len=5)

    # extract a DFA from the RNN with the L* algorithm
    dfa = run_Lstar(alphabet=alphabet,
                    sul=sul,
                    eq_oracle=state_eq_oracle,
                    automaton_type='dfa',
                    cache_and_non_det_check=True)

    save_automaton_to_file(dfa,
                           f'LearnedAutomata/learned_tomita{tomita_grammar}')
    visualize_automaton(dfa)
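A minimal driver for the function above, assuming the aalpy imports below and that RnnBinarySUL and train_RNN_on_tomita_grammar come from the RNN helper scripts shipped with these examples (they are not part of the installed aalpy package):

# hypothetical driver; only the aalpy imports are provided by the library itself
from aalpy.oracles import StatePrefixEqOracle
from aalpy.learning_algs import run_Lstar
from aalpy.utils import save_automaton_to_file, visualize_automaton

# extract a DFA from an RNN trained on Tomita grammar 3
train_and_extract_tomita(3, acc_stop=1., loss_stop=0.005, load=False)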
Example #2
def train_and_extract_bp(path="TrainingDataAndAutomata/balanced()_1.txt",
                         load=False):
    bp_alphabet = list(string.ascii_lowercase + "()")

    x, y = parse_data(path)
    x_train, y_train, x_test, y_test = preprocess_binary_classification_data(
        x, y, bp_alphabet)

    # adjust the RNN hyperparameters here if desired
    rnn = RNNClassifier(bp_alphabet,
                        output_dim=2,
                        num_layers=2,
                        hidden_dim=50,
                        x_train=x_train,
                        y_train=y_train,
                        x_test=x_test,
                        y_test=y_test,
                        batch_size=18,
                        nn_type="GRU")

    # index of the training data file, e.g. '1' for 'balanced()_1.txt'
    data_index = path[-5]
    if not load:
        rnn.train(stop_acc=1., stop_epochs=3, verbose=True)
        rnn.save(f"RNN_Models/balanced_parentheses{data_index}.rnn")
    else:
        rnn.load(f"RNN_Models/balanced_parentheses{data_index}.rnn")

    sul = RnnBinarySUL(rnn)
    alphabet = bp_alphabet

    state_eq_oracle = TransitionFocusOracle(alphabet,
                                            sul,
                                            num_random_walks=500,
                                            walk_len=30,
                                            same_state_prob=0.3)

    dfa = run_Lstar(alphabet=alphabet,
                    sul=sul,
                    eq_oracle=state_eq_oracle,
                    automaton_type='dfa',
                    cache_and_non_det_check=False,
                    max_learning_rounds=5)

    save_automaton_to_file(
        dfa, f'LearnedAutomata/balanced_parentheses{data_index}')
    return dfa
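A possible call of this function, assuming the data file exists and the helper classes used above (parse_data, preprocess_binary_classification_data, RNNClassifier, RnnBinarySUL) are importable alongside aalpy:

# hypothetical usage of train_and_extract_bp
bp_dfa = train_and_extract_bp(path="TrainingDataAndAutomata/balanced()_1.txt",
                              load=False)
print(bp_dfa)  # __str__ of an AALpy automaton returns its textual representation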
Example #3
    def __str__(self):
        """
        :return: A string representation of the automaton
        """
        from aalpy.utils import save_automaton_to_file
        return save_automaton_to_file(self,
                                      path='learnedModel',
                                      file_type='string')
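Because __str__ delegates to save_automaton_to_file with file_type='string', printing a learned model yields its textual representation instead of writing a file; a minimal sketch, assuming learned_model is an AALpy automaton instance:

print(learned_model)  # textual representation produced by __str__
# the same string can be obtained explicitly
model_as_string = save_automaton_to_file(learned_model, path='learnedModel', file_type='string')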
Example #4
def learn_coffee_machine_mbd(visualize=False):
    sul = FaultInjectedCoffeeMachineSUL()
    alphabet = ['coin', 'button', 'coin_double_value', 'button_no_effect']

    eq_oracle = RandomWMethodEqOracle(alphabet,
                                      sul,
                                      walks_per_state=5000,
                                      walk_len=20)

    learned_model = run_Lstar(alphabet,
                              sul,
                              eq_oracle,
                              automaton_type='mealy',
                              cache_and_non_det_check=False)

    if visualize:
        visualize_automaton(learned_model, display_same_state_trans=True)

    return learned_model


if __name__ == '__main__':
    # learn_crossroad is defined earlier in the same script (not shown in this excerpt)
    model = learn_crossroad(False)
    save_automaton_to_file(model, path='CrossroadModelFull')
    # visualize_automaton(model, display_same_state_trans=True)
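A possible driver for learn_coffee_machine_mbd, assuming FaultInjectedCoffeeMachineSUL is defined alongside this script (it is not part of the installed aalpy package):

# learn the fault-injected coffee machine and store the resulting Mealy machine
faulty_coffee_model = learn_coffee_machine_mbd(visualize=False)
save_automaton_to_file(faulty_coffee_model, path='FaultyCoffeeMachineModel')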
Example #5
    def save(self, file_path='LearnedModel'):
        from aalpy.utils import save_automaton_to_file
        save_automaton_to_file(self, path=file_path)
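A minimal usage sketch for this method; learned_dfa and the file name are placeholders:

learned_dfa.save(file_path='LearnedAutomata/my_dfa')  # writes a .dot file at the given path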
Example #6
# 'rnn' is assumed to be an RNN classifier already trained on a Tomita grammar (cf. Example #1)
sul = RnnBinarySUL(rnn)
alphabet = tomita_alphabet

state_eq_oracle = StatePrefixEqOracle(alphabet,
                                      sul,
                                      walks_per_state=200,
                                      walk_len=6)

dfa = run_Lstar(alphabet=alphabet,
                sul=sul,
                eq_oracle=state_eq_oracle,
                automaton_type='dfa',
                cache_and_non_det_check=True)

save_automaton_to_file(dfa, 'RNN_Models/tomita3')
visualize_automaton(dfa)

# train and extract balanced parentheses
bp_model = train_and_extract_bp(
    path='TrainingDataAndAutomata/balanced()_2.txt', load=False)
print("Print extracted model")
print(bp_model)

# train an RNN on the coffee machine data and extract a Mealy machine from it
coffee_machine_automaton = train_RNN_and_extract_FSM('coffee')
save_automaton_to_file(coffee_machine_automaton, 'CoffeeMachineModel')

# same procedure for the MQTT example
mqtt_automaton = train_RNN_and_extract_FSM('mqtt')
save_automaton_to_file(mqtt_automaton, 'MqttModel')
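If a rendered view of the MQTT model is wanted as well, the helpers used above can produce one; file_type='pdf' and the need for an installed Graphviz are assumptions here:

# render the learned MQTT model in addition to the default .dot output
visualize_automaton(mqtt_automaton, display_same_state_trans=True)
save_automaton_to_file(mqtt_automaton, 'MqttModelPdf', file_type='pdf')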