def extend_grammar(g, p):
    """Extend a FeatureGrammar with a new (string-represented) production.

    Parameters
    ----------
    g : FeatureGrammar
        The grammar to extend. It is not mutated; a new grammar is returned.
    p : str
        One or more productions in fcfg string form, as accepted by
        ``FeatureGrammar.fromstring``.

    Returns
    -------
    FeatureGrammar
        A new grammar with the same start symbol as ``g`` and the combined
        production list.
    """
    # Use the public accessors instead of the private _start/_productions
    # attributes; CFG/FeatureGrammar expose start() and productions().
    new_productions = FeatureGrammar.fromstring(p).productions()
    return FeatureGrammar(g.start(), g.productions() + new_productions)
def load_grammars(grammar_paths, start_symbol=None):
    """Load fcfg grammars from files and merge them into one FeatureGrammar.

    Parameters
    ----------
    grammar_paths : iterable of str
        Paths understood by ``nltk.data.load``.
    start_symbol : FeatStructNonterminal, optional
        Start symbol of the combined grammar; defaults to ``Sentence``.

    Returns
    -------
    FeatureGrammar
        A grammar containing every production from every loaded file.
    """
    if start_symbol is None:
        start_symbol = FeatStructNonterminal('Sentence')
    # Flatten all productions from all files into a single list.
    combined = [
        production
        for path in grammar_paths
        for production in nltk.data.load(path, format='fcfg').productions()
    ]
    return FeatureGrammar(start_symbol, combined)
know_gael_bas = create_fact(know, Gael, Bas) # know(Gael, Bas) know_gael_justine = create_fact(know, Gael, Justine) # know(Gael, Justine) know_justine_bas = create_fact(know, Justine, Bas) # know(Justine, Bas) member_bas_club = create_fact(member, Bas, club) # member(Bas, club) member_gael_bas_club = create_fact(member, [Gael, Bas], club) # member([Gael, Bas], club) # LIST OF FACT THAT WILL BE INPUT TO THE GRAMMAR : list_of_facts = format_list_of_facts([know_gael_justine, member_gael_bas_club]) root = FeatStructNonterminal("Root") start_production = Production(lhs=root, rhs=[list_of_facts]) # CREATE A NEW GRAMMAR THAT COMBINE THE STATIC + DYNAMIC RULES with open("data/fcfg/feature_grammar.fcfg", "r") as grammar_file: grammar_str = grammar_file.read() static_grammar = FeatureGrammar.fromstring(grammar_str) all_productions = static_grammar.productions() + [start_production] + dynamic_productions dynamic_grammar = FeatureGrammar(start=root, productions=all_productions) print(dynamic_grammar) # SAMPLE SOME LEAF FROM THE ROOT feature_root = FeatureGrammarNode(symbols=root, feature_grammar=dynamic_grammar) random.seed(2) # print(feature_root.find_random_valid_leaf_debug()) for i in range(10): random.seed(i) print(feature_root.find_random_valid_leaf())