class AugmentedDataLoader(DataLoader):
    """DataLoader that also carries a SimpleGrammar loaded from *filen*.

    All standard DataLoader arguments are forwarded unchanged; the grammar is
    used by the helper accessors below.
    """

    def __init__(self, dataset, filen, batch_size=1, shuffle=False, sampler=None,
                 batch_sampler=None, num_workers=0, collate_fn=default_collate,
                 pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None):
        # Forward positionally in torch.utils.data.DataLoader's parameter order.
        super().__init__(dataset, batch_size, shuffle, sampler, batch_sampler,
                         num_workers, collate_fn, pin_memory, drop_last,
                         timeout, worker_init_fn)
        self.grammar = SimpleGrammar(filen)
        # BUG FIX: the original line was the bare expression `self.database_calls`,
        # which raises AttributeError the moment the loader is constructed.
        # Initialize it as a counter instead (name suggests it tracks database
        # lookups — TODO confirm intended type/use with the author).
        self.database_calls = 0

    def get_choices_key(self, key):
        """Return the list of alternatives ('items') the grammar stores for *key*."""
        return self.grammar.gr[key]['items']

    def get_values_terminal(self, terminal):
        """Return the single value *terminal* expands to.

        Returns None when *terminal* appears in the module-level `resolve_dict`
        (the original fell through an empty `pass` branch, returning None
        implicitly — made explicit here).
        """
        if terminal in resolve_dict:
            return None
        lst = self.grammar.gr[terminal]
        # Terminals are expected to have exactly one expansion.
        assert len(lst) == 1
        return lst[0]

    def is_terminal_on_path(self, tok, terminal):
        """Delegate to the grammar's token/terminal path check."""
        return self.grammar.from_terminal_to_token(tok, terminal)
def __init__(self, dataset, filen, batch_size=1, shuffle=False, sampler=None,
             batch_sampler=None, num_workers=0, collate_fn=default_collate,
             pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None):
    """Initialize the underlying DataLoader and attach a SimpleGrammar.

    NOTE(review): this def is an orphan duplicate of AugmentedDataLoader.__init__
    elsewhere in the file — confirm whether it should be removed.
    """
    # Forward positionally in torch.utils.data.DataLoader's parameter order.
    super().__init__(dataset, batch_size, shuffle, sampler, batch_sampler,
                     num_workers, collate_fn, pin_memory, drop_last,
                     timeout, worker_init_fn)
    self.grammar = SimpleGrammar(filen)
    # BUG FIX: was the bare expression `self.database_calls`, which raises
    # AttributeError at runtime; initialize a counter instead.
    self.database_calls = 0
class AugmentedDataset:  # (Dataset): original commented-out base kept for reference
    """Pairs a JSON dataset of SQL strings with a SimpleGrammar for validation."""

    def __init__(self, jsonfile, grammar_file):
        self.grammar = SimpleGrammar(grammar_file)
        self.grammar_terminals = self.grammar.get_terminal_toks()
        with open(jsonfile, 'r') as f:
            self.data = json.loads(f.read())

    def augment_data(self, test_string):
        # NOTE(review): this body looks truncated — `learn_` is not a complete
        # attribute name and `test_string` is never used. Kept as-is; TODO
        # confirm the intended implementation with the author.
        to_learn = self.grammar.learn_

    def get_likes_nonlikes(self):
        """Print how many SQL strings in the dataset contain the LIKE keyword."""
        l_counter = 0
        nl_counter = 0
        # Iterate the dict directly instead of materializing a key list first.
        for k in self.data:
            if 'LIKE' in self.data[k]['sql']:
                l_counter += 1
            else:
                nl_counter += 1
        print('{} likes, {} non likes'.format(l_counter, nl_counter))

    def test(self):
        """Print how many SQL strings fail the grammar's token check.

        `reas == 'res'` counts failures attributed to resolution problems.
        """
        counter = 0
        not_resolved = 0
        for k in self.data:
            val, reas = self.grammar.check_string_tokens(self.data[k]['sql'],
                                                         verbose=True)
            if not val:
                counter += 1
                if reas == 'res':
                    not_resolved += 1
        print('{} out of {} are errors, {} are with resolution'.format(
            counter, len(self.data), not_resolved))
def main():
    """Build a GAN from a conv-free discriminator, a dense generator and a
    SimpleGrammar, then run the training mode requested on the command line."""
    args = get_args()
    # Constructors evaluated in the same order as before: discriminator,
    # generator, grammar.
    gan = GAN(no_conv_disc(shape=(3, 1)),
              only_dense_gen(input_dim=100),
              SimpleGrammar())
    # First matching flag wins, mirroring the original if/elif chain.
    for flag, runner in ((args.disc, disc), (args.oracle, oracle), (args.full, full)):
        if flag:
            runner(gan)
            break
def __init__(self, jsonfile, grammar_file):
    """Load the grammar from *grammar_file*, cache its terminal tokens, and
    read the JSON payload from *jsonfile* into ``self.data``."""
    self.grammar = SimpleGrammar(grammar_file)
    self.grammar_terminals = self.grammar.get_terminal_toks()
    with open(jsonfile, 'r') as fh:
        # json.load reads and parses the file handle in one step.
        self.data = json.load(fh)
# Generate a short Portuguese poem from a hand-written SimpleGrammar.
# Idiom fix: the original used explicit "\" line continuations everywhere,
# even inside brackets where Python already continues lines implicitly
# (PEP 8 prefers implicit continuation). The chain itself is now wrapped in
# parentheses; every grammar string is unchanged.
poetry = (
    SimpleGrammar()
    .set_text("#main_structure#")
    .add_tag("main_structure", [
        "#simple_structure#\n\n#simple_structure#\n\n#simple_structure#"
    ])
    .add_tag("simple_structure", [
        "#feeling_statement#\n#world_metaphore#\n#feeling_statement#"
    ])
    .add_tag("feeling_statement", [
        "#feeling# #define_verb# #intense_comparation#"
    ])
    .add_tag("world_metaphore", [
        "#world_object# que #intense_verb# #intense_comparation#"
    ])
    .add_tag("feeling", [
        "Amor", "Ódio", "Ciúmes", "Paixão", "Tristeza", "Raiva", "Fúria",
        "Intensa luxúria"
    ])
    .add_tag("world_object", [
        "Casa", "Muralha", "Castelo", "Praia de #crazy_adjective#",
        "Mar de #crazy_adjective#", "Murada", "Templo de #crazy_adjective#",
        "Águas profundas de #crazy_adjective#"
    ])
    .add_tag("define_verb", [
        "é", "significa", "quer dizer", "se define como",
        "traça sua definição como", "se entrelaça no significado de",
        "se traduz como"
    ])
    .add_tag("intense_verb", [
        "trespassa o significado de", "se liquefaz como"
    ])
    .add_tag("intense_comparation", [
        "nunca viver plenamente", "um animal perdido em sua insanidade",
        "um deslindar na infinitude do instante", "o algoz da eternidade do sentimento",
        "uma muralha infinita no horizonte"
    ])
    .add_tag("crazy_adjective", [
        "tempestuosa alegria", "infinita tristeza", "efêmera contemplação",
        "inepta ânsia", "movimento perpétuo", "doce inexistência",
        "vacuidade existêncial", "beleza infinitesimal", "infinitas impossibilidades"
    ])
)
from grammar import SimpleGrammar

# BUG FIX: the original chain ended with a stray "\" continuation right before
# `print(...)`, which glued the call onto the expression and made the file a
# SyntaxError. The chain is now parenthesized and the redundant "\"
# continuations inside brackets are gone; all grammar strings are unchanged.
poetry = (
    SimpleGrammar()
    .set_text("#main_structure#")
    .add_tag("main_structure", [
        "#1.feeling# #define_verb# mais do que #1.feeling#\n#capitalize.define_verb# absolutamente nada."
    ])
    .add_tag("feeling", [
        "Amor", "Ódio", "Ciúmes", "Paixão", "Tristeza", "Raiva", "Fúria",
        "Intensa luxúria"
    ])
    .add_tag("define_verb", [
        "é", "significa", "quer dizer", "se define como",
        "traça sua definição como", "se entrelaça no significado de",
        "se traduz como"
    ])
)

print(str(poetry))
# -*- coding: utf-8 -*-
from grammar import SimpleGrammar

# BUG FIX: in the "begin" tag the original wrote `"안녕"\` followed by
# "안녕히 주무세요" with no comma between them, so Python silently concatenated
# the two literals into one option ("안녕안녕히 주무세요"). The comma restores
# the intended three separate options. Redundant "\" continuations inside
# brackets are also dropped (implicit continuation applies).
poetry = (
    SimpleGrammar()
    .set_text("#main_structure#")
    .add_tag("main_structure", [
        "#begin#\n\n#problem#\n\n#solution#\n\n#ending#"
    ])
    .add_tag("begin", [
        "안녕하세요", "안녕",
        "안녕히 주무세요"
    ])
    .add_tag("problem", [
        "안녕하세요",
        "#iamproblem#"
    ])
    .add_tag("iamproblem", [
        "저는 #problematic_noun# 입니다"
    ])
    .add_tag("problematic_noun", [
        "선생",
    ])
    .add_tag("solution", [
        "안녕하세요"
    ])
    .add_tag("ending", [
        "안녕하세요"
    ])
)

print(str(poetry))
from grammar import SimpleGrammar

# Rules for a tiny generative story grammar, written as a data table so each
# rule reads as data rather than as a call. Registration order matches the
# original add_tag sequence (dicts preserve insertion order).
_STORY_RULES = {
    "story": ["#story_beginning# #story_problem# #story_climax# #story_ending#"],
    "story_beginning": ["Once upon a time there was a valiant #animal#"],
    "story_problem": ["that never #difficulty_verb#.",
                      "that one day heard some strange words: #strange_calling#"],
    "story_climax": ["Suddenly, he decided to #resolution_verb#."],
    "story_ending": ["Finally he could #result_verb# without worries."],
    "difficulty_verb": ["slept", "danced", "talked"],
    "resolution_verb": ["run", "sing", "give up"],
    "result_verb": ["sleep", "dance", "talk freely"],
    "strange_calling": ["Hello #name#!", "Hello my #writer_object#!"],
    "animal": ["dolphin", "dog", "cat", "lamb", "lion"],
    "name": ["Mr. Gil", "Madame", "Masked Man"],
    "writer_object": ["text", "book", "beloved code"],
}

sg = SimpleGrammar()
for tag, options in _STORY_RULES.items():
    sg.add_tag(tag, options)

print(sg.evaluate("#story#"))