from tokenizer import Tokenizer
from parsers import Parser

# Driver script: read sentences from stdin until EOF, tokenize and parse
# each one, and tally how many the grammar accepts vs. rejects.
tokenizer = Tokenizer()
parser = Parser()
accept_nb = reject_nb = 0

while True:
    try:
        sentence = input()
    except EOFError:
        # Fix: the original looped forever and crashed with an unhandled
        # EOFError once stdin was exhausted; break cleanly instead.
        break
    if not sentence:
        # Skip blank lines (original guarded with len(sentence) > 0).
        continue
    parser.set_parser()
    tokenizer.set_tokenizer(sentence)
    check = True
    while not tokenizer.is_end():
        token = tokenizer.next()
        if token['status'] == 'ERROR':
            # Lexical error: abandon this sentence (check stays True, but
            # tokenizer.is_end() is False, so it is counted as rejected).
            break
        if not parser.parsing(token['status']):
            # Parser rejected the token.
            check = False
            break
    # Accepted only if the whole sentence was consumed, no token was
    # rejected, and the parser finished in an accepting state.
    if tokenizer.is_end() and check and parser.is_accept():
        accept_nb += 1
    else:
        reject_nb += 1

# Final tally, printed once after EOF (reachable now that the loop ends).
print('accept: %d, reject: %d' % (accept_nb, reject_nb))
from tokenizer import Tokenizer
from parsers import Parser

# Driver script: read sentences from stdin until EOF, print 'accept' or
# 'reject' per sentence, then print the overall tallies.
tokenizer = Tokenizer()
parser = Parser()
reject_count = 0
accept_count = 0

while True:
    try:
        string = input()
    except EOFError:
        # Fix: the original never terminated and crashed with an unhandled
        # EOFError at end of input; break cleanly instead.
        break
    tokenizer.setting(string)
    parser.setting()
    check = True
    while not tokenizer.end_string():
        token = tokenizer.next()
        # NOTE(review): this script keys tokens by 'state' while the sibling
        # driver uses 'status' — verify against the Tokenizer implementation.
        check = parser.parsing(token['state'])
        if not check:
            # Token rejected mid-sentence: count and report immediately.
            print('reject')
            reject_count += 1
            break
    if check:
        # All tokens consumed without rejection; accept only if the parser
        # ended in an accepting state.
        if parser.is_accept():
            print('accept')
            accept_count += 1
        else:
            print('reject')
            reject_count += 1

# Final tally, printed once after EOF (reachable now that the loop ends).
print('accept: %d, reject: %d' % (accept_count, reject_count))