def main():
    """Tokenize a file named in argv[1], or run an interactive prompt.

    Batch mode lexes the file line by line (token printing is left
    commented out); interactive mode reads lines until EOF and prints
    every token produced.
    """
    if len(argv) > 1:
        # Batch mode. Fix: use a context manager so the file is closed
        # (the original opened it and never closed it).
        with open(argv[1], "r") as f:
            for line in f:
                lexer.input(line)
                while 1:
                    tok = lexer.token()
                    if not tok:
                        break
                    # uncomment to print tokens
                    # print(tok)
    else:
        # Fix: print()/input() — the original used Python 2 print
        # statements and raw_input, which are syntax/name errors under
        # Python 3 (the rest of this file already uses print()).
        print("Interactive mode")
        while 1:
            try:
                sometext = input("> ")
                lexer.input(sometext)
                while 1:
                    tok = lexer.token()
                    if not tok:
                        break
                    # uncomment to print tokens
                    print(tok)
            except EOFError:
                print("EOF")
                break
def stage_one(source_code):
    """Lex *source_code* and print each token wrapped in XML-like tags."""
    lexer.input(source_code)
    current = lexer.token()
    print("<Tokens>")
    while current:
        print("\t<%s> %s </%s>" % (current.type, current.value, current.type))
        current = lexer.token()
    print("</Tokens>")
def tokenize(data):
    """Feed *data* to the lexer and print every resulting token."""
    lexer.input(data)
    tok = lexer.token()
    while tok:  # token() returns a falsy value once the input is exhausted
        print(tok)
        tok = lexer.token()
def test_string(self):
    """Lex a literal containing escaped and adjacent quotes; print tokens."""
    lexer.input(r'"ssdf \"lkjf""sldkjfl"')
    current = lexer.token()
    while current:
        print(current)
        current = lexer.token()
def parse_file(ppfile):
    """Read a manifest under /etc/puppet/manifests/ and lex + parse it.

    When the module-level DEBUG flag is set, the module-level DATA string
    is processed instead of the file contents; LDEBUG additionally dumps
    the full token stream before parsing.
    """
    base_dir = '/etc/puppet/manifests/'
    current_file = '%s%s' % (base_dir, ppfile)
    with open(current_file, 'r') as fsite:
        fdata = fsite.read()
    if LDEBUG:
        if DEBUG:
            lexer.input(DATA)
        else:
            lexer.input(fdata)
        while True:
            tok = lexer.token()
            if not tok:
                break  # No more input
            # Fix: print() function — the original Python 2 print
            # statement is a syntax error under Python 3.
            print(tok)
    if DEBUG:
        parser.parse(DATA)
    else:
        parser.parse(fdata)
    traverse_node_tree(nodes, dict(t_nodes))
def test_alri(self):
    """Lex a small arithmetic expression and print each token."""
    lexer.input(r'123 + 23* 54')
    current = lexer.token()
    while current:
        print(current)
        current = lexer.token()
def test_time(self):
    """Lex a time-duration literal and print each token."""
    lexer.input("5m30s")
    current = lexer.token()
    while current:
        print(current)
        current = lexer.token()
def test_number(self):
    """Lex a negative decimal literal and print each token."""
    lexer.input("-0.123")
    current = lexer.token()
    while current:
        print(current)
        current = lexer.token()
    # Trivial assertion kept from the original (always passes).
    self.assertEqual(True, True)
def testLexer(lexer, read_data): lexer.input(read_data) # Tokenize while True: tok = lexer.token() if not tok: break # No more input print(tok)
def get_types(data):
    """Return the list of token-type names produced by lexing *data*."""
    lexer.input(data)
    type_names = []
    current = lexer.token()
    while current:
        type_names.append(current.type)
        current = lexer.token()
    return type_names
def run_lexer(input_file_path):
    """Read the file at *input_file_path*, lex it, and print every token."""
    # Read the whole source file.
    with open(input_file_path, 'r') as input_file:
        code = input_file.read()
    # Give the lexer its input, then drain and print the token stream.
    lexer.input(code)
    tok = lexer.token()
    while tok:
        print(tok)
        tok = lexer.token()
def onMyToolbarButtonLexico(self, s, label):
    """Toolbar handler: lex the input pane and report lexical errors.

    Clears the shared erroresL list and the output pane, runs the lexer
    over the input text (error tokens accumulate in erroresL as a side
    effect), then writes the error count and each error message.
    """
    erroresL.clear()
    self.output.setPlainText("")
    data = self.input.toPlainText()
    lexer.input(data)
    # Drain the token stream; we only care about the side effects
    # recorded in erroresL, not the tokens themselves.
    while lexer.token():
        pass
    self.output.insertPlainText("Errores léxicos detectados: " + str(len(erroresL)) + "\n")
    for mensaje in erroresL:
        self.output.insertPlainText(mensaje + "\n")
# Build the mailbox-or-URL list parser from the grammar rules above.
log.debug('building mailbox_or_url_list parser')
mailbox_or_url_list_parser = yacc.yacc(start='mailbox_or_url_list', errorlog=log, tabmodule='mailbox_or_url_list_parsetab')

# Interactive prompt for easy debugging
if __name__ == '__main__':
    while True:
        try:
            # Fix: input() — raw_input does not exist in Python 3.
            s = input('\nflanker> ')
        except KeyboardInterrupt:
            break
        except EOFError:
            break
        if s == '':
            continue
        # Fix: print() function calls — the original Python 2 print
        # statements are syntax errors under Python 3.
        print('\nTokens list:\n')
        lexer.input(s)
        while True:
            tok = lexer.token()
            if not tok:
                break
            print(tok)
        print('\nParsing behavior:\n')
        result = mailbox_or_url_list_parser.parse(s, debug=log)
        print('\nResult:\n')
        print(result)
# Build the mailbox-or-URL list parser from the grammar rules above.
log.debug('building mailbox_or_url_list parser')
mailbox_or_url_list_parser = yacc.yacc(
    start='mailbox_or_url_list',
    errorlog=log,
    tabmodule='mailbox_or_url_list_parsetab')

# Interactive prompt for easy debugging
if __name__ == '__main__':
    while True:
        try:
            # Fix: input() — raw_input does not exist in Python 3.
            s = input('\nflanker> ')
        except KeyboardInterrupt:
            break
        except EOFError:
            break
        if s == '':
            continue
        # Fix: print() function calls — the original Python 2 print
        # statements are syntax errors under Python 3.
        print('\nTokens list:\n')
        lexer.input(s)
        while True:
            tok = lexer.token()
            if not tok:
                break
            print(tok)
        print('\nParsing behavior:\n')
        result = mailbox_or_url_list_parser.parse(s, debug=log)
        print('\nResult:\n')
        print(result)
def real_token(self):
    """Fetch the next token by delegating to the module-level lexer."""
    next_tok = lexer.token(self)
    return next_tok