def test_string(self):
    lexer.input(r'"ssdf \"lkjf""sldkjfl"')
    while True:
        token = lexer.token()
        if not token:
            break
        print(token)
def tokenize(data):
    lexer.input(data)
    while True:
        tok = lexer.token()
        if not tok:
            break  # No more input
        print(tok)
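# Every snippet in this collection assumes a PLY-style `lexer` object that
# exposes input() and token(). A minimal sketch of the kind of module that
# could provide it — the token names and rules here are illustrative
# assumptions, not the grammars actually used by the snippets below:
import ply.lex as lex

tokens = ('NUMBER', 'PLUS', 'TIMES')

t_PLUS = r'\+'
t_TIMES = r'\*'
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    # Report and skip any character no rule matches.
    print("Illegal character %r" % t.value[0])
    t.lexer.skip(1)

lexer = lex.lex()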
def main():
    if len(argv) > 1:
        f = open(argv[1], "r")
        for line in f:
            lexer.input(line)
            while True:
                tok = lexer.token()
                if not tok:
                    break
                # uncomment to print tokens
                # print(tok)
    else:
        print("Interactive mode")
        while True:
            try:
                sometext = input("> ")
                lexer.input(sometext)
                while True:
                    tok = lexer.token()
                    if not tok:
                        break
                    print(tok)
            except EOFError:
                print("EOF")
                break
def parse_file(ppfile):
    '''Open the manifest file and parse it.'''
    base_dir = '/etc/puppet/manifests/'
    current_file = '%s%s' % (base_dir, ppfile)
    with open(current_file, 'r') as fsite:
        fdata = fsite.read()
    if LDEBUG:
        if DEBUG:
            lexer.input(DATA)
        else:
            lexer.input(fdata)
        while True:
            tok = lexer.token()
            if not tok:
                break  # No more input
            print(tok)
    if DEBUG:
        parser.parse(DATA)
    else:
        parser.parse(fdata)
    # for parent in dict(t_nodes).keys():
    traverse_node_tree(nodes, dict(t_nodes))
def test_alri(self):
    lexer.input(r'123 + 23* 54')
    while True:
        token = lexer.token()
        if not token:
            break
        print(token)
def test_time(self):
    lexer.input("5m30s")
    while True:
        token = lexer.token()
        if not token:
            break
        print(token)
def main():
    if not len(sys.argv) == 2:
        sys.stderr.write("Usage: jack <file_name.jack>\n\tjack <dir_name>\n")
        sys.exit(2)
    jack_files = None
    path = sys.argv[1:]
    if path[0].endswith(".jack"):
        # A single source file was given; keep it as a one-element list so
        # the loops below iterate over files, not characters.
        jack_files = [path[0]]
    else:
        # A directory was given: collect every .jack file inside it.
        jack_files = [
            os.path.join(path[0], f)
            for f in os.walk(path[0]).__next__()[2]
            if f.endswith(".jack")
        ]
    vm_files = [f.replace(".jack", ".vm") for f in jack_files]
    print(jack_files)
    print(vm_files)
    for source_file in jack_files:
        with open(source_file, "r") as f:
            source_code = f.read()
        lexer.input(source_code)
        # stage_two(lexer)
        stage_three(lexer)
def test_number(self):
    lexer.input("-0.123")
    while True:
        token = lexer.token()
        if not token:
            break
        print(token)
    self.assertEqual(True, True)
def testLexer(lexer, read_data):
    lexer.input(read_data)
    # Tokenize
    while True:
        tok = lexer.token()
        if not tok:
            break  # No more input
        print(tok)
def get_types(data):
    lexer.input(data)
    tokens = []
    while True:
        token = lexer.token()
        if not token:
            break
        tokens.append(token.type)
    return tokens
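# Illustrative use (the token type names depend on the grammar in play):
# with an arithmetic lexer, get_types("1 + 2 * 3") would return
# ['NUMBER', 'PLUS', 'NUMBER', 'TIMES', 'NUMBER'].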
def stage_one(source_code):
    lexer.input(source_code)
    tok = lexer.token()
    print("<Tokens>")
    while tok:
        # print("token: %s, value: %s" % (tok.type, tok.value))
        print("\t<%s> %s </%s>" % (tok.type, tok.value, tok.type))
        tok = lexer.token()
    print("</Tokens>")
def run():
    with open('cards.xml') as f:
        soup = BeautifulSoup(f, 'xml')
        card_list = soup.cockatrice_carddatabase.cards.find_all('card')
    for card in card_list:
        try:
            lexer.input(card.find('text').text.replace(card.find('name').text, '<self>'))
            print("Card: {}".format(card.find('name').text))
            # Consume all tokens; the lexer raises on input it cannot handle.
            for tok in lexer:
                pass
            print()
        except ValueError:
            raise
def run_lexer(input_file_path):
    # read input file
    code = None
    with open(input_file_path, 'r') as input_file:
        code = input_file.read()
    # Give the lexer some input
    lexer.input(code)
    # Tokenize
    while True:
        tok = lexer.token()
        if not tok:
            break  # No more input
        print(tok)
    return
def onMyToolbarButtonLexico(self, s, label):
    erroresL.clear()
    self.output.setPlainText("")
    # print("Lexer activated")
    data = self.input.toPlainText()
    # print(data)
    lexer.input(data)
    while True:
        tok = lexer.token()
        if not tok:
            break  # No more input
    self.output.insertPlainText("Errores léxicos detectados: " + str(len(erroresL)) + "\n")
    for i in erroresL:
        self.output.insertPlainText(i + "\n")
# EDUARDO DIAZ DEL CASTILLO
# Requires Python 3 on Ubuntu.
# Run from the terminal:
#     python3 index.py diccionario.txt
import sys

from lexer import lexer

if __name__ == "__main__":
    file_name = sys.argv[1]
    f = open(file_name, 'r')
    # Read the file and pass it to the lexer.
    data = f.read()
    lexer.input(data)
    # Walk through and compare everything;
    # anything that does not match is an error.
    while True:
        tok = lexer.token()
        if not tok:
            break
        print("CARACTER VALIDOS >>> ", tok)
    print("Cantidad de caracteres en archivo: ", len(data))
log.debug('building mailbox_or_url_list parser')
mailbox_or_url_list_parser = yacc.yacc(
    start='mailbox_or_url_list',
    errorlog=log,
    tabmodule='mailbox_or_url_list_parsetab')

# Interactive prompt for easy debugging
if __name__ == '__main__':
    while True:
        try:
            s = input('\nflanker> ')
        except (KeyboardInterrupt, EOFError):
            break
        if s == '':
            continue

        print('\nTokens list:\n')
        lexer.input(s)
        while True:
            tok = lexer.token()
            if not tok:
                break
            print(tok)

        print('\nParsing behavior:\n')
        result = mailbox_or_url_list_parser.parse(s, debug=log)
        print('\nResult:\n')
        print(result)
import glob
import io
import os

from lexer import lexer

curr_dir = os.path.dirname(__file__)
data_path = os.path.join(curr_dir, 'datadump')
aldelo_path = os.path.join(data_path, 'Aldelo')
aldelo_stores = glob.glob(aldelo_path + '/[A-Z]*')
store = aldelo_stores[0]
receipts = glob.glob(store + '/*')

f = io.open(receipts[0], 'r', encoding="utf-8")
for line in f:
    print(line)
f.close()

f = io.open(receipts[0], 'r', encoding="utf-8")
lexer.input(f.read())
#     p[0] = [struct({'type': 'plaintext', 'content': ''.join(p[1:])})]

def p_text_bullet(p):
    'text : BULLET text NEWLINE'
    logging.debug(sys._getframe().f_code.co_name)
    # Indent level is an integer count of leading '*' pairs.
    indent = len(p[1].split('*')[0]) // 2 - 1
    p[0] = [struct({'type': 'bullet', 'indent': indent, 'content': p[2].strip() + p[3]})]

def p_bold(p):
    'text : BOLD PLAINTEXT BOLD'
    p[0] = [struct({'type': 'bold', 'content': p[2]})]

def p_italic(p):
    'text : ITALIC PLAINTEXT ITALIC'
    p[0] = [struct({'type': 'italic', 'content': p[2]})]

def p_error(p):
    print("Syntax error at '%s'" % repr(p))  # p.value

import ply.yacc as yacc
parser = yacc.yacc()

if __name__ == '__main__':
    from lexer import lexer
    import pprint
    import sys

    input_string = open(sys.argv[-1]).read()
    lexer.input(input_string)
    parse_tree = parser.parse(lexer=lexer)
    pprint.pprint(parse_tree)
#!/usr/bin/env python
import sys

from lexer import lexer
from parser import parser
import textools as masters
from pdb import set_trace

md_name = 'test.md'  # sys.argv[-2]
# templates = sys.argv[-1]
# templates = templates.strip('.py')
# module = __import__(templates)

if __name__ == '__main__':
    md_file = open(md_name).read()
    lexer.input(md_file)
    parse_tree = parser.parse(lexer=lexer)

    tex_name = md_name.replace('md', 'tex')
    with open(tex_name, 'w') as output:
        output.write(masters.header)
        for slide in parse_tree:
            if hasattr(masters, slide.master):
                # Fetch the master-slide builder and render this slide.
                int_slide = getattr(masters, slide.master)(slide)
                output.write(int_slide)
            else:
                raise AttributeError(
                    'No master slide named %s available in the theme collection'
                    % slide.master)
def assertLexerIO(self, lexer_input, expected_output):
    lexer.input(lexer_input)
    actual_output = ''.join([
        ''.join(str(token.value).split()) + token.type
        for token in lexer
    ])
    self.assertEqual(actual_output, ''.join(expected_output.split()))
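# A hypothetical call from a test method (the NUMBER/PLUS token names are
# assumptions about the lexer under test). Whitespace is stripped from both
# sides before comparison, so the expected string can be spaced for
# readability:
#     self.assertLexerIO('1 + 2', '1 NUMBER + PLUS 2 NUMBER')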