Ejemplo n.º 1
0
 def test_string(self):
     """Lex a raw string containing escaped and adjacent quotes; print tokens."""
     lexer.input(r'"ssdf   \"lkjf""sldkjfl"')
     # lexer.token() returns None once the input is exhausted.
     for token in iter(lexer.token, None):
         print(token)
Ejemplo n.º 2
0
def tokenize(data):
    """Feed *data* to the module-level lexer and print every token produced."""
    lexer.input(data)
    # Drain the token stream; token() yields None at end of input.
    for tok in iter(lexer.token, None):
        print(tok)
Ejemplo n.º 3
0
Archivo: main.py Proyecto: vgerak/milc
def main():
    if len(argv) > 1:
        f = open(argv[1], "r")
        for line in f:
            lexer.input(line)
            while 1:
                tok = lexer.token()
                if not tok: 
                    break
                #uncomment to print tokens
                #print tok
    else:
        print "Interactive mode"
        while 1:
            try:
                sometext = raw_input("> ")
                lexer.input(sometext)
                while 1:
                    tok = lexer.token()
                    if not tok:
                        break
                    #uncomment to print tokens
                    print tok
            except EOFError:
                print "EOF"
                break
Ejemplo n.º 4
0
def parse_file(ppfile):
    """Read a Puppet manifest, optionally dump its token stream, then parse it.

    ppfile: manifest filename, resolved relative to /etc/puppet/manifests/.
    Relies on module globals LDEBUG, DEBUG, DATA, lexer, parser, nodes,
    t_nodes and traverse_node_tree.
    """
    base_dir = '/etc/puppet/manifests/'
    current_file = '%s%s' % (base_dir, ppfile)
    with open(current_file, 'r') as fsite:
        fdata = fsite.read()

    # Debug-only lexer pass: print every token (this consumes the lexer input).
    if LDEBUG:
        if DEBUG:
            # DEBUG substitutes the canned DATA blob for the file contents.
            lexer.input(DATA)
        else:
            lexer.input(fdata)

        while True:
            tok = lexer.token()
            if not tok: 
                break      # No more input
            print tok


    # Parse either the canned DATA (DEBUG) or the manifest just read.
    if DEBUG:
        parser.parse(DATA)
    else:
        parser.parse(fdata)
    
#    for parent in dict(t_nodes).keys():
    traverse_node_tree(nodes, dict(t_nodes))
Ejemplo n.º 5
0
 def test_alri(self):
     """Lex a small arithmetic expression and print the resulting tokens."""
     lexer.input(r'123 + 23* 54')
     # token() returns None when the input is exhausted.
     for token in iter(lexer.token, None):
         print(token)
Ejemplo n.º 6
0
 def test_time(self):
     """Lex a duration literal ('5m30s') and print each token produced."""
     lexer.input("5m30s")
     # Iterate until token() signals end-of-input with None.
     for token in iter(lexer.token, None):
         print(token)
Ejemplo n.º 7
0
def main():
    """Compile every .jack source reachable from sys.argv[1].

    argv[1] is either a single ``*.jack`` file or a directory; in the
    directory case every ``*.jack`` file directly inside it is processed.
    Each source is read whole and handed to the module-level ``lexer``,
    then ``stage_three``. Exits with status 2 on bad usage.
    """
    if not len(sys.argv) == 2:
        sys.stderr.write("Usage: jack <file_name.jack>\n\tjack <dir_name>\n")
        sys.exit(2)

    path = sys.argv[1:]

    if path[0].endswith(".jack"):
        # Keep the single file in a LIST: the loops below must iterate
        # filenames, not characters (the original built a plain str here,
        # so `for source_file in jack_files` walked it char by char).
        jack_files = [path[0]]
    else:
        # os.path.join supplies the separator the original `path[0] + f`
        # omitted when the directory had no trailing slash.
        jack_files = [
            os.path.join(path[0], f) for f in next(os.walk(path[0]))[2]
            if f.endswith(".jack")
        ]

    vm_files = [f.replace(".jack", ".vm") for f in jack_files]
    print(jack_files)
    print(vm_files)

    for source_file in jack_files:
        with open(source_file, "r") as f:
            source_code = f.read()

        lexer.input(source_code)
        # stage_two(lexer)
        stage_three(lexer)
Ejemplo n.º 8
0
 def test_number(self):
     """Lex a negative decimal literal and print every token produced."""
     lexer.input("-0.123")
     # token() returns None at end of input.
     for token in iter(lexer.token, None):
         print(token)
     self.assertEqual(True, True)
Ejemplo n.º 9
0
def testLexer(lexer, read_data):
    lexer.input(read_data)
    # Tokenize
    while True:
        tok = lexer.token()
        if not tok:
            break  # No more input
        print(tok)
Ejemplo n.º 10
0
def get_types(data):
    """Return the list of token type names produced by lexing *data*."""
    lexer.input(data)
    # token() yields None when the stream is exhausted.
    return [token.type for token in iter(lexer.token, None)]
Ejemplo n.º 11
0
def stage_one(source_code):
    """Print the token stream of *source_code* wrapped in <Tokens> markup."""
    lexer.input(source_code)
    print("<Tokens>")
    # One line per token: <TYPE> value </TYPE>
    for tok in iter(lexer.token, None):
        print("\t<%s> %s </%s>" % (tok.type, tok.value, tok.type))
    print("</Tokens>")
Ejemplo n.º 12
0
def run():
    """Lex the rules text of every card found in cards.xml.

    Each card's own name inside its rules text is replaced by the
    placeholder '<self>' before lexing; the tokens themselves are
    consumed silently (lexing is run for its side effects/diagnostics).
    Relies on module globals ``BeautifulSoup`` and ``lexer``.
    """
    with open('cards.xml') as f:
        soup = BeautifulSoup(f, 'xml')
    card_list = soup.cockatrice_carddatabase.cards.find_all('card')
    for card in card_list:
        # NOTE(review): the original wrapped this body in
        # `try: ... except ValueError: raise`, a no-op — the exception
        # propagated either way, so the wrapper is removed.
        lexer.input(card.find('text').text.replace(card.find('name').text, '<self>'))
        print("Card: {}".format(card.find('name').text))
        for tok in lexer:
            pass
        print()
Ejemplo n.º 13
0
def run_lexer(input_file_path):
    """Read the file at *input_file_path* and print each token it lexes to."""
    # Read the whole source file up front.
    with open(input_file_path, 'r') as input_file:
        code = input_file.read()
    # Hand the text to the module-level lexer, then drain the token stream.
    lexer.input(code)
    for tok in iter(lexer.token, None):
        print(tok)
    return
Ejemplo n.º 14
0
    def onMyToolbarButtonLexico(self, s, label):
        """Lex the input pane's text and report lexical errors in the output pane.

        Clears the previous error list and output, tokenizes the input text
        (tokens are discarded — lexing populates the module-level
        ``erroresL`` list as a side effect), then prints an error count
        followed by each recorded error.
        """
        erroresL.clear()
        self.output.setPlainText("")

        data = self.input.toPlainText()

        # Consume the whole token stream; token() returns None at the end.
        lexer.input(data)
        while lexer.token() is not None:
            pass

        self.output.insertPlainText("Errores léxicos detectados: " +
                                    str(len(erroresL)) + "\n")
        for i in erroresL:
            self.output.insertPlainText(i + "\n")
Ejemplo n.º 15
0
#EDUARDO DIAZ DEL CASTILLO
#FIRST COMPILE PYTHON3 IN UBUNTU
#RULES IN TERMINAL:
#WRITE IN CONSOLE: python3 index.py diccionario.txt
import sys
from lexer import lexer

if __name__ == "__main__":

    file_name = sys.argv[1]

    # Read the whole file and hand it to the lexer; 'with' guarantees the
    # handle is closed (the original left it open).
    with open(file_name, 'r') as f:
        data = f.read()
    lexer.input(data)

    # Walk the token stream: valid tokens are echoed, invalid characters
    # are reported by the lexer's own error handling.
    while True:

        tok = lexer.token()
        if not tok:
            break
        print("CARACTER VALIDOS >>> ", tok)

    # Moved inside the __main__ guard: at the original indentation this ran
    # on import too, where 'data' does not exist and raised NameError.
    print("Cantidad de caracteres en archivo: ", len(data))
Ejemplo n.º 16
0
# Build the yacc parser with start symbol 'mailbox_or_url_list'; the
# generated parse tables are cached in mailbox_or_url_list_parsetab.
log.debug('building mailbox_or_url_list parser')
mailbox_or_url_list_parser = yacc.yacc(start='mailbox_or_url_list',
                                       errorlog=log,
                                       tabmodule='mailbox_or_url_list_parsetab')


# Interactive prompt for easy debugging
if __name__ == '__main__':
    while True:
        try:
            s = raw_input('\nflanker> ')
        except KeyboardInterrupt:
            break
        except EOFError:
            break
        if s == '': continue

        # First dump the raw token stream for the entered line...
        print '\nTokens list:\n'
        lexer.input(s)
        while True:
            tok = lexer.token()
            if not tok:
                break
            print tok

        # ...then run the full parser over the same input with debug logging.
        print '\nParsing behavior:\n'
        result = mailbox_or_url_list_parser.parse(s, debug=log)

        print '\nResult:\n'
        print result
Ejemplo n.º 17
0
import os
import io
import glob
import io
from lexer import lexer

curr_dir = os.path.dirname(__file__)

data_path = os.path.join(curr_dir, 'datadump')
aldelo_path = os.path.join(data_path, 'Aldelo')

aldelo_stores = glob.glob(aldelo_path + '/[A-Z]*')

store = aldelo_stores[0]

receipts = glob.glob(store + '/*')
f = io.open(receipts[0], 'r', encoding="utf-8")
for line in f:
    print line
f.close()

f = io.open(receipts[0], 'r', encoding="utf-8")

lexer.input(f.read())
Ejemplo n.º 18
0
# Build the yacc parser with start symbol 'mailbox_or_url_list'; the
# generated parse tables are cached in mailbox_or_url_list_parsetab.
log.debug('building mailbox_or_url_list parser')
mailbox_or_url_list_parser = yacc.yacc(
    start='mailbox_or_url_list',
    errorlog=log,
    tabmodule='mailbox_or_url_list_parsetab')

# Interactive prompt for easy debugging
if __name__ == '__main__':
    while True:
        try:
            s = raw_input('\nflanker> ')
        except KeyboardInterrupt:
            break
        except EOFError:
            break
        if s == '': continue

        # First dump the raw token stream for the entered line...
        print '\nTokens list:\n'
        lexer.input(s)
        while True:
            tok = lexer.token()
            if not tok:
                break
            print tok

        # ...then run the full parser over the same input with debug logging.
        print '\nParsing behavior:\n'
        result = mailbox_or_url_list_parser.parse(s, debug=log)

        print '\nResult:\n'
        print result
Ejemplo n.º 19
0
#    p[0] = [struct({'type':'plaintext', 'content':''.join(p[1:])})]

def p_text_bullet(p):
    'text : BULLET text NEWLINE'
    # NOTE: the string above is the PLY grammar production — not prose.
    logging.debug(sys._getframe().f_code.co_name)
    # Bullet nesting depth: two leading spaces per level before the '*'.
    # NOTE(review): '/2' yields a float under Python 3 — confirm '//2' intent.
    indent = len(p[1].split('*')[0])/2 -1
    p[0] = [struct({'type':'bullet', 'indent':indent, 'content':p[2].strip()+p[3]})]

def p_bold(p):
    'text : BOLD PLAINTEXT BOLD'
    # PLY rule (docstring above is the grammar production): wrap the plain
    # text between BOLD markers as a single 'bold' node.
    p[0] = [struct({'type':'bold', 'content':p[2]})]

def p_italic(p):
    'text : ITALIC PLAINTEXT ITALIC'
    # PLY rule (docstring above is the grammar production): wrap the plain
    # text between ITALIC markers as a single 'italic' node.
    p[0] = [struct({'type':'italic', 'content':p[2]})]

def p_error(p):
    """PLY error hook: report the offending token (or None at end of input)."""
    bad_token = repr(p)
    print("Syntax error at '%s'" % bad_token)

import ply.yacc as yacc
# Build the parser from the p_* grammar rules defined above in this module.
parser = yacc.yacc()

if __name__ == '__main__':
    from lexer import lexer
    import pprint
    import sys
    # Parse the file named by the last CLI argument and pretty-print the tree.
    input_string = open(sys.argv[-1]).read()
    lexer.input(input_string)
    parse_tree = parser.parse(lexer=lexer)
    pprint.pprint( parse_tree )
Ejemplo n.º 20
0
# /bin/env python

import sys
from lexer import lexer
from parser import parser
import textools as masters
from pdb import set_trace

md_name   = 'test.md' #sys.argv[-2]
#templates = sys.argv[-1]
#templates = templates.strip('.py')
#module    = __import__(templates)


if __name__ == '__main__':
    # Lex and parse the markdown file, then render each slide to LaTeX
    # using the master-slide renderers defined in the `masters` module.
    md_file = open(md_name).read()
    lexer.input(md_file)
    parse_tree = parser.parse(lexer=lexer)
    tex_name = md_name.replace('md', 'tex')
    with open(tex_name,'w') as output:
        output.write(masters.header)
        for slide in parse_tree:
            if hasattr(masters, slide.master):
                # Fix: the original called hasattr(...)(slide), i.e. invoked
                # a bool; getattr fetches the actual renderer callable.
                int_slide = getattr(masters, slide.master)(slide)
                output.write(int_slide)
            else:
                raise AttributeError('No master slide named %s available in the theme collection' % slide.master)
Ejemplo n.º 21
0
 def assertLexerIO(self, lexer_input, expected_output):
     """Assert the lexer's whitespace-collapsed value+type stream matches.

     Both the produced stream and *expected_output* are compared with all
     whitespace removed, so spacing in expectations is insignificant.
     """
     lexer.input(lexer_input)
     pieces = []
     for token in lexer:
         # Collapse whitespace inside the token value, then append its type.
         pieces.append(''.join(str(token.value).split()) + token.type)
     actual_output = ''.join(pieces)
     self.assertEqual(actual_output, ''.join(expected_output.split()))