Example #1
def listlex(lex):
    r = []
    tok = lex.token()
    while tok:
        r.append((tok.type, tok.value))
        tok = lex.token()
    return r
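
A minimal sketch of driving listlex end to end, assuming PLY is installed; the two-token grammar below is purely illustrative:

import ply.lex as lex

tokens = ('NUMBER', 'PLUS')   # illustrative token set

t_PLUS = r'\+'
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)   # skip characters the grammar does not cover

lexer = lex.lex()
lexer.input('1 + 2')
print(listlex(lexer))   # [('NUMBER', 1), ('PLUS', '+'), ('NUMBER', 2)]
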
	def getTypeDef(self,lex):
		nameStack=[]
		list_token=[]
		isStruct=False
		struct_token=[]
		tok = lex.token()
		if not tok: return None
		list_token.append(tok)
		if tok.type=='STRUCT' or tok.type=='UNION':
			isStruct=True
			struct_token=self.getStructUnion(lex)
		while True:
			tok = lex.token()
			if not tok: break
			list_token.append(tok)
			if tok.type=='SEMICOLON':
				if not nameStack:
					if isStruct==True:
						typedefobj=typedefclass(isStruct, list_token[1:-1],[list_token[0]]+struct_token)
						return typedefobj
					else:
						typedefobj=typedefclass(False,list_token[-2],list_token[:-2])
						return typedefobj
			elif tok.type=='LCURLY':
				nameStack.append(tok)
			elif tok.type=='RCURLY':
				nameStack.pop()
		if isStruct==True:
			typedefobj=typedefclass(isStruct,list_token[1:-1],[list_token[0]]+struct_token)
			return typedefobj
		else:
			typedefobj=typedefclass(isStruct,list_token[-2],list_token[:-2])
		return typedefobj
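
Here nameStack serves purely as a brace-depth counter, so the typedef-terminating SEMICOLON is only honored at depth zero. A standalone sketch of that depth-tracking idea, independent of PLY, with single characters standing in for tokens:

def top_level_semicolons(chars):
    # Return indices of ';' outside any {...} nesting (illustrative helper).
    depth = 0
    hits = []
    for i, c in enumerate(chars):
        if c == '{':
            depth += 1
        elif c == '}':
            depth -= 1
        elif c == ';' and depth == 0:
            hits.append(i)
    return hits

print(top_level_semicolons("struct{a;b;};"))   # [12]
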
Example #3
    def run_on_string(self,data):

        self.try_log_debug("==== Running on string\n%s...\n",data[:200])

        lex  = self.lexer.clone()

        #lexer debugging
        lex.input(data)
        if 0:
            tok = lex.token()
            while tok:
                print(tok)
                tok = lex.token()
            lex.lineno = 1
            lex.input(data)

        parser = yacc.yacc(module=self,
                  debug=self.debug,
                  debugfile=self.debugfile,
                  tabmodule=self.tabmodule)
        #try:
            #self.parser.restart()
        #except AttributeError:
            #pass
        script = parser.parse(lexer=lex,debug=self.logger)
        #print script
        return script
def test_scanner(arg=sys.argv):
    data = ' 1+2 1-2 3*4 x blah y := 5 '

    lex.input(data)

    # attempt to get that first token
    tok = lex.token()
    while tok:
        print(tok)
        tok = lex.token()
Example #5
def test_scanner(data) :
    """ Test the lexer to make sure we
    don't have any invalid tokens.

    :param data: string data from either
                 a file or text input.
    """
    lex.input(data)

    # attempt to get that first token
    tok = lex.token()
    while tok:
        tok = lex.token()
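
The loop above only catches invalid input if the lexer's error hook reports it; a sketch of a t_error rule that records offenders instead of printing (the errors list is an illustrative addition, not part of the original module):

errors = []

def t_error(t):
    # Record the offending character and its line, then resume lexing.
    errors.append((t.lineno, t.value[0]))
    t.lexer.skip(1)
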
Example #6
    def _group_lines(self, data):
        """Given an input string, this function splits it into lines.  Trailing whitespace
        is removed.   Any line ending with \ is grouped with the next line.  This
        function forms the lowest level of the preprocessor---grouping text into
        a line-by-line format.

        """
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in data.splitlines()]
        for i in range(len(lines)):
            j = i + 1
            while lines[i].endswith("\\") and (j < len(lines)):
                lines[i] = lines[i][:-1] + lines[j]
                lines[j] = ""
                j += 1

        data = "\n".join(lines)
        lex.input(data)
        lex.lineno = 1

        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            if tok.type in self.t_WS and "\n" in tok.value:
                yield current_line
                current_line = []

        if current_line:
            yield current_line
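
The backslash-splicing pass can be checked in isolation; a small sketch of the same joining loop on a literal string, no lexer involved:

lines = [x.rstrip() for x in "a \\\nb\nc".splitlines()]
for i in range(len(lines)):
    j = i + 1
    while lines[i].endswith("\\") and j < len(lines):
        lines[i] = lines[i][:-1] + lines[j]
        lines[j] = ""
        j += 1
print(lines)   # ['a b', '', 'c']
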
Example #7
def lextest(data):
    lex.input(data)
    while True:
        tok = lex.token()
        if not tok:
            break
        print(tok)
Example #8
def main():
    lex.lex()

    cmd, program_file = check_args()

    if program_file is not None and cmd == 'test':
        parser = yacc.yacc()
        with open(program_file) as f:
            input = f.read()
        progcode = parser.parse(input)
        program = Program(progcode)
        program.run_tests()
    elif program_file is not None and cmd == 'lex':
        with open(program_file) as f:
            input = f.read()
        lex.input(input)
        while True:
            tok = lex.token()
            if not tok:
                break
            print(tok)
    elif program_file is not None:
        parser = yacc.yacc()
        with open(program_file) as f:
            input = f.read()
        progcode = parser.parse(input)
        program = Program(progcode)
        program.call_function('main', [5])
Example #9
def parse(s):
    global LATok
    print("** parsing: ", s)
    lex.input(s)
    LATok = lex.token()
    result = input()
    print("** result: ", result)
Example #10
 def test(self, data):
     self.lexer.input(data)
     while True:
         tok = self.lexer.token()
         if not tok:
             break
         print(tok)
def lexer5525_TestMain(argv=None):
    """Lexer Test Cases"""

    # Setup and Check Args
    if argv is None:
        argv = sys.argv
    if len(argv) != 2:
        sys.stderr.write(str(argv[0]) + " requires exactly one argument\n")
        sys.stderr.write(__doc__ + "\n")
        return 1
    inputFilePath = str(argv[1])
    if not inputFilePath.endswith(".py"):
        sys.stderr.write(str(argv[0]) + " input file must be of type *.py\n")
        return 1

    inputFile = open(inputFilePath)
    source = inputFile.read()
    inputFile.close()

    lex.input(source)

    while True:
        tok = lex.token()
        if not tok:
            break
        sys.stdout.write(str(tok) + "\n")

    return 0
    def run(self):
        """Running the parser."""

        logging.debug("running parser with filename: [" + self._filename + "]")

        if self._lexeronly:
            logging.debug("doing *ONLY* lexical analysis, skipping syntactical analysis")
            ## debug output of lexical analysis: (FIXXME: replace with yacc parsing)
            for line in fileinput.input([self._filename]):
    
                logging.info("     processing line: [" + line.strip() + "]")
                
                ## Give the lexer some input
                lex.input(line)
                
                # Tokenize
                while True:
                    token = lex.token()
                    if not token: break      # No more input
                    logging.debug(str(token))

        else:
            with open(self._filename) as f:
                yacc.parse(f.read())

        ## report number of errors
        if self._numerrors > 0:
            logging.critical("-> " + str(self._numerrors) + " ERRORS found while parsing " + self._filename)
        else:
            logging.info("No errors found while parsing " + self._filename)
Example #13
def tokenize(string):
    lex.input(string)
    while True:
        tok = lex.token()
        if not tok: 
            break
        yield tok
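
Because tokenize is a generator, the stream can be consumed lazily with a plain for loop; a usage sketch, assuming the module-level lexer has already been built with lex.lex():

for tok in tokenize("x := 5"):
    print(tok.type, tok.value)
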
Example #14
    def group_lines(self, input):
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in input.splitlines()]
        for i in range(len(lines)):
            j = i+1
            while lines[i].endswith('\\') and (j < len(lines)):
                lines[i] = lines[i][:-1]+lines[j]
                lines[j] = ""
                j += 1

        input = "\n".join(lines)
        lex.input(input)
        lex.lineno = 1

        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            if tok.type in self.t_WS and '\n' in tok.value:
                yield current_line
                current_line = []

        if current_line:
            yield current_line
Example #15
def run_lex(text):
	lex.lex()
	lex.input(text)
	while True:
		token = lex.token()
		if not token: break
		print(token)
def get_tokens():
    tokens = []
    while True:
        tok = lex.token()
        if not tok:
            break
        tokens.append(tok)
    return tokens
Example #17
def token(t):
    global LATok
    if not LATok:
        return t == "EOF"
    if LATok.type != t:
        return False
    LATok = lex.token()
    return True
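
token is the single-token-lookahead matcher of a hand-written recursive-descent parser: it consumes the lookahead only on a match. A hypothetical helper built on top of it, for grammar rules that should fail loudly rather than return False:

def expect(t):
    # Illustrative wrapper: raise instead of silently returning False.
    if not token(t):
        raise SyntaxError("expected " + t)
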
Example #18
def token_list(s):
    lexer.input(s)
    tokens = []
    while True:
        tok = lexer.token()
        if not tok: break      # No more input
        tokens.append(tok)
    return tokens
Example #19
def dump(lex, buf):
    i = 0
    lex.input(buf)
    while True:
        tok = lex.token()
        if not tok: break
        print("[", i, "]", tok)
        i += 1
Example #20
 def test_RESERVE1(self):
     self.lexer.input("while")
     print("Test No.9: RESERVE")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'WHILE')
     self.assertEqual(tok.value, "_")
     print("Test end.")
     pass
Example #21
 def test_IDENTIFIER2(self):
     self.lexer.input("bufferf1.UK1")
     print("Test No.8: IDENTIFIER")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'IDENTIFIER')
     self.assertEqual(tok.value, "bufferf1.UK1")
     print("Test end.")
     pass
Example #22
 def test_REAL16(self):
     self.lexer.input("0xaab.cd")
     print("Test No.6: REAL16 with input 0xaab.cd")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'REAL16')
     self.assertEqual(tok.value, hex_to_dec("0xaab.cd"))
     print("Test end.")
     pass
Example #23
 def test_REAL8(self):
     self.lexer.input("0712.726")
     print("Test No.5: REAL8 with input 0712.726")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'REAL8')
     self.assertEqual(tok.value, oct_to_dec("0712.726"))
     print("Test end.")
     pass
Example #24
def test(expr):
    lex.input(expr)

    list_tok = []
    while True:
        tok = lex.token()
        if not tok: break
        list_tok.append(tok)
    return list_tok
Example #25
def testing_LESSTHAN():
    lex.input(test_Operations[1])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(LESSTHAN,'<',1,0)"
Example #26
def testing_TIMES():
        lex.input(test_Operations[4])
        tokens = list()
        while True:
                tok = lex.token()
                tokens.append(tok)
                if not tok:
                        break
        assert str(tokens[0]) == "LexToken(Times,'*',1,0)"
Example #27
def testing_ELSE():
    lex.input(test_Reserved_words[2])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(ELSE,'else',1,0)"
Example #28
def testing_COLON():
    lex.input(test_colon)
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(COLON,';',1,0)"
Example #29
def testing_PLUS():
        lex.input(test_Operations[2])
        tokens = list()
        while True:
                tok = lex.token()
                tokens.append(tok)
                if not tok:
                        break
        assert str(tokens[0]) == "LexToken(PLUS,'+',1,0)"
Example #30
def testing_PRINT():
    lex.input(test_Reserved_words[1])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(PRINT,'print',1,0)"
Example #31
def testing_CLOSEBRACE():
    lex.input(test_Braces[1])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(CLOSINGBRACE,'}',1,0)"
Example #32
def testing_CLOSINGPARA():
    lex.input(test_Braces[3])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(CLOSINGPARA,')',1,0)"
Example #33
 def test_RESERVE2(self):
     self.lexer.input("else")
     print("Test No.10: RESERVE")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'ELSE')
     self.assertEqual(tok.value, "_")
     print("Test end.")
     pass
Example #35
 def test_SIGNAL1(self):
     self.lexer.input("(")
     print("Test No.12: SIGNAL")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, '(')
     self.assertEqual(tok.value, "_")
     print("Test end.")
     pass
 def test(self, code):
     lex.input(code)
     while True:
         t = lex.token()
         if not t:
             break
         print(
             f'Tipo: {t.type:10} Valor: {t.value:14} Linha: {t.lineno:<3} Posicao: {t.lexpos}'
         )
Example #37
 def test_REAL10(self):
     self.lexer.input("20.342")
     print("Test No.4: REAL10 with input 20.342")
     tok = self.lexer.token()
     print(repr(tok.type), repr(tok.value))
     self.assertEqual(tok.type, 'REAL10')
     self.assertEqual(tok.value, 20.342)
     print("Test end.")
     pass
Example #38
 def getAllTokens(self):
     #fd = open(self.filename)
     #text = "".join(fd.readlines())
     self.input(self.text)
     while True:
         tok = lex.token()
         if not tok: break
         self.program.append(tok)
     return self.program
Example #39
	def test(self, data):
		result = ""

		self.lexer.input(data)
		while True:
			tok = self.lexer.token()
			if not tok: break
			result = result + "\n" + str(tok)

		return result.strip()
Example #40
def my_lexer(perl_inp):
    # Dump one token per line into the my_lexer_op file.
    out = open("my_lexer_op", "w+")
    lex.input(perl_inp)
    while True:
        tok = lex.token()
        if not tok:
            break
        #print(str(tok))
        out.write(str(tok))
        out.write("\n")
    out.close()
Example #41
def testing_NAME():
    """testing of t_NAME"""
    lex.input(tests[5])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break
    assert str(tokens[0]) == "LexToken(NAME,'GOODSHIP1',1,0)"
Example #42
def testing_NUMBER():
    """testing of t_NUMBER"""
    lex.input(tests[1])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break      # No more input
    assert str(tokens[0]) == "LexToken(NUMBER,'200',1,0)"
Example #43
def t_okens(expresion):
    lista = []  # collected "value -> type" strings
    lex.input(expresion)
    while True:
        tok = lex.token()
        if not tok: break
        if str(tok.value) in reserverd_words:
            tok.type = tok.value
            print(tok.value)
        lista.append(str(tok.value) + " -> " + str(tok.type))
    return lista
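
Re-typing reserved words inside the token loop works, but PLY's documented idiom handles it in the identifier rule itself; a sketch with an illustrative reserved-word map:

reserved = {'if': 'IF', 'else': 'ELSE', 'while': 'WHILE'}   # illustrative

def t_ID(t):
    r'[A-Za-z_][A-Za-z0-9_]*'
    # Remap reserved words here, so later stages never see them as plain IDs.
    t.type = reserved.get(t.value, 'ID')
    return t
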
Example #44
 def tokens(self, data):
     lex.input(data)
     tokens_dict = {}
     while True:
         tok = lex.token()
         if not tok:
             break
         tokens_dict[tok.type] = tokens_dict.get(tok.type, [])
         tokens_dict[tok.type].append(tok.value)
     return tokens_dict
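
The get-then-append dance above is exactly what collections.defaultdict provides; a sketch of the same grouping, where lexer is any object exposing a token() method:

from collections import defaultdict

def group_tokens(lexer):
    # Group token values by token type, e.g. {'NUMBER': [1, 2], ...}
    groups = defaultdict(list)
    while True:
        tok = lexer.token()
        if not tok:
            break
        groups[tok.type].append(tok.value)
    return dict(groups)
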
Example #45
def testing_STRING():
    """testing of t_STRING"""
    lex.input(tests[0])
    tokens = list()
    while True:
        tok = lex.token()
        tokens.append(tok)
        if not tok:
            break      # No more input
    assert str(tokens[1]) == "LexToken(STRING,'hello',1,0)"
Example #46
 def test(self, code):
     saida = io.open("saida.txt", mode="w", encoding="utf-8")
     lex.input(code)
     while True:
         t = lex.token()
         if not t:
             break
         print(t)
         saida.write(str(t) + "\n")
     saida.close()
Example #47
	def saida(self, code):
		out = io.open("saida.txt", mode="w", encoding="utf-8")
		lex.input(code)
		while True:
			tok = lex.token()
			if not tok:
				break
			print(tok)
			out.write(str(tok) + "\n")
		out.close()
Example #48
def lectura(entrada):
    linea = entrada.readlines()

    for a in range(0, len(linea)):
        lex.input(linea[a])
        while True:
            tok = lex.token()
            if not tok: break
            print(str(tok.value) + " - " + str(tok.type))
        print("----------------------------")
Example #49
 def get_token():
     '''a tokenizer that automatically feeds the lexer with the next line'''
     while True:
         tok = lex.token()
         if tok is not None: return tok
         try:
             line = next(file)
             lex.input(line)
         except StopIteration:
             return None
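
PLY's parser can consume a custom token source through the tokenfunc argument of parse(), which is how a line-feeding tokenizer like this one is usually wired in; a sketch, assuming ply.yacc is imported as yacc and a grammar module is in scope:

parser = yacc.yacc()
result = parser.parse(tokenfunc=get_token)
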
Example #50
 def insertSimbols(self, code, ant):
     lex.input(code)
     while True:
         token = lex.token()
         if not token:
             break
         if token.type == "ID" and (ant.type == "DOIS_PONTOS" or ant.type == "VIRGULA"):
             simbol = Simbol(str(token.type), token.value, "", "", 0, None , token.lineno, token.lexpos, False, 0)
             self.table.simbols.append(simbol)
         ant = token
Example #51
def do_lex():
    # Build the lexer
    lex.lex()
    return  # NOTE: the code below is unreachable as written

    lex.input(sometext)
    while True:
        tok = lex.token()
        if not tok: break
        print(tok)
Example #52
def print_tokens(f):
    if isinstance(f, str):  # accept a path as well as a file object
        f = open(f)

    lex.input(f.read())
    while True:
        tok = lex.token()
        if not tok:
            break
        print(tok)
Example #53
def compile(template):
    lex.lex()
    # lex.lexer.push_state('mu')
    lex.input(template)
    while True:
        tok = lex.token()
        if not tok: break
        print(tok)
    yacc.yacc()
    return yacc.parse(template)
Example #54
def lex_text(text):
    lexer = get_lexer()
    lexer.input(text)
    result = []
    while True:
        token = lexer.token()
        if token:
            result.append(token)
        else:
            break
    return result
Example #55
def analyse_lex(filename):
    prog = open(filename).read()

    lex.input(prog)

    while True:
        tok = lex.token()
        if not tok:
            break
        print("line %d: %s(%s)" % (tok.lineno, tok.type, tok.value))

    return not errorOccured
Example #56
def main(in_file, out_file):
	in_data = in_file.read()
	lex.input(in_data)
	while True:
		t = lex.token()
		if not t:
			break

		token_str = get_token_str(t)
		out_file.write(token_str + '\n')
		print(token_str)
def main( arg=sys.argv ) :

		# Now, this lexer actually takes a string; it doesn't (as far as I know)
		# read from a file.  So, you can parse the file as you like, and feed it
		# to the lexer.
	
	# we're going to read a line at a time from stdin

	line_cnt = 0

	for line in sys.stdin :

		lex.input( line )

		line_cnt += 1
		print("\nLine #", line_cnt)

			# attempt to get that first token
		tok = lex.token()
		while tok:
			print(tok)
			tok = lex.token()
	def getStructUnion(self,lex):
		nameStack=[]
		list_token=[]
		sawBrace=False
		while True:
			tok = lex.token()
			if not tok: break
			list_token.append(tok)
			if tok.type=='LCURLY':
				sawBrace=True
				nameStack.append(tok)
			elif tok.type=='RCURLY':
				nameStack.pop()
			# Return only once a brace-delimited body has been opened and
			# fully closed; otherwise a leading struct/union name token
			# would end the scan before the body is consumed.
			if sawBrace and not nameStack: return list_token
		return list_token