Example #1
 def __init__(self, source, filename):
     source.seek(0)
     self._code = source.read()
     self._code = remove_comments(self._code)
     # third positional argument is the right-brace callback
     clex = CLexer(self.error_func, self.on_lbrace_func,
                   self.on_rbrace_func, self.type_lookup_func)
     clex.build(optimize=False)
     clex.input(self._code)
     tokens = list(iter(clex.token, None))
     self._parser_tokens = self.__init_tokens(tokens)
     self._parsed_code = self.__get_parsed_code(self._parser_tokens)
 def test_on_rbrace_lbrace(self):
     braces = []
     def on_lbrace():
         braces.append('{')
     def on_rbrace():
         braces.append('}')
     clex = CLexer(self.error_func, on_lbrace, on_rbrace,
                   self.type_lookup_func)
     clex.build(optimize=False)
     clex.input('hello { there } } and again }}{')
     token_list(clex)  # helper from pycparser's test suite that drains the lexer
     self.assertEqual(braces, ['{', '}', '}', '}', '}', '{'])
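Every example on this page uses the same four-callback CLexer constructor: an error callback, two brace callbacks, and a typedef-name lookup. A minimal self-contained sketch of that pattern (the throwaway callbacks are my own, not taken from any of the projects shown here):

from pycparser.c_lexer import CLexer

def error_func(msg, line, column):
    print("lex error at %s:%s: %s" % (line, column, msg))

# the brace callbacks take no arguments; the type-lookup callback receives an
# identifier and should return True if it names a typedef'd type
clex = CLexer(error_func, lambda: None, lambda: None, lambda name: False)
clex.build(optimize=False)
clex.input("int x = 42;")
tokens = list(iter(clex.token, None))  # drain until token() returns None
print([(t.type, t.value) for t in tokens])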
Example #3
    def tokenize_with_offset(self, code: str) \
            -> Optional[List[Tuple[int, Token[str, str]]]]:
        code = code.replace("\r", "")  # normalize CRLF so lexpos is a plain character offset

        # dummy brace callbacks and a type lookup that never matches:
        # only the raw token stream matters here
        lexer = CLexer(logger.warning, lambda: None, lambda: None,
                       lambda x: False)
        lexer.build(optimize=False)
        lexer.input(code)
        tokens: List[LexToken] = list(iter(lexer.token, None))

        return [(token.lexpos, Token(token.type, token.value, token.value))
                for token in tokens]
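The lexpos values returned above are flat character offsets into the source string. A small sketch (my own helper, not part of the original project) for mapping such an offset back to a 1-based line/column pair:

def offset_to_line_col(code: str, lexpos: int):
    # assumes "\r" was already stripped, as in tokenize_with_offset above
    line = code.count("\n", 0, lexpos) + 1   # 1-based line number
    last_nl = code.rfind("\n", 0, lexpos)    # -1 when still on the first line
    return line, lexpos - last_nl            # column is also 1-based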
Example #4
    def lex(self, text):
        # lex the input text

        self._scope_stack = [dict()]  # open a fresh scope stack

        # construct a new lexer using the pycparser implementation
        lex = CLexer(self._lex_error_func, self._lex_on_lbrace_func,
                     self._lex_on_rbrace_func, self._lex_type_lookup_func)

        # initialize the lexer
        lex.build()
        lex.input(text)

        list_of_tokens = []
        while True:
            tok = lex.token()
            if not tok:
                break
            list_of_tokens.append(
                (tok.value, tok.type, tok.lineno, lex.filename, tok.lexpos))
        return list_of_tokens
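The scope stack set up at the top of lex() is the usual pycparser device for tracking typedef names. The type-lookup callback it is paired with is not shown; a plausible sketch, modeled on pycparser's own CParser._is_type_in_scope (hypothetical for this project):

    def _lex_type_lookup_func(self, name):
        # search scopes innermost-first; a truthy entry marks a typedef'd name
        for scope in reversed(self._scope_stack):
            if name in scope:
                return scope[name]
        return False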
Example #5
 def setUp(self):
     # note: the two-argument form below matches older pycparser releases;
     # newer versions also require on_lbrace/on_rbrace callbacks
     self.clex = CLexer(self.error_func, self.type_lookup_func)
     self.clex.build(optimize=False)
     self.error = ""
Example #6
File: lexer.py  Project: xcode2010/ghcc
 def __init__(self) -> None:
     self.lexer = CLexer(self._error_func, self._brace_func,
                         self._brace_func, self._type_lookup_func)
     self.lexer.build(optimize=True, lextab='pycparser.lextab')
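Unlike the test examples above, this one builds with optimize=True, which makes PLY skip rule re-validation and load the pre-generated table module named by lextab (here pycparser's bundled tables). The trade-off, roughly (standalone sketch with a throwaway error callback):

from pycparser.c_lexer import CLexer

lexer = CLexer(print, lambda: None, lambda: None, lambda name: False)

# fast startup: reuse pycparser's shipped lexer tables, skipping rule validation
lexer.build(optimize=True, lextab='pycparser.lextab')

# alternative used by the test examples above: re-validate every rule at build time
# lexer.build(optimize=False)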
Example #7
import os
import sys
import json
import signal
from io import StringIO

sys.path.append(os.path.join(os.getcwd(), 'asm'))
import asm

from PyQt5.QtCore import QTimer
from PyQt5.QtWebEngineWidgets import *

from pycparser.c_lexer import CLexer

def _lex_error_func(msg, line, column):
    # lexer errors are silently swallowed; the raise below is intentionally unreachable
    return
    raise ParseError("%s:%s: %s" % (msg, line, column))

clex = CLexer(_lex_error_func, lambda: None, lambda: None, lambda _: False)
clex.build(optimize=True)

def sub_proc(fn, q, args):
    def myexit(signum, frame):
        print('terminated(sub)', file=sys.stderr)
        exit()
    signal.signal(signal.SIGINT, myexit)

    # redirect stdout so the child's output can be captured
    str_out = StringIO()
    _stdout = sys.stdout
    sys.stdout = str_out
    try:
        fn(*args)
    except Exception as ex:
        print(ex)
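    # the original snippet is truncated here; a hypothetical continuation
    # (restoring stdout and handing the captured output back through the
    # queue `q` is my assumption, not code from the original project):
    finally:
        sys.stdout = _stdout          # assumed: undo the redirection
        q.put(str_out.getvalue())     # assumed: return captured output to the parent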
Example #8
 def __init__(self):
     # a single shared callback fills all four CLexer slots
     # (error, on_lbrace, on_rbrace, type lookup)
     self.lexer = CLexer(self._callback, self._callback, self._callback,
                         self._callback)
     self.lexer.build(optimize=True,
                      lextab='pycparser.lextab',
                      outputdir='')