Example #1
def process(content: str, e: Evaluator, duration: int):
    try:
        # Full pipeline: tokenize the source, parse it, then evaluate.
        t = Tokenizer(content)
        t.tokenize()
        p = Parser(t).parseProgram()
        return e.evaluate(p, duration=duration)
    except (TokenizationError, ParseError) as err:
        # Named 'err' so it does not shadow the Evaluator parameter 'e'.
        print("\nERROR: %s\n" % str(err))
        return 2, err
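
For orientation, a call to process under these definitions might look like the sketch below; the source text is a made-up placeholder, and Evaluator(graphics=False) with duration=5000 is borrowed from Example #2 purely for illustration.

# Hypothetical driver; the program text is a placeholder, not a real program.
outcome = process("program: ...", Evaluator(graphics=False), duration=5000)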
Example #2
def test(self):
    # Junk input must make the pipeline raise TokenizationError;
    # reaching self.fail() would mean nothing was raised.
    content = "NOT A REAL PROGRAM"
    try:
        t = Tokenizer(content)
        t.tokenize()
        program: ast.Program = Parser(t).parseProgram()
        result: int = Evaluator(graphics=False).evaluate(
            program, duration=5000)
        self.fail("expected a TokenizationError")
    except TokenizationError:
        pass
Example #3
def run_compile(content: str) -> str:
    # check_next() reports the next token; unlike get_next() in the
    # later examples, it presumably does not consume it.
    token_obj = Tokenizer(content)
    return token_obj.check_next()
Example #4
from typing import List  # for the List[str] return annotation

def run_compile(content: str) -> List[str]:
    return Tokenizer(content).tokenize()
Example #5
def run_compile3(content: str) -> str:
    # Consume the first two tokens, then return the third.
    token_obj = Tokenizer(content)
    token_obj.get_next()
    token_obj.get_next()
    return token_obj.get_next()
Example #6
def run_compile(content: str) -> str:
    # Return the first token.
    return Tokenizer(content).get_next()
Example #7
def run_compile(content: str) -> bool:
    # Report whether any tokens remain.
    return Tokenizer(content).more_tokens()
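
Examples #5 to #7 suggest that get_next() consumes one token per call while more_tokens() reports whether any remain. Under that assumption the two calls compose into a simple drain loop; the helper below is a sketch, not one of the original examples.

from typing import List

def dump_tokens(content: str) -> List[str]:
    # Sketch: assumes get_next() consumes one token per call and
    # more_tokens() turns False once the stream is exhausted.
    token_obj = Tokenizer(content)
    tokens: List[str] = []
    while token_obj.more_tokens():
        tokens.append(token_obj.get_next())
    return tokens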
Example #8
def run_compile(content: str):
    try:
        # get_and_check_next must raise when the next token is not 'source'.
        Tokenizer(content).get_and_check_next('source')
        # Plain function, so raise directly instead of unittest's self.fail().
        raise AssertionError("expected a TokenizationError")
    except TokenizationError:
        pass
Example #9
def run_compile(content: str) -> str:
    # Consume the next token, raising TokenizationError unless it is
    # 'program:' (the failure mode shown in Example #8).
    return Tokenizer(content).get_and_check_next('program:')
Example #10
def run_compile2(content: str) -> bool:
    # Skip the first two tokens, then test whether the next one
    # is 'program:'.
    token_obj = Tokenizer(content)
    token_obj.get_next()
    token_obj.get_next()
    return token_obj.check_token('program:')
Example #11
def run_compile(content: str) -> bool:
    # Test whether the first token is 'program:'.
    token_obj = Tokenizer(content)
    return token_obj.check_token('program:')
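
Examples #8 to #11 hint at a split between the checked accessors: check_token appears to test the next token without raising, while get_and_check_next raises TokenizationError on a mismatch (Example #8). The helper below combines the two under those assumptions; it is a sketch, not part of the source.

def consume_program_header(content: str) -> bool:
    # Sketch: peek with check_token, then consume with get_and_check_next,
    # assuming the peek does not advance the token stream.
    token_obj = Tokenizer(content)
    if not token_obj.check_token('program:'):
        return False
    token_obj.get_and_check_next('program:')
    return True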
Example #12
def parse(content: str) -> ast.Program:
    # Tokenize, then hand the populated tokenizer to the parser.
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()
Example #13
def run_compile(content: str) -> ast.Program:
    # Same tokenize-then-parse pipeline as Example #12 under another name.
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()