Example #1
def process(content: str, e: Evaluator, duration: int):
    try:
        # Tokenize and parse the source, then hand the program to the evaluator.
        t = Tokenizer(content)
        t.tokenize()
        p = Parser(t).parseProgram()
        return e.evaluate(p, duration=duration)
    except (TokenizationError, ParseError) as err:  # renamed so it doesn't shadow the Evaluator `e`
        print("\nERROR: %s\n" % str(err))
        return 2, err
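A hedged sketch of how a routine like Example #1's process might be driven, assuming the Evaluator(graphics=False) constructor and the duration=5000 value seen in Example #2; the source string is a placeholder, not real program syntax:

if __name__ == "__main__":
    # Placeholder source; the language's actual syntax is not shown in these examples.
    source = "program: ..."
    outcome = process(source, Evaluator(graphics=False), duration=5000)
    print(outcome)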
Example #2
def test(self):
    # Invalid source text should fail during tokenization, never reaching evaluation.
    content = "NOT A REAL PROGRAM"
    try:
        t = Tokenizer(content)
        t.tokenize()
        program: ast.Program = Parser(t).parseProgram()
        result: int = Evaluator(graphics=False).evaluate(
            program, duration=5000)
        self.fail()
    except TokenizationError:
        pass
Example #3
def run_compile(content: str) -> str:
    token_obj = Tokenizer(content)
    return token_obj.check_next()
Example #4
def run_compile(content: str) -> List[str]:
    return Tokenizer(content).tokenize()
Example #5
def run_compile3(content: str) -> str:
    # Consume the first two tokens and return the third.
    token_obj = Tokenizer(content)
    token_obj.get_next()
    token_obj.get_next()
    return token_obj.get_next()
Example #6
def run_compile(content: str) -> str:
    return Tokenizer(content).get_next()
Example #7
def run_compile(content: str) -> bool:
    return Tokenizer(content).more_tokens()
Example #8
def run_compile(content: str):
    try:
        Tokenizer(content).get_and_check_next('source')
        # This is a plain function, so self.fail() is unavailable; raise directly instead.
        raise AssertionError("expected TokenizationError")
    except TokenizationError:
        pass
Example #9
def run_compile(content: str) -> str:
    return Tokenizer(content).get_and_check_next('program:')
Example #10
def run_compile2(content: str) -> bool:
    token_obj = Tokenizer(content)
    token_obj.get_next()
    token_obj.get_next()
    return token_obj.check_token('program:')
Example #11
def run_compile(content: str) -> bool:
    token_obj = Tokenizer(content)
    return token_obj.check_token('program:')
Example #12
def parse(content: str) -> ast.Program:
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()
Example #13
def run_compile(content: str) -> Program:
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()
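Taken together, Examples #5 through #7 suggest a manual iteration pattern over the tokenizer. The sketch below is only an assumption built from the calls shown above (more_tokens() returning a bool, get_next() returning the next token string); the helper name collect_tokens does not appear in the original examples:

from typing import List

def collect_tokens(content: str) -> List[str]:
    # Drain the tokenizer one token at a time, using the calls from Examples #5-#7.
    token_obj = Tokenizer(content)
    tokens: List[str] = []
    while token_obj.more_tokens():
        tokens.append(token_obj.get_next())
    return tokens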