def process(content: str, e: Evaluator, duration: int):
    try:
        t = Tokenizer(content)
        t.tokenize()
        p = Parser(t).parseProgram()
        return e.evaluate(p, duration=duration)
    except (TokenizationError, ParseError) as err:  # renamed so it no longer shadows the evaluator parameter `e`
        print("\nERROR: %s\n" % str(err))
        return 2, err
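For context, a hypothetical call to this driver. The Evaluator construction and the duration of 5000 mirror the test below; the source string itself is an illustrative placeholder, not a program from the original suite:

outcome = process("program: ...", Evaluator(graphics=False), duration=5000)  # placeholder source text
# On success, outcome is whatever evaluate() returns; after a tokenization
# or parse error it is the (2, err) tuple built in the except block above.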
def test(self):
    content = "NOT A REAL PROGRAM"
    try:
        t = Tokenizer(content)
        t.tokenize()
        program: ast.Program = Parser(t).parseProgram()
        result: int = Evaluator(graphics=False).evaluate(program, duration=5000)
        self.fail()  # reached only if the invalid input raised no error
    except TokenizationError:
        pass
def run_compile(content: str) -> str:
    token_obj = Tokenizer(content)
    return token_obj.check_next()
def run_compile(content: str) -> List[str]:
    return Tokenizer(content).tokenize()
def run_compile3(content: str) -> str:
    token_obj = Tokenizer(content)
    token_obj.get_next()  # consume the first two tokens
    token_obj.get_next()
    return token_obj.get_next()  # return the third
def run_compile(content: str) -> str:
    return Tokenizer(content).get_next()
def run_compile(content: str) -> bool:
    return Tokenizer(content).more_tokens()
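Read together, the helpers above suggest a cursor-style tokenizer API: check_next peeks at the next token without consuming it, get_next consumes and returns it, and more_tokens reports whether any remain. A minimal sketch built on that reading (the exact semantics are an assumption, not something these snippets confirm):

t = Tokenizer(content)
while t.more_tokens():    # assumed: True while unconsumed tokens remain
    token = t.get_next()  # assumed: consumes and returns the next token
    print(token)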
def run_compile(content: str):
    try:
        Tokenizer(content).get_and_check_next('source')
        self.fail()  # `self` is not a parameter here, so this helper presumably runs as a closure inside a test method
    except TokenizationError:
        pass
def run_compile(content: str) -> str:
    return Tokenizer(content).get_and_check_next('program:')
def run_compile2(content: str) -> bool:
    token_obj = Tokenizer(content)
    token_obj.get_next()
    token_obj.get_next()
    return token_obj.check_token('program:')
def run_compile(content: str) -> bool:
    token_obj = Tokenizer(content)
    return token_obj.check_token('program:')
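The last four helpers appear to exercise checked access: get_and_check_next seems to consume the next token and raise TokenizationError when it is not the expected string, while check_token looks like a non-consuming boolean test. A sketch under those assumed semantics:

t = Tokenizer(content)
if t.check_token('program:'):         # assumed: boolean check, does not consume
    t.get_and_check_next('program:')  # assumed: consumes; raises TokenizationError on mismatch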
def parse(content: str) -> ast.Program:  # annotation added for consistency with the other helpers
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()
def run_compile(content: str) -> Program:
    t = Tokenizer(content)
    t.tokenize()
    return Parser(t).parseProgram()
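A hypothetical end-to-end use of this helper, chaining the parsed program into the Evaluator; the graphics=False and duration=5000 arguments are copied from the snippets above, while the surrounding call site is an assumption:

program = run_compile(content)
result = Evaluator(graphics=False).evaluate(program, duration=5000)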