Example #1
    def test_Tokenizer_get_and_check_next2(self):
        def run_compile(content: str):
            # The first token is 'program:', so expecting 'source' must raise.
            with self.assertRaises(TokenizationError):
                Tokenizer(content).get_and_check_next('source')

        read_program_file("tests/res/programs/example1", run_compile)
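For orientation, here is a minimal sketch of the contract this test implies for get_and_check_next. The whitespace splitting and the TokenizationError definition below are assumptions for illustration, not the project's actual implementation:

class TokenizationError(Exception):
    """Raised when the next token is not the expected one (assumed)."""

class TokenizerSketch:
    # Hypothetical stand-in for Tokenizer; the real splitting is richer.
    def __init__(self, content: str):
        self.tokens = content.split()
        self.pos = 0

    def get_next(self) -> str:
        # Consume and return the next token.
        token = self.tokens[self.pos]
        self.pos += 1
        return token

    def get_and_check_next(self, expected: str) -> str:
        # Consume the next token and verify it matches `expected`.
        token = self.get_next()
        if token != expected:
            raise TokenizationError(
                "expected %r, got %r" % (expected, token))
        return token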
Example #2
    def test_Tokenizer_check_token(self):
        def run_compile(content: str) -> bool:
            # check_token peeks at the upcoming token without consuming it.
            token_obj = Tokenizer(content)
            return token_obj.check_token('program:')

        def run_compile2(content: str) -> bool:
            # After consuming two tokens, 'program:' is no longer upcoming.
            token_obj = Tokenizer(content)
            token_obj.get_next()
            token_obj.get_next()
            return token_obj.check_token('program:')

        p: bool = read_program_file("tests/res/programs/example1", run_compile)
        self.assertTrue(p)
        p = read_program_file("tests/res/programs/example1", run_compile2)
        self.assertFalse(p)
Example #3
    def test_Tokenizer_check_next(self):
        def run_compile(content: str) -> str:
            # check_next returns the upcoming token without consuming it.
            token_obj = Tokenizer(content)
            return token_obj.check_next()

        p: str = read_program_file("tests/res/programs/example1", run_compile)
        self.assertEqual('program:', p)
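Read together with check_token in Example #2, this suggests check_next is a pure peek. Extending the hypothetical TokenizerSketch from Example #1 (again an assumption, not the real code):

    def check_next(self) -> str:
        # Peek at the upcoming token without advancing the cursor.
        if self.pos >= len(self.tokens):
            return "NO MORE TOKENS"  # sentinel, per Example #11 below
        return self.tokens[self.pos]

    def check_token(self, expected: str) -> bool:
        # True if the upcoming token equals `expected`; nothing is consumed.
        return self.check_next() == expected

    def more_tokens(self) -> bool:
        # True while any unconsumed tokens remain.
        return self.pos < len(self.tokens)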
Example #4
def run_program(filename: str, graphics=False, duration=5000):
    """Read `filename` and run its program through `process` with an Evaluator."""
    try:
        return read_program_file(
            filename, lambda c: process(
                c, Evaluator(graphics=graphics), duration=duration))
    except FileNotFoundError as e:
        # Report the missing file and surface a non-zero status with the error.
        print("\nERROR: Could not read file '%s'\n" % filename)
        return 3, e
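A hypothetical invocation of the helper. The happy-path return shape depends on process, which is not shown, so only the error branch's (3, error) pair is certain here:

# Headless run of a bundled example (the path is from the tests above):
result = run_program("tests/res/programs/example1")

# A missing file prints an error and returns (3, FileNotFoundError):
code, err = run_program("tests/res/programs/does_not_exist")
assert code == 3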
Example #5
    def test_parse_1(self):
        def run_compile(content: str) -> Program:
            t = Tokenizer(content)
            t.tokenize()
            return Parser(t).parseProgram()

        # Smoke test: parsing example1 must complete and yield a Program.
        p: Program = read_program_file("tests/res/programs/example1",
                                       run_compile)
        self.assertIsNotNone(p)
Example #6
    def test_Tokenizer_get_next(self):
        def run_compile(content: str) -> str:
            # get_next consumes and returns exactly one token per call.
            return Tokenizer(content).get_next()

        def run_compile2(content: str) -> str:
            token_obj = Tokenizer(content)
            token_obj.get_next()
            return token_obj.get_next()

        def run_compile3(content: str) -> str:
            token_obj = Tokenizer(content)
            token_obj.get_next()
            token_obj.get_next()
            return token_obj.get_next()

        p: str = read_program_file("tests/res/programs/example1", run_compile)
        self.assertEqual('program:', p)
        p = read_program_file("tests/res/programs/example1", run_compile2)
        self.assertEqual(';', p)
        p = read_program_file("tests/res/programs/example1", run_compile3)
        self.assertEqual('source', p)
Example #7
    def test_Tokenizer_1(self):
        def run_compile(content: str) -> List[str]:
            # tokenize returns the program's full token stream in order.
            return Tokenizer(content).tokenize()

        p: List[str] = read_program_file("tests/res/programs/example1",
                                         run_compile)
        res = [
            'program:', ';', 'source', '=', 'remote', "(", '"',
            'www.coviddata.com/stream', '"', ")", ';', 'map', "(", 'source',
            ")", '"', 'case_date', '"', 'to', 'number', 'date', ';', 'number',
            'count', '=', '0', ';', 'observe', "(", 'source', ")", "do",
            'count++', ';', 'plot', 'scatter_xy', "(", 'date', ",", 'age', ")",
            'titled', '"', 'age_graph', '"', ';', 'plot', 'line_xy', "(",
            'date', ",", 'log', '(', 'count', ')', ")", 'titled', '"',
            'cases_', 'log', '"', ';', 'start!'
        ]
        self.assertEqual(res, p)
Example #8
    def test_Tokenizer_2(self):
        def run_compile(content: str) -> List[str]:
            return Tokenizer(content).tokenize()

        p: List[str] = read_program_file("tests/res/programs/example2",
                                         run_compile)
        res = [
            '<START>', ';', 'source', '=', '"',
            'http://winterolympicsmedals.com/medals.csv', '"', ';', 'map',
            'source', '"', 'Year', '"', 'to', 'number', 'year', ';', 'map',
            'source', '"', 'Medal', '"', 'to', 'number', 'medal', ';', 'year',
            '=', 'year', '/', '100', ';', 'year', '=', 'year', '*', '10', ';',
            'year', '=', 'year', '+', '23', ';', 'year', '=', 'year', '-',
            '12', ';', 'plot', 'xy', 'year', 'medal', 'titled', 'medal_graph',
            ';', '<END>'
        ]
        self.assertEqual(res, p)
Example #9
    def test_regular(self):
        p_expected: ast.Program = ast.Program(
            ast.Body([

                # source = live remote "https://covid-api.com/api/reports"
                ast.Loader(ast.Var("source"),
                           ast.Source("https://covid-api.com/api/reports")),

                # map source "confirmed" to number confirmed
                ast.Mapper(
                    ast.Var("source"), "confirmed",
                    ast.Declare(ast.Type(Types.NUMBER), ast.Var("confirmed"))),

                # number count = 0
                ast.Assigner(
                    ast.Declare(ast.Type(Types.NUMBER), ast.Var("count")),
                    ast.Value(values.IntegerValue(0))),

                # observe(source) do count++
                ast.Trigger(ast.Var("source"),
                            ast.MathFuncs([ast.Increment(ast.Var("count"))])),

                # plot line_xy(count,confirmed) called "confirmed_cases"
                ast.Plotter(ast.Graph(graphs.LineXYGraph()),
                            ast.VarAxis(ast.Var("count")),
                            ast.VarAxis(ast.Var("confirmed")),
                            "confirmed_cases"),
            ]))

        def parse(content: str) -> ast.Program:
            t = Tokenizer(content)
            t.tokenize()
            return Parser(t).parseProgram()

        p: ast.Program = read_program_file(
            "tests/res/programs/regular_program.mstx", parse)

        self.assertTrue(ast_equal(p_expected, p))
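ast_equal itself is not shown. A minimal structural comparison consistent with how the test uses it might look like this sketch (an assumption about its behavior, not the project's implementation):

def ast_equal_sketch(a, b) -> bool:
    # Hypothetical: nodes match if types, fields, and children all match.
    if type(a) is not type(b):
        return False
    if isinstance(a, (list, tuple)):
        return (len(a) == len(b)
                and all(ast_equal_sketch(x, y) for x, y in zip(a, b)))
    if not hasattr(a, "__dict__"):
        return a == b  # leaves: strings, numbers, enums compare directly
    return (a.__dict__.keys() == b.__dict__.keys()
            and all(ast_equal_sketch(v, b.__dict__[k])
                    for k, v in a.__dict__.items()))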
Example #10
    def test_Tokenizer_3(self):
        def run_compile(content: str) -> List[str]:
            return Tokenizer(content).tokenize()

        p: List[str] = read_program_file("tests/res/programs/example3",
                                         run_compile)
        res = [
            '<START>', ';', 'source', '=', 'static', 'remote', '"',
            'http://winterolympicsmedals.com/medals.csv', '"', ';', 'map',
            'source', '"', 'Year', '"', 'to', 'number', 'year', ';', 'map',
            'source', '"', 'Medal', '"', 'to', 'number', 'medal', ';',
            'number', 'shortyear', '=', 'year', '-', '1900', ';', 'number',
            'addyear', '=', 'year', '+', '50', ';', 'number', 'timesyear', '=',
            'year', '*', '10', ';', 'number', 'divyear', '=', 'year', '/',
            '100', ';', 'number', 'sin', 'year', '=', 'sin', '(', 'year', ')',
            ';', 'number', 'cos', 'year', '=', 'cos', '(', 'year', ')', ';',
            'number', 'exp', 'year', '=', 'exp', '(', 'year', ')', ';',
            'number', 'sqyear', '=', 'year', '^', '2', ';', 'number', 'thing',
            '=', '2000', ';', 'thing--', ',', 'addyear++', ';', 'binary',
            'bin', '=', 'true', ';', 'binary', 'bin2', '=', 'false', ';',
            'plot', 'xy', 'cos', '(', 'year', ')', 'medal', 'titled', 'cos',
            '_medal_graph', ';', '<END>'
        ]
        self.assertEqual(res, p)
Example #11
    def test_Tokenizer_check_next2(self):
        def run_compile(content: str) -> str:
            # With no tokens available, check_next returns a sentinel string.
            return Tokenizer(content).check_next()

        p: str = read_program_file("tests/res/programs/empty", run_compile)
        self.assertEqual("NO MORE TOKENS", p)
Example #12
    def test_Tokenizer_more_tokens2(self):
        def run_compile(content: str) -> bool:
            return Tokenizer(content).more_tokens()

        p: bool = read_program_file("tests/res/programs/empty", run_compile)
        self.assertFalse(p)
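Taken with Example #11, the two empty-input tests pin down the end-of-stream behavior. A quick illustration, assuming an empty string yields no tokens:

t = Tokenizer("")                 # assumption: empty content -> no tokens
assert not t.more_tokens()
assert t.check_next() == "NO MORE TOKENS"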
Example #13
    def test_Tokenizer_get_and_check_next(self):
        def run_compile(content: str) -> str:
            return Tokenizer(content).get_and_check_next('program:')

        p: str = read_program_file("tests/res/programs/example1", run_compile)
        self.assertEqual('program:', p)