Example #1
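These snippets are shown without their imports. As a rough sketch, a test
module exercising them would need something along the following lines; the
module paths inside the lyth package are assumptions for illustration and may
not match the real project layout:

import pytest

# Hypothetical import paths -- adjust to the actual layout of the lyth package.
from lyth.compiler.scanner import Scanner
from lyth.compiler.lexer import Lexer
from lyth.compiler.parser import Parser
from lyth.compiler.analyzer import Analyzer
from lyth.compiler.interpreter import Interpreter
from lyth.compiler.ast import Node, NodeType
from lyth.compiler.token import Literal, Symbol, Keyword
from lyth.compiler.symbol import Field
from lyth.compiler.error import LythError, LythSyntaxError
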
def test_parser_invalid_line():
    """
    To validate that the parser detects a missing numeral in an expression.
    """
    parser = Parser(Lexer(Scanner("1 + 2 +\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.INCOMPLETE_LINE
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 6
    assert err.value.line == "1 + 2 +"

    parser = Parser(Lexer(Scanner("1 + 2 *\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.INCOMPLETE_LINE
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 6
    assert err.value.line == "1 + 2 *"

def test_analyzer_wrong_assign(clean_namespace):
    """
    To validate that the analyzer generates exceptions when the assign operator
    is not properly written.
    """
    analyzer = Analyzer(Parser(Lexer(Scanner('7 + 4 -> b + 1\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    with pytest.raises(LythSyntaxError) as err:
        analyzer()

    assert err.value.msg is LythError.GARBAGE_CHARACTERS
    assert err.value.filename == "__test__"
    assert err.value.lineno == 0
    assert err.value.offset == 6
    assert err.value.line == "7 + 4 -> b + 1"

    analyzer = Analyzer(Parser(Lexer(Scanner('7 + 4 <- 1 + 2\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    with pytest.raises(LythSyntaxError) as err:
        analyzer()

    assert err.value.msg is LythError.LEFT_MEMBER_IS_EXPRESSION
    assert err.value.filename == "__test__"
    assert err.value.lineno == 0
    assert err.value.offset == 2
    assert err.value.line == "7 + 4 <- 1 + 2"

    analyzer = Analyzer(Parser(Lexer(Scanner('7 + 4 -> \n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    with pytest.raises(LythSyntaxError) as err:
        analyzer()

    assert err.value.msg is LythError.INCOMPLETE_LINE
    assert err.value.filename == "__test__"
    assert err.value.lineno == 0
    assert err.value.offset == 8
    assert err.value.line == "7 + 4 -> "

    analyzer = Analyzer(Parser(Lexer(Scanner('7 + 4 -> 6\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    with pytest.raises(LythSyntaxError) as err:
        analyzer()

    assert err.value.msg is LythError.NAME_EXPECTED
    assert err.value.filename == "__test__"
    assert err.value.lineno == 0
    assert err.value.offset == 9
    assert err.value.line == "7 + 4 -> 6"
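
Several analyzer tests in these examples take a clean_namespace fixture that is
not part of the excerpts. A minimal sketch of what such a fixture could look
like in a conftest.py, assuming the Analyzer keeps its symbol table in shared
state that must be reset between tests (the actual reset mechanism is project
specific and only hinted at in the comments):

import pytest

@pytest.fixture
def clean_namespace():
    # Hypothetical stand-in for the project's real conftest.py fixture.
    # Before the test: ensure the namespace/symbol table the Analyzer populates
    # starts empty, so assertions on analyzer.table are predictable.
    yield
    # After the test: drop whatever symbols the test added (project specific,
    # intentionally left out of this sketch).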
Example #3
def test_missing_space_after_operator():
    """
    A missing space after an operator causes the lexer to raise an exception,
    except for '+' and '-', which are tolerated.

    It does not, however, tolerate '+=4'.
    """
    lexer = Lexer(Scanner("1  +2  \n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 1
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "1  +2  "

    token = next(lexer)
    assert token.info.offset == 3
    assert token.info.filename == "<stdin>"
    assert token.lexeme == '+'
    assert token.info.lineno == 0
    assert token.symbol == Symbol.ADD
    assert token.info.line == "1  +2  "

    token = next(lexer)
    assert token.info.offset == 4
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 2
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "1  +2  "

    lexer = Lexer(Scanner("1  //2  \n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 1
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "1  //2  "

    with pytest.raises(LythSyntaxError) as err:
        token = next(lexer)

    assert err.value.msg is LythError.MISSING_SPACE_AFTER_OPERATOR
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 3
    assert err.value.line == "1  //2  "
Example #4
def test_parser_nested_let_assign():
    """
    To validate that the parser resolves nested let blocks.
    """
    parser = Parser(Lexer(Scanner("let:\n  let:\n    a <- 1 + 2\n  b <- a * 3\n\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(Let(MutableAssign(Name(a), Add(Num(1), Num(2)))), MutableAssign(Name(b), Mul(Name(a), Num(3))))"

    parser = Parser(Lexer(Scanner("let:\n  let:\n    a <- 1 + 2\n\n  b <- a * 3\n\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(Let(MutableAssign(Name(a), Add(Num(1), Num(2)))), MutableAssign(Name(b), Mul(Name(a), Num(3))))"

def test_analyzer_immutable_assign(clean_namespace):
    """
    To validate that the analyzer is able to produce a symbol table.

    We test the store and load contexts: a value is stored as the result of an
    expression, and the alias is then used to store another value into another
    variable.
    """

    analyzer = Analyzer(Parser(Lexer(Scanner('7 + 4 -> b\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    analyzer()
    assert analyzer.table[('b', '__test__')].type.value == 11
    assert analyzer.table[('b', '__test__')].type.mutable == Field.IMMUTABLE
    assert analyzer.table[('b', '__test__')].type.type == Field.UNKNOWN

    analyzer.parser.lexer.scanner += '12 -> b\n'
    with pytest.raises(LythSyntaxError) as err:
        analyzer()

    assert err.value.msg is LythError.REASSIGN_IMMUTABLE
    assert err.value.filename == "__test__"
    assert err.value.lineno == 1
    assert err.value.offset == 3
    assert err.value.line == "12 -> b"
Example #6
def test_parser_let_assign():
    """
    To validate the parser iterates properly over a variable being assigned the
    result of an expression.
    """
    parser = Parser(Lexer(Scanner("let a <- 1 + 2\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(MutableAssign(Name(a), Add(Num(1), Num(2))))"

    parser = Parser(Lexer(Scanner("let b * 2 -> a\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(ImmutableAssign(Name(a), Mul(Name(b), Num(2))))"
Example #7
def test_tabulation():
    """
    To validate that the scanner counts "\t" as a single character while
    expanding it into a double space.
    """
    s = "let\t:\n"
    scan = Scanner(s)

    for i, e in enumerate(scan):

        if i < 3:
            assert e == s[i]
            assert scan.lineno == 0
            assert scan.offset == i
            assert f"{scan!s}" == f"in line 0 column {i}"
            assert f"{scan!r}" == f"in line 0 column {i}:\n\t\"let  :\"\n\t{' ' * (i + 1)}^"
            assert scan.line == 'let  :'

        elif i == 3:
            assert e == " "
            assert scan.lineno == 0
            assert scan.offset == 3
            assert f"{scan!s}" == f"in line 0 column {3}"
            assert f"{scan!r}" == f"in line 0 column {3}:\n\t\"let  :\"\n\t{' ' * 4}^"
            assert scan.line == 'let  :'

        elif i == 4:
            assert e == " "
            assert scan.lineno == 0
            assert scan.offset == 3
            assert f"{scan!s}" == f"in line 0 column {3}"
            assert f"{scan!r}" == f"in line 0 column {3}:\n\t\"let  :\"\n\t{' ' * 4}^"
            assert scan.line == 'let  :'
Example #8
def test_parser_let_be():
    """
    To validate that the parser handles a 'let ... be ...' declaration, with
    and without an explicit type.
    """
    parser = Parser(Lexer(Scanner("let b:\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(Class(Name(b), None))"

    parser = Parser(Lexer(Scanner("let bit be attribute:\n")))

    assign = parser()
    assert assign.name == NodeType.Let
    assert str(assign) == "Let(Class(Name(bit), Type(Name(attribute))))"
Example #9
def test_scanner_respawn():
    """
    A scanner that hit EOF can be "respawned" by feeding it more lines.
    """
    scan = Scanner("a\n")

    char = scan()
    assert char == "a"
    assert scan.lineno == 0
    assert scan.offset == 0
    assert f"{scan!s}" == f"in line 0 column 0"
    assert f"{scan!r}" == f"in line 0 column 0:\n\t\"a\"\n\t ^"
    assert scan.line == 'a'

    char = scan()
    assert char == '\n'
    assert scan.lineno == 0

    with pytest.raises(StopIteration):
        next(scan)

    scan += "b"

    char = scan()
    assert char == "b"
    assert scan.lineno == 1
    assert scan.offset == 0
    assert f"{scan!s}" == f"in line 1 column 0"
    assert f"{scan!r}" == f"in line 1 column 0:\n\t\"b\"\n\t ^"
    assert scan.line == 'b'
Example #10
def test_parser_docstring():
    """
    To validate that the docstring of a class or function is not captured in
    this version of the tool and is treated as a multiline comment.
    """
    parser = Parser(Lexer(Scanner('"""\nHello you\n"""\n')))
    test = parser()
    assert test.name == NodeType.Noop
Example #11
def test_ast():
    """
    To validate that an AST node can be properly instantiated from a token.
    """
    lexer = Lexer(Scanner("1 + 2\n", filename="dummy.txt"))

    node_one = Node(next(lexer))
    assert node_one._children == (1, )
    assert node_one.filename == "dummy.txt"
    assert node_one.lineno == 0
    assert node_one.offset == 0
    assert node_one.line == "1 + 2"

    assert node_one.value == 1
    assert str(node_one) == "Num(1)"
    assert repr(node_one) == "NodeType.Num(1)"

    with pytest.raises(AttributeError):
        assert node_one.left == 1

    with pytest.raises(AttributeError):
        assert node_one.right == 2

    token_plus = next(lexer)

    node_two = Node(next(lexer))
    assert node_two._children == (2, )
    assert node_two.filename == "dummy.txt"
    assert node_two.lineno == 0
    assert node_two.offset == 4
    assert node_two.line == "1 + 2"

    assert node_two.value == 2
    assert str(node_two) == "Num(2)"
    assert repr(node_two) == "NodeType.Num(2)"

    node_plus = Node(token_plus, node_one, node_two)

    assert node_plus._children[0] is node_one
    assert node_plus._children[1] is node_two

    with pytest.raises(IndexError):
        assert node_plus._children[2] is node_one

    assert node_plus.filename == "dummy.txt"
    assert node_plus.lineno == 0
    assert node_plus.offset == 2
    assert node_plus.line == "1 + 2"

    with pytest.raises(AttributeError):
        assert node_plus.value == 1

    assert node_plus.left is node_one
    assert node_plus.right is node_two

    assert str(node_plus) == "Add(Num(1), Num(2))"
    assert repr(node_plus) == "NodeType.Add(Num(1), Num(2))"
Example #12
def test_multiple_spaces():
    """
    Multiple spaces are skipped.
    """
    lexer = Lexer(Scanner("1  //  2  \n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 1
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "1  //  2  "

    token = next(lexer)
    assert token.info.offset == 3
    assert token.info.filename == "<stdin>"
    assert token.lexeme == '//'
    assert token.info.lineno == 0
    assert token.symbol == Symbol.FLOOR
    assert token.info.line == "1  //  2  "

    token = next(lexer)
    assert token.info.offset == 7
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 2
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "1  //  2  "

    token = next(lexer)
    assert token.info.offset == 9
    assert token.info.filename == "<stdin>"
    assert token.lexeme == '\n'
    assert token.info.lineno == 0
    assert token.symbol == Symbol.EOL
    assert token.info.line == "1  //  2  "

    token = next(lexer)
    assert token.info.offset == -1
    assert token.info.filename == "<stdin>"
    assert token.lexeme is None
    assert token.info.lineno == 1
    assert token.symbol == Symbol.EOF
    assert token.info.line == ""

    with pytest.raises(StopIteration):
        token = next(lexer)

    assert token.info.offset == -1
    assert token.info.filename == "<stdin>"
    assert token.lexeme is None
    assert token.info.lineno == 1
    assert token.symbol == Symbol.EOF
    assert token.info.line == ""
Example #13
def test_too_much_spaces():
    """
    The colon symbol must not be preceded by a space. Spaces after it are, for
    now, tolerated.
    """
    lexer = Lexer(Scanner("let :\n"))

    with pytest.raises(LythSyntaxError) as err:
        for i, _ in enumerate(lexer):
            pass

    assert err.value.msg is LythError.TOO_MUCH_SPACE_BEFORE
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 4
    assert err.value.line == "let :"

    lexer = Lexer(Scanner("let:    \n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 'let'
    assert token.info.lineno == 0
    assert token.symbol == Keyword.LET
    assert token.info.line == "let:    "

    token = next(lexer)
    assert token.info.offset == 3
    assert token.info.filename == "<stdin>"
    assert token.lexeme == ':'
    assert token.info.lineno == 0
    assert token.symbol == Symbol.COLON
    assert token.info.line == "let:    "

    token = next(lexer)
    assert token.info.offset == 7
    assert token.info.filename == "<stdin>"
    assert token.lexeme == '\n'
    assert token.info.lineno == 0
    assert token.symbol == Symbol.EOL
    assert token.info.line == "let:    "
Example #14
def test_indent():
    """
    To validate the lexer produces the right indent token.
    """
    lexer = Lexer(Scanner("  1 + 2\n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 1
    assert token.info.lineno == 0
    assert token.symbol == Symbol.INDENT
    assert token.info.line == "  1 + 2"

    token = next(lexer)
    assert token.info.offset == 2
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 1
    assert token.info.lineno == 0
    assert token.symbol == Literal.VALUE
    assert token.info.line == "  1 + 2"

    lexer = Lexer(Scanner("    1 + 2\n"))

    token = next(lexer)
    assert token.info.offset == 0
    assert token.info.filename == "<stdin>"
    assert token.lexeme == 2
    assert token.info.lineno == 0
    assert token.symbol == Symbol.INDENT
    assert token.info.line == "    1 + 2"

    lexer = Lexer(Scanner("   1 + 2\n"))

    with pytest.raises(LythSyntaxError) as err:
        token = next(lexer)

    assert err.value.msg is LythError.UNEVEN_INDENT
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 0
    assert err.value.line == "   1 + 2"
Example #15
def test_parser_wrong_expression():
    """
    To validate that the parser complains in various situations:

    1. The expression has too many trailing characters.
    2. The expression has an unexpected let keyword.
    """
    parser = Parser(Lexer(Scanner("1 + 2 + 3 ")))

    with pytest.raises(LythSyntaxError) as err:
        parser()

    assert err.value.msg is LythError.GARBAGE_CHARACTERS

    parser = Parser(Lexer(Scanner("let 1 + 2 + 3\n")))

    with pytest.raises(LythSyntaxError) as err:
        parser()

    assert err.value.msg is LythError.LET_ON_EXPRESSION
Example #16
def test_interpreter():
    """
    A basic set of node visits.
    """
    interpreter = Interpreter()

    cmd = next(Parser(Lexer(Scanner("1 + 2\n"))))
    assert interpreter.visit(cmd) == 3

    cmd = next(Parser(Lexer(Scanner("1 * 2\n"))))
    assert interpreter.visit(cmd) == 2

    cmd = next(Parser(Lexer(Scanner("1 - 2\n"))))
    assert interpreter.visit(cmd) == -1

    cmd = next(Parser(Lexer(Scanner("\n"))))
    assert interpreter.visit(cmd) is None

    cmd = next(Parser(Lexer(Scanner("a\n"))))
    assert interpreter.visit(cmd) == 'a'
Example #17
def test_parser_expression():
    """
    To validate the parser iterates properly over an expression.

    The last node of a properly formatted string should be Noop.
    """
    parser = Parser(Lexer(Scanner("1 + 2 + 3 \n")))

    for i in parser:
        pass

    assert i.name == NodeType.Noop
    assert str(i) == "Noop()"
Example #18
def test_parser_parenthesis():
    """
    Parentheses offer the highest precedence and change AST node ordering.
    """
    parser = Parser(Lexer(Scanner("1 + (a - 3) * 5\n")))

    expr = next(parser)
    assert expr.name == NodeType.Add
    assert str(expr) == "Add(Num(1), Mul(Sub(Name(a), Num(3)), Num(5)))"

    expr = next(parser)
    assert expr.name == NodeType.Noop
    assert str(expr) == "Noop()"
Example #19
def test_parser_substraction():
    """
    To validate the parser returns the right AST node.
    """
    parser = Parser(Lexer(Scanner("1 + 2 - 3\n")))

    expr = next(parser)
    assert expr.name == NodeType.Sub
    assert str(expr) == "Sub(Add(Num(1), Num(2)), Num(3))"

    expr = next(parser)
    assert expr.name == NodeType.Noop
    assert str(expr) == "Noop()"
Example #20
def test_parser_precedence():
    """
    To validate the parser returns the right AST node.
    """
    parser = Parser(Lexer(Scanner("1 + 2 * 3 - 1\n")))

    expr = next(parser)
    assert expr.name == NodeType.Sub
    assert str(expr) == "Sub(Add(Num(1), Mul(Num(2), Num(3))), Num(1))"

    expr = next(parser)
    assert expr.name == NodeType.Noop
    assert str(expr) == "Noop()"
Example #21
def test_parser_multiplication():
    """
    To validate the parser returns the right AST node.
    """
    parser = Parser(Lexer(Scanner("1 * 2 * 3\n")))

    expr = next(parser)
    assert expr.name == NodeType.Mul
    assert str(expr) == "Mul(Mul(Num(1), Num(2)), Num(3))"

    expr = next(parser)
    assert expr.name == NodeType.Noop
    assert str(expr) == "Noop()"
Example #22
def main(argv=sys.argv):
    """
    The main entry point of the application.
    """
    settings = fetch(argv[1:])
    error = 0

    # interpreter = Interpreter()

    count = 0

    while count <= settings.cycle:
        try:
            source = input('>>> ')
            # A line ending with ':' opens a block: keep reading continuation
            # lines until an empty or unindented line is entered.
            if source.rstrip() and source.rstrip()[-1] == ':':
                while True:
                    line = input('... ')
                    source += '\n' + line
                    if not line or len(line) - len(line.lstrip()) == 0:
                        break

            scanner = Scanner(source + "\n")
            parser = Parser(Lexer(scanner))
            analyzer = Analyzer(parser)

            cmd = next(parser)
            # print(interpreter.visit(cmd))

            ret = analyzer.visit(cmd)
            if ret:
                print(ret)

        except LythSyntaxError as e:
            print(e)

        except KeyboardInterrupt:
            print("Keyboard interrupt")
            break

        except Exception:
            exc_type, exc_value, exc_tb = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_tb)
            error = 1
            break

        # A cycle count of 0 means run the REPL forever; otherwise count
        # iterations so the loop terminates after the requested cycles.
        if settings.cycle:
            count += 1

    print("Goodbye.")
    return error
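
For completeness, here is a sketch of how this REPL entry point might be wired
when the module is run directly; the __main__ guard below is not part of the
excerpt above:

if __name__ == "__main__":
    # Propagate the error flag returned by main() as the process exit status.
    sys.exit(main())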
Example #23
def test_parser_invalid_expression():
    """
    To validate that the parser detects when the expression it evaluates is
    invalid.
    """
    parser = Parser(Lexer(Scanner("1 + 2 + /\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.LITERAL_EXPECTED
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 8
    assert err.value.line == "1 + 2 + /"
Example #24
def test_parser_invalid_let():
    """
    To validate that the parser checks the integrity of a let block statement.
    """
    parser = Parser(Lexer(Scanner("let: a <- 1 + 2\n    b <- a * 3\n\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.GARBAGE_CHARACTERS
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 5
    assert err.value.line == "let: a <- 1 + 2"

    parser = Parser(Lexer(Scanner("let:\na <- 1 + 2\n  b <- a * 3\n\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.INCONSISTENT_INDENT
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 1
    assert err.value.offset == 0
    assert err.value.line == "a <- 1 + 2"

    parser = Parser(Lexer(Scanner("let:\n  a <- 1 + 2\n    b <- a * 3\n\n")))

    with pytest.raises(LythSyntaxError) as err:
        next(parser)

    assert err.value.msg is LythError.INCONSISTENT_INDENT
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 2
    assert err.value.offset == 0
    assert err.value.line == "    b <- a * 3"

def test_analyzer_let(clean_namespace):
    """
    To validate that, for now, the analyzer does nothing specific upon the let
    keyword.
    """
    analyzer = Analyzer(Parser(Lexer(Scanner('let a <- 1 + 2\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"

    analyzer()
    assert analyzer.table[('a', '__test__')].type.value == 3
    assert analyzer.table[('a', '__test__')].type.mutable == Field.MUTABLE
    assert analyzer.table[('a', '__test__')].type.type == Field.UNKNOWN
    assert analyzer.table.left is None
    assert str(analyzer.table.right) == "a, __test__"
Example #26
def test_missing_empty_line():
    """
    The script must end with an empty line.
    """
    lexer = Lexer(Scanner("1 + 2"))

    with pytest.raises(LythSyntaxError) as err:
        for i, _ in enumerate(lexer):
            pass

    assert err.value.msg is LythError.MISSING_EMPTY_LINE
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 4
    assert err.value.line == "1 + 2"
Example #27
def test_missing_space_before_operator():
    """
    A missing space before an operator causes the lexer to raise an exception.
    """
    token = None
    lexer = Lexer(Scanner("1+  2  \n"))

    with pytest.raises(LythSyntaxError) as err:
        token = next(lexer)

    assert token is None
    assert err.value.msg is LythError.MISSING_SPACE_BEFORE_OPERATOR
    assert err.value.filename == "<stdin>"
    assert err.value.lineno == 0
    assert err.value.offset == 0
    assert err.value.line == "1+  2  "

def test_analyzer_iterator(clean_namespace):
    """
    To validate the ability to analyze multiple lines on the fly.
    """
    analyzer = Analyzer(Parser(Lexer(Scanner('a <- 1 + 2\na * 5 -> b\n', '__test__'))))

    assert str(analyzer.table) == "__test__, root"
    assert analyzer.table.left is None
    assert analyzer.table.right is None

    for _ in analyzer:
        print(f"TEST: left node of root is {analyzer.table.left}")
        print(f"TEST: right node of root is {analyzer.table.right}")

    assert analyzer.table[('a', '__test__')].type.value == 3
    assert analyzer.table[('a', '__test__')].type.mutable == Field.MUTABLE
    assert analyzer.table[('b', '__test__')].type.value == 15
    assert analyzer.table[('b', '__test__')].type.mutable == Field.IMMUTABLE

def test_analyzer_unknown_ast_node(clean_namespace):
    """
    To validate that we get a dedicated error message when the AST node is not
    valid.
    """
    parser = Parser(Lexer(Scanner('a <- 1 + 2\na * 5 -> b\n', '__test__')))

    node = next(parser)

    from enum import Enum

    class Dummy(Enum):
        DUMMY = "dummy"

    node.name = Dummy.DUMMY

    analyzer = Analyzer(None, "__dummy__.py")

    with pytest.raises(TypeError):
        analyzer.visit(node)
Example #30
def test_interpreter_whole_file():
    """
    The interpreter should survive a basic set of lyth commands.

    Next, we'll see if bytecode can be properly produced... ;)
    """
    path = Path(__file__).resolve().parent / 'resources/stm32f4_gpiob.lyth'

    with open(path) as f:
        interpreter = Interpreter()
        parser = Parser(Lexer(Scanner(f.read(),
                                      filename="stm32f4_gpiob.lyth")))

        while True:
            cmd = next(parser)
            assert interpreter.visit(cmd) is None
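
Collected into a single test module together with the imports sketched under
Example #1, the test functions above can be run with pytest in the usual way,
for instance (the file name here is only a placeholder):

pytest -q test_lyth_examples.py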