Example #1
import app  # module under test; shared by all the examples below

def test_line_lexer_identifies_tokens():
    config = """
    a_key=this_ValUe34
    
    """
    lex = app.LineLexer(config)
    token1 = lex.get_next_token()
    assert isinstance(token1, app.Token), 'tokens must be of type Token'
    assert token1.token_type == app.LineLexer.TOKEN_NAME, 'first token must be of type name'
    assert token1.token_literal == 'a_key', 'first token must be `a_key`'

    token2 = lex.get_next_token()
    assert isinstance(token2, app.Token), 'tokens must be of type Token'
    assert token2.token_type == app.LineLexer.TOKEN_EQ_SIGN, 'second token must be of type assignment operator'
    assert token2.token_literal == '=', 'second token must be the equal sign'

    token3 = lex.get_next_token()
    assert isinstance(token3, app.Token), 'tokens must be of type Token'
    assert token3.token_type == app.LineLexer.TOKEN_NAME, 'third token must be of type name'
    assert token3.token_literal == 'this_ValUe34', 'third token must be `this_ValUe34`'

    token4 = lex.get_next_token()
    assert isinstance(token4, app.Token), 'tokens must be of type Token'
    assert token4.token_type == app.LineLexer.TOKEN_EOF, 'last token must be of type end of file/end of line'
    assert token4.token_literal == '<EOF>', 'last token must be `<EOF>`'
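
The assertions in Example #1 pin down the lexer's public surface. The sketch below shows one possible shape for it, inferred from the tests alone; the constant values, constructor signature and docstrings are assumptions, not the project's actual code.

class InvalidToken(Exception):
    """Raised when the input contains a character outside the grammar."""

class Token:
    def __init__(self, token_type, token_literal):
        self.token_type = token_type        # one of the LineLexer.TOKEN_* constants
        self.token_literal = token_literal  # raw text, or a converted bool/number

class LineLexer:
    # token-type constants the tests reference; concrete values are assumed
    TOKEN_NAME = 'NAME'        # keys and bare values such as `a_key`
    TOKEN_EQ_SIGN = 'EQ_SIGN'  # the `=` assignment operator
    TOKEN_NUMBER = 'NUMBER'    # numeric literals, converted to int/float
    TOKEN_BOOLEAN = 'BOOLEAN'  # yes/no/true/false literals, converted to bool
    TOKEN_EOF = 'EOF'          # end of input, with literal '<EOF>'

    def __init__(self, text):
        self.text = text

    def get_next_token(self):
        # scans past whitespace and comments, returns the next Token,
        # and raises InvalidToken on characters outside the grammar
        raise NotImplementedError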
Example #2
def test_lexer_parses_no_booleans():
    config = """
    me_key=no 
    """

    lex = app.LineLexer(config)
    lex.get_next_token()  # already covered above; skipping
    lex.get_next_token()  # already covered above; skipping
    t = lex.get_next_token()
    assert t.token_type == app.LineLexer.TOKEN_BOOLEAN, 'third token must be of type boolean'
    assert t.token_literal is False, 'third token must be a negative boolean'
Example #3
def test_lexer_parses_floats():
    config1 = """
    me_key = 12.23
    """

    lex1 = app.LineLexer(config1)
    lex1.get_next_token()  # already covered above; skipping
    lex1.get_next_token()  # already covered above; skipping
    t = lex1.get_next_token()
    assert t.token_type == app.LineLexer.TOKEN_NUMBER, 'third token must be of type number'
    assert t.token_literal == 12.23, 'third token must be the number 12.23'
Example #4
def test_lexer_parses_false_booleans():
    config1 = """
    me_key =False
    """

    lex1 = app.LineLexer(config1)
    lex1.get_next_token()  # already covered above; skipping
    lex1.get_next_token()  # already covered above; skipping
    t = lex1.get_next_token()
    assert t.token_type == app.LineLexer.TOKEN_BOOLEAN, 'third token must be of type boolean regardless of casing'
    assert t.token_literal is False, 'third token must be a negative boolean'
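
Examples #2 and #4 together imply that boolean literals are matched case-insensitively and converted to real `bool` values. One plausible helper for that is a lowercased lookup table; this is assumed, not taken from the project, and the `true`/`yes` entries are inferred only by symmetry with the tested negatives.

# assumed literal table: lowercasing the raw word first makes 'no', 'False',
# 'TRUE', etc. all resolve to a Python bool
BOOLEAN_LITERALS = {'true': True, 'yes': True, 'false': False, 'no': False}

def as_boolean(word):
    # True/False for a boolean literal, None for a plain name
    return BOOLEAN_LITERALS.get(word.lower())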
Example #5
def test_other_lexer_special_characters():
    config1 = """
    # this is a comment line
    """

    lex1 = app.LineLexer(config1)
    t1 = lex1.get_next_token()

    assert t1.token_type == app.LineLexer.TOKEN_EOF
    assert t1.token_literal == '<EOF>', 'lexer should ignore the entire comment line and return an EOF token'

    config2 = """
    path = /var/log/app.log
    """
    lex2 = app.LineLexer(config2)
    lex2.get_next_token()  # already covered above; skipping
    lex2.get_next_token()  # already covered above; skipping
    t2 = lex2.get_next_token()
    t3 = lex2.get_next_token()

    assert t2.token_type == app.LineLexer.TOKEN_NAME
    assert t2.token_literal == '/var/log/app.log', 'lexer should handle path-like values'
    assert t3.token_type == app.LineLexer.TOKEN_EOF
Example #6
def test_lexer_fails_on_invalid_characters():
    config1 = """
    invalid = $no_valid`se
    """

    lex1 = app.LineLexer(config1)
    lex1.get_next_token()  # already covered above; skipping
    lex1.get_next_token()  # already covered above; skipping
    exceptions = []
    try:
        lex1.get_next_token()
    except app.InvalidToken as e:
        exceptions.append(e)

    assert len(exceptions) == 1, '$ is not a valid character so an exception should have been thrown'
    assert isinstance(exceptions[0], app.InvalidToken), 'exception should be an invalid token exception'
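
If these tests run under pytest, the manual `exceptions` list can be replaced with `pytest.raises`, which fails the test automatically when no exception is raised. An equivalent spelling of Example #6 (the `_alt` test name is mine, and it reuses the `import app` from the top of this section):

import pytest

def test_lexer_fails_on_invalid_characters_alt():
    lex = app.LineLexer("""
    invalid = $no_valid`se
    """)
    lex.get_next_token()  # already covered above; skipping
    lex.get_next_token()  # already covered above; skipping
    with pytest.raises(app.InvalidToken):
        lex.get_next_token()  # `$` is outside the grammar, so this must raise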
Example #7
def test_lexer_ignores_whitespace():
    config = """
    me_key =me_val 
    
    \t
    """
    lex = app.LineLexer(config)
    t = lex.get_next_token()
    assert t.token_type == app.LineLexer.TOKEN_NAME, 'first token must be of type name'
    assert t.token_literal == 'me_key', 'first token must be `me_key`'

    t1 = lex.get_next_token()
    assert t1.token_type == app.LineLexer.TOKEN_EQ_SIGN, 'second token must be of type assignment operator'
    assert t1.token_literal == '=', 'second token must be the equal sign'

    t2 = lex.get_next_token()
    assert t2.token_type == app.LineLexer.TOKEN_NAME, 'third token must be of type name'
    assert t2.token_literal == 'me_val', 'third token must be `me_val`'

    t3 = lex.get_next_token()
    assert t3.token_type == app.LineLexer.TOKEN_EOF, 'last token must be of type end of file/end of line'
    assert t3.token_literal == '<EOF>', 'last token must be `<EOF>`'
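
Example #7 only constrains observable behaviour: spaces, tabs and blank lines between tokens never surface as tokens of their own. A cursor-based skip loop of the following shape would satisfy it; this is a sketch under that assumption, not the project's actual code.

WHITESPACE = ' \t\r\n'

def skip_whitespace(text, pos):
    # advance past spaces, tabs and newlines so the next scan starts at a
    # real character, or at end of input (which becomes the EOF token)
    while pos < len(text) and text[pos] in WHITESPACE:
        pos += 1
    return pos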