Example #1
def test_size():
    tokenized = Tokenized()

    assert tokenized.size() == 0

    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    assert tokenized.size() == 1

    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    assert tokenized.size() == 2
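
The listing does not show the imports these tests depend on. A plausible header is sketched below; the module paths are assumptions (the source only shows the class names), so adjust them to the actual project layout:

import pytest

# NOTE: module paths are guesses -- the listing does not show them.
from tokens import Token, TokenType
from tokenized import Tokenized
from environment import Environment
from preprocessor import Preprocessor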
Example #2
def test_no_variable():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list(
        [Token(TokenType.DOUBLE_QUOTED, "var == $var")])

    with pytest.raises(Exception):
        _ = preprocessor.process(tokenized, environment)
Example #3
def process(self, tokenized, environment):
    """
    Returns a Tokenized object in which variables from the environment
    have been substituted into the given tokenized string.
    Substitution is not performed inside single quotes.
    An exception is raised if a variable is not found in the environment.
    Substitution is performed in a single pass.
    Token types are not changed by the substitution.
    """
    result = Tokenized()
    for token in tokenized.to_list():
        string_value = token.get_string_value()
        token_type = token.get_token_type()
        if token_type != TokenType.SINGLE_QUOTED:
            string_value = self.__substitute(string_value, environment)
        result.add(Token(token_type, string_value))
    return result
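
The private __substitute helper called above is not part of the listing. Below is a minimal sketch consistent with the docstring and the tests; the $name variable syntax, the exact exception, and the Environment.get accessor are all assumptions:

import re

class Preprocessor:
    __VARIABLE = re.compile(r"\$(\w+)")  # assumed variable syntax

    def __substitute(self, string_value, environment):
        def replace(match):
            name = match.group(1)
            value = environment.get(name)  # assumed Environment accessor
            if value is None:
                # Matches the docstring: a missing variable raises
                # (see test_no_variable in Example #2).
                raise Exception("no such variable: " + name)
            return value
        # re.sub scans the input exactly once and never rescans a
        # replacement, which gives the documented single-pass behavior
        # (see test_substitute_only_once in Example #6).
        return self.__VARIABLE.sub(replace, string_value)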
Example #4
def test_split_by_type():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "1"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.SIMPLE, "2"))
    tokenized.add(Token(TokenType.SIMPLE, "3"))

    split_result = tokenized.split_by_type(TokenType.PIPE)

    assert len(split_result) == 3
    assert split_result[0].to_string_list() == ["1"]
    assert split_result[1].size() == 0
    assert split_result[2].to_string_list() == ["2", "3"]
Example #5
def test_split_by_type_with_no_such_type():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "1"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    split_result = tokenized.split_by_type(TokenType.PIPE)
    assert len(split_result) == 1
    assert split_result[0].to_string_list() == ["1", "="]
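
Examples #4 and #5 (together with test_split_by_type_empty in Example #15) pin down the splitting semantics: delimiter tokens are dropped, adjacent delimiters leave an empty segment between them, and there is always at least one segment. A sketch of the Tokenized method under those assumptions (the real implementation is not shown in the listing):

    def split_by_type(self, token_type):
        # Start with one empty segment so an input without the delimiter
        # (or an empty input) still yields exactly one segment.
        segments = [Tokenized()]
        for token in self.to_list():
            if token.get_token_type() == token_type:
                # Drop the delimiter and open a new segment; two adjacent
                # delimiters therefore leave an empty segment behind.
                segments.append(Tokenized())
            else:
                segments[-1].add(token)
        return segments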
Example #6
def test_substitute_only_once():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SIMPLE, "$var1")])
    environment.put("var1", "$var1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["$var1"]
Example #7
def test_single_quoted():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SINGLE_QUOTED, "'$var'")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["'$var'"]
Example #8
def test_token_with_no_variable():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SIMPLE, "var")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["var"]
Example #9
def test_double_quoted():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list(
        [Token(TokenType.DOUBLE_QUOTED, "\"var == $var\"")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["\"var == 1\""]
Example #10
def test_first():
    tokenized = Tokenized()

    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    first = tokenized.first()
    assert first.get_token_type() == TokenType.SIMPLE
    assert first.get_string_value() == "simple"

    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    first = tokenized.first()
    assert first.get_token_type() == TokenType.SIMPLE
    assert first.get_string_value() == "simple"
Example #11
def test_to_list():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))

    token_list = tokenized.to_list()
    assert token_list[0].get_token_type() == TokenType.PIPE
    assert token_list[0].get_string_value() == "|"
    assert token_list[1].get_token_type() == TokenType.ASSIGNMENT
    assert token_list[1].get_string_value() == "="

    assert len(token_list) == 2
Example #12
def test_several():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([
        Token(TokenType.SIMPLE, "$var1"),
        Token(TokenType.SIMPLE, "var2"),
        Token(TokenType.SIMPLE, "$var3")
    ])
    environment.put("var1", "1")
    environment.put("var2", "2")
    environment.put("var3", "3")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["1", "var2", "3"]
Example #13
def test_to_string_list():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    tokenized.add(Token(TokenType.EMPTY, " "))

    expected = ["simple", "\"double\"", "'single'", "|", "=", " "]
    assert expected == tokenized.to_string_list()
Example #14
def test_make_string():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    tokenized.add(Token(TokenType.EMPTY, " "))

    expected = "simple\"double\"'single'|= "
    assert expected == tokenized.make_string()
Example #15
def test_split_by_type_empty():
    tokenized = Tokenized()
    split_result = tokenized.split_by_type(TokenType.SIMPLE)
    assert len(split_result) == 1
    assert split_result[0].size() == 0
Example #16
def test_to_list_empty():
    tokenized = Tokenized()
    assert tokenized.to_list() == []
Example #17
def test_remove():
    tokenized = Tokenized()

    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    tokenized.add(Token(TokenType.EMPTY, " "))

    tokenized.remove(TokenType.SIMPLE)
    assert not tokenized.contains_token(TokenType.SIMPLE)

    tokenized.remove(TokenType.DOUBLE_QUOTED)
    assert not tokenized.contains_token(TokenType.DOUBLE_QUOTED)

    tokenized.remove(TokenType.SINGLE_QUOTED)
    assert not tokenized.contains_token(TokenType.SINGLE_QUOTED)

    tokenized.remove(TokenType.PIPE)
    assert not tokenized.contains_token(TokenType.PIPE)

    tokenized.remove(TokenType.ASSIGNMENT)
    assert not tokenized.contains_token(TokenType.ASSIGNMENT)

    tokenized.remove(TokenType.EMPTY)
    assert not tokenized.contains_token(TokenType.EMPTY)
Example #18
def test_first_of_empty():
    tokenized = Tokenized()

    with pytest.raises(Exception):
        tokenized.first()
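
For completeness, here is a minimal scaffolding sketch of the remaining types the tests exercise, reconstructed from the test behavior alone. Everything beyond the names the tests actually call (the enum representation, the private attributes, the exception message) is an assumption, not the project's code:

import enum

class TokenType(enum.Enum):
    # Members inferred from the tests; the real enum may define more.
    SIMPLE = enum.auto()
    DOUBLE_QUOTED = enum.auto()
    SINGLE_QUOTED = enum.auto()
    PIPE = enum.auto()
    ASSIGNMENT = enum.auto()
    EMPTY = enum.auto()

class Token:
    def __init__(self, token_type, string_value):
        self.__token_type = token_type
        self.__string_value = string_value

    def get_token_type(self):
        return self.__token_type

    def get_string_value(self):
        return self.__string_value

class Environment:
    def __init__(self):
        self.__variables = {}

    def put(self, name, value):
        self.__variables[name] = value

    def get(self, name):
        # Returning None for a missing name is an assumption; the tests
        # only require that process() ends up raising for it.
        return self.__variables.get(name)

class Tokenized:
    def __init__(self):
        self.__tokens = []

    @staticmethod
    def from_list(tokens):
        result = Tokenized()
        for token in tokens:
            result.add(token)
        return result

    def add(self, token):
        self.__tokens.append(token)

    def size(self):
        return len(self.__tokens)

    def first(self):
        if not self.__tokens:
            # test_first_of_empty expects an exception here.
            raise Exception("empty Tokenized has no first token")
        return self.__tokens[0]

    def to_list(self):
        return list(self.__tokens)

    def to_string_list(self):
        return [token.get_string_value() for token in self.__tokens]

    def make_string(self):
        return "".join(self.to_string_list())

    def contains_token(self, token_type):
        return any(t.get_token_type() == token_type for t in self.__tokens)

    def remove(self, token_type):
        self.__tokens = [t for t in self.__tokens
                         if t.get_token_type() != token_type]

    # split_by_type is sketched after Example #5 above.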