Example 1
def test_split_by_type_with_no_such_type():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "1"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    splitted = tokenized.split_by_type(TokenType.PIPE)
    assert len(splitted) == 1
    assert splitted[0].to_string_list() == ["1", "="]
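
These tests exercise a small lexer API. As a reading aid, here is a minimal sketch of the Token and TokenType definitions the tests appear to assume; the actual project may define them differently.

from enum import Enum, auto

class TokenType(Enum):
    SIMPLE = auto()
    DOUBLE_QUOTED = auto()
    SINGLE_QUOTED = auto()
    PIPE = auto()
    ASSIGNMENT = auto()
    EMPTY = auto()

class Token:
    def __init__(self, token_type, string_value):
        # A token pairs a type with the exact substring it was built from.
        self.__token_type = token_type
        self.__string_value = string_value

    def get_token_type(self):
        return self.__token_type

    def get_string_value(self):
        return self.__string_value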
Example 2
def test_empty_pipe():
    parser = Parser()
    command = "||"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 2
    __check_token(Token(TokenType.PIPE, "|"), tokenized[0])
    __check_token(Token(TokenType.PIPE, "|"), tokenized[1])
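
The parser tests rely on a __check_token helper that is not shown here. Judging by its call sites, it is presumably a small assertion like the following sketch:

def __check_token(expected, actual):
    # Two tokens match when both their type and their raw string agree.
    assert expected.get_token_type() == actual.get_token_type()
    assert expected.get_string_value() == actual.get_string_value()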
Example 3
def test_parse_assignment():
    parser = Parser()
    command = "var=\"1\""
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.SIMPLE, "var"), tokenized[0])
    __check_token(Token(TokenType.ASSIGNMENT, "="), tokenized[1])
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"1\""), tokenized[2])
Example 4
def test_parse_subsequent_quotes():
    parser = Parser()
    command = "''\"\"''"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.SINGLE_QUOTED, "''"), tokenized[0])
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"\""), tokenized[1])
    __check_token(Token(TokenType.SINGLE_QUOTED, "''"), tokenized[2])
Example 5
def test_parse_spaces_at_ends():
    parser = Parser()
    command = " simple "
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.EMPTY, " "), tokenized[0])
    __check_token(Token(TokenType.SIMPLE, "simple"), tokenized[1])
    __check_token(Token(TokenType.EMPTY, " "), tokenized[2])
Example 6
def test_size():
    tokenized = Tokenized()

    assert tokenized.size() == 0

    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    assert tokenized.size() == 1

    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    assert tokenized.size() == 2
Example 7
def test_first():
    tokenized = Tokenized()

    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    first = tokenized.first()
    assert first.get_token_type() == TokenType.SIMPLE
    assert first.get_string_value() == "simple"

    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    assert first.get_token_type() == TokenType.SIMPLE
    assert first.get_string_value() == "simple"
Example 8
def test_to_list():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))

    token_list = tokenized.to_list()
    assert token_list[0].get_token_type() == TokenType.PIPE
    assert token_list[0].get_string_value() == "|"
    assert token_list[1].get_token_type() == TokenType.ASSIGNMENT
    assert token_list[1].get_string_value() == "="

    assert len(token_list) == 2
Example 9
def test_split_by_type():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "1"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.SIMPLE, "2"))
    tokenized.add(Token(TokenType.SIMPLE, "3"))

    splitted = tokenized.split_by_type(TokenType.PIPE)

    assert len(splitted) == 3
    assert splitted[0].to_string_list() == ["1"]
    assert splitted[1].size() == 0
    assert splitted[2].to_string_list() == ["2", "3"]
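
Together with Example 1, this test pins down the splitting contract: separators are dropped, and two adjacent separators produce an empty chunk. One plausible implementation of Tokenized.split_by_type, assuming the internal token list from the sketch above, is:

def split_by_type(self, token_type):
    # Start with one empty chunk; every separator opens the next chunk.
    result = [Tokenized()]
    for token in self.to_list():
        if token.get_token_type() == token_type:
            result.append(Tokenized())
        else:
            result[-1].add(token)
    return result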
Example 10
def test_several():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([
        Token(TokenType.SIMPLE, "$var1"),
        Token(TokenType.SIMPLE, "var2"),
        Token(TokenType.SIMPLE, "$var3")
    ])
    environment.put("var1", "1")
    environment.put("var2", "2")
    environment.put("var3", "3")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["1", "var2", "3"]
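
Environment is only used through put and, implicitly, a lookup during substitution. A minimal sketch consistent with these tests, raising on a missing variable as test_no_variable (Example 13 below) requires, could be:

class Environment:
    def __init__(self):
        self.__variables = {}

    def put(self, name, value):
        self.__variables[name] = value

    def get(self, name):
        # Substitution of an undefined variable must fail loudly
        # (see test_no_variable in Example 13).
        if name not in self.__variables:
            raise KeyError("no such variable: " + name)
        return self.__variables[name]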
Example 11
def test_parse_double_quotes_inside_single():
    parser = Parser()
    command = "'\"1\"'"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 1
    __check_token(Token(TokenType.SINGLE_QUOTED, "'\"1\"'"), tokenized[0])
Example 12
def test_parse_double_quotes():
    parser = Parser()
    command = "\"double\""
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 1
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"double\""), tokenized[0])
Example 13
def test_no_variable():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list(
        [Token(TokenType.DOUBLE_QUOTED, "\"var == $var\"")])

    with pytest.raises(Exception):
        _ = preprocessor.process(tokenized, environment)
Example 14
def test_parse_spaces():
    parser = Parser()
    command = "   "
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    for token in tokenized:
        __check_token(Token(TokenType.EMPTY, " "), token)
Example 15
def test_substitute_only_once():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SIMPLE, "$var1")])
    environment.put("var1", "$var1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["$var1"]
Example 16
def test_single_quoted():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SINGLE_QUOTED, "'$var'")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["'$var'"]
Example 17
def test_token_with_no_variable():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list([Token(TokenType.SIMPLE, "var")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["var"]
Example 18
def test_not_contains():
    tokenized = Tokenized()

    assert not tokenized.contains_token(TokenType.SIMPLE)
    tokenized.add(Token(TokenType.SIMPLE, "simple"))

    assert not tokenized.contains_token(TokenType.DOUBLE_QUOTED)
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))

    assert not tokenized.contains_token(TokenType.SINGLE_QUOTED)
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))

    assert not tokenized.contains_token(TokenType.PIPE)
    tokenized.add(Token(TokenType.PIPE, "|"))

    assert not tokenized.contains_token(TokenType.ASSIGNMENT)
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))

    assert not tokenized.contains_token(TokenType.EMPTY)
Example 19
def test_double_quoted():
    preprocessor = Preprocessor()
    environment = Environment()
    tokenized = Tokenized.from_list(
        [Token(TokenType.DOUBLE_QUOTED, "\"var == $var\"")])
    environment.put("var", "1")

    processed = preprocessor.process(tokenized, environment)

    assert processed.to_string_list() == ["\"var == 1\""]
Example 20
def test_to_string_list():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    tokenized.add(Token(TokenType.EMPTY, " "))

    expected = ["simple", "\"double\"", "'single'", "|", "=", " "]
    assert expected == tokenized.to_string_list()
Example 21
def test_make_string():
    tokenized = Tokenized()
    tokenized.add(Token(TokenType.SIMPLE, "simple"))
    tokenized.add(Token(TokenType.DOUBLE_QUOTED, "\"double\""))
    tokenized.add(Token(TokenType.SINGLE_QUOTED, "'single'"))
    tokenized.add(Token(TokenType.PIPE, "|"))
    tokenized.add(Token(TokenType.ASSIGNMENT, "="))
    tokenized.add(Token(TokenType.EMPTY, " "))

    expected = "simple\"double\"'single'|= "
    assert expected == tokenized.make_string()
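
Examples 20 and 21 together suggest that make_string is simply the concatenation of to_string_list. A plausible pair of bodies, again assuming the internal list sketched earlier:

def to_string_list(self):
    # The raw string of every token, in order.
    return [token.get_string_value() for token in self.to_list()]

def make_string(self):
    # Concatenating the raw strings reconstructs the original input.
    return "".join(self.to_string_list())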
Example 22
def process(self, tokenized, environment):
    """
    Returns a Tokenized object with variables from the environment
    substituted into the given tokenized string.
    Substitution is not performed inside single quotes.
    An exception is raised if a variable is not present in the environment.
    Substitution is performed in a single pass.
    Token types are unchanged by the substitution.
    """
    result = Tokenized()
    for token in tokenized.to_list():
        string_value = token.get_string_value()
        token_type = token.get_token_type()
        if token_type != TokenType.SINGLE_QUOTED:
            string_value = self.__substitute(string_value, environment)
        result.add(Token(token_type, string_value))
    return result
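
process delegates to a private __substitute helper that is not shown. The tests constrain it tightly: each $name is replaced exactly once (the replacement text is not rescanned, per test_substitute_only_once), and an unknown variable raises. A hypothetical regex-based version satisfying those constraints:

import re

def __substitute(self, string_value, environment):
    def replace(match):
        # environment.get raises when the variable is undefined.
        return environment.get(match.group(1))
    # re.sub never rescans replacement text, which gives the required
    # single-pass behaviour of test_substitute_only_once.
    return re.sub(r"\$(\w+)", replace, string_value)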
Example 23
def test_not_normalize_simple():
    simple = Token(TokenType.SIMPLE, "\"token\"")
    normalized = simple.get_string_with_normalized_quotes()
    assert normalized == "\"token\""
Example 24
def test_normalized_single():
    single_quoted = Token(TokenType.SINGLE_QUOTED, "'token'")
    normalized = single_quoted.get_string_with_normalized_quotes()
    assert normalized == "token"
Example 25
def test_normalize_only_once_single():
    single_quoted = Token(TokenType.SINGLE_QUOTED, "''token''")
    normalized = single_quoted.get_string_with_normalized_quotes()
    assert normalized == "'token'"
Example 26
def test_normalized_double():
    double_quoted = Token(TokenType.DOUBLE_QUOTED, "\"token\"")
    normalized = double_quoted.get_string_with_normalized_quotes()
    assert normalized == "token"
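
Examples 23 through 26 pin down the normalization contract: quoted tokens lose exactly one outer pair of quotes, and every other token type is returned untouched. One way Token.get_string_with_normalized_quotes could be written:

def get_string_with_normalized_quotes(self):
    # Strip exactly one pair of delimiters from quoted tokens only;
    # "''token''" therefore normalizes to "'token'" (Example 25).
    if self.get_token_type() in (TokenType.SINGLE_QUOTED,
                                 TokenType.DOUBLE_QUOTED):
        return self.get_string_value()[1:-1]
    return self.get_string_value()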
Example 27
def test_parse_pipe():
    parser = Parser()
    command = "pwd|pwd |pwd | pwd| pwd"
    tokenized = parser.parse(command).to_list()

    expected = [
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.SIMPLE, "pwd")
    ]

    assert len(tokenized) == len(expected)
    for i, token in enumerate(tokenized):
        __check_token(expected[i], token)
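
Taken together, the parser tests fix the tokenization rules: every space is its own EMPTY token, | and = become one-character tokens, quoted tokens keep their delimiters, and anything else accumulates into a SIMPLE token. A rough character-by-character Parser.parse consistent with all of the above, building on the earlier sketches and ignoring error handling such as unterminated quotes:

def parse(self, command):
    result = Tokenized()
    i = 0
    while i < len(command):
        ch = command[i]
        if ch == " ":
            # Each space is its own EMPTY token (see Example 14).
            result.add(Token(TokenType.EMPTY, " "))
            i += 1
        elif ch == "|":
            result.add(Token(TokenType.PIPE, "|"))
            i += 1
        elif ch == "=":
            result.add(Token(TokenType.ASSIGNMENT, "="))
            i += 1
        elif ch in "'\"":
            # Quoted tokens keep their delimiters (see Example 11).
            closing = command.index(ch, i + 1)  # assumes the quote closes
            token_type = (TokenType.SINGLE_QUOTED if ch == "'"
                          else TokenType.DOUBLE_QUOTED)
            result.add(Token(token_type, command[i:closing + 1]))
            i = closing + 1
        else:
            # Ordinary characters accumulate into a SIMPLE token.
            start = i
            while i < len(command) and command[i] not in " |='\"":
                i += 1
            result.add(Token(TokenType.SIMPLE, command[start:i]))
    return result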