Example 1
def test_parse_double_quotes_inside_single():
    parser = Parser()
    command = "'\"1\"'"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 1
    __check_token(Token(TokenType.SINGLE_QUOTED, "'\"1\"'"), tokenized[0])
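
These test snippets call a module-level __check_token helper and use Parser, Token, and TokenType without showing their imports. A minimal sketch of the missing pieces, assuming Token exposes type and content attributes and a parsing module that provides the three names (the import path and both attribute names are assumptions, not taken from the source):

import pytest

from parsing import Parser, Token, TokenType  # hypothetical import path


def __check_token(expected, actual):
    # Compares the two tokens field by field; the attribute names 'type' and
    # 'content' are guesses based on how the tests construct Token instances.
    assert expected.type == actual.type
    assert expected.content == actual.content
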
Example 2
class Interpreter:
    """
    Interprets a command given as a string.
    Creates a local environment when constructed.
    """
    def __init__(self, controller):
        self._parser = Parser()
        self._preprocessor = Preprocessor()
        self._environment = Environment()
        self._factory_manager = FactoryManager(self._environment, controller)

    def process(self, command):
        """
        Passes the given command through the parser and the preprocessor.
        Builds an executable from the tokenized result and executes it.
        :param command: the command as a string
        """
        tokenized = self._parser.parse(command)

        substituted = self._preprocessor.process(tokenized, self._environment)

        final_tokenized = self._parser.parse(substituted.make_string())
        # Empty tokens had to be kept during the first parse so that the tokenized
        # result could be converted back to a string for substitution; they are dropped here.
        final_tokenized.remove(TokenType.EMPTY)

        factory = self._factory_manager.choose_factory(final_tokenized)
        executable = factory.create_executable(final_tokenized)

        executable.execute()
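
As the docstrings describe, process parses the command, performs substitution via the preprocessor, re-parses the substituted string, and lets a factory build and run the executable. A minimal usage sketch, assuming a stand-in controller object is acceptable (DummyController and the example command strings are illustrative assumptions; only the assignment and pipe syntax appear in the tests below):

class DummyController:
    # Hypothetical stand-in; the real controller interface is not shown here.
    pass


interpreter = Interpreter(DummyController())

# Assign a variable, then run a piped command; both forms mirror the token
# types exercised by the parser tests (ASSIGNMENT, DOUBLE_QUOTED, PIPE, SIMPLE).
interpreter.process('var="1"')
interpreter.process('pwd | pwd')
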
Example 3
def test_parse_double_quotes():
    parser = Parser()
    command = "\"double\""
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 1
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"double\""), tokenized[0])
Example 4
def test_parse_spaces():
    parser = Parser()
    command = "   "
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    for token in tokenized:
        __check_token(Token(TokenType.EMPTY, " "), token)
Example 5
def test_empty_pipe():
    parser = Parser()
    command = "||"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 2
    __check_token(Token(TokenType.PIPE, "|"), tokenized[0])
    __check_token(Token(TokenType.PIPE, "|"), tokenized[1])
Example 6
def test_parse_subsequent_quotes():
    parser = Parser()
    command = "''\"\"''"
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.SINGLE_QUOTED, "''"), tokenized[0])
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"\""), tokenized[1])
    __check_token(Token(TokenType.SINGLE_QUOTED, "''"), tokenized[2])
Example 7
def test_parse_spaces_at_ends():
    parser = Parser()
    command = " simple "
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.EMPTY, " "), tokenized[0])
    __check_token(Token(TokenType.SIMPLE, "simple"), tokenized[1])
    __check_token(Token(TokenType.EMPTY, " "), tokenized[2])
Example 8
def test_parse_assignment():
    parser = Parser()
    command = "var=\"1\""
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 3
    __check_token(Token(TokenType.SIMPLE, "var"), tokenized[0])
    __check_token(Token(TokenType.ASSIGNMENT, "="), tokenized[1])
    __check_token(Token(TokenType.DOUBLE_QUOTED, "\"1\""), tokenized[2])
Example 9
def test_fail_on_incorrect_quotes():
    parser = Parser()

    mixed = "'\"'\""
    unclosed_single = "'\"\""
    unclosed_double = "''\""

    with pytest.raises(Exception):
        _ = parser.parse(mixed)

    with pytest.raises(Exception):
        _ = parser.parse(unclosed_single)

    with pytest.raises(Exception):
        _ = parser.parse(unclosed_double)
Example 10
def test_parse_pipe():
    parser = Parser()
    command = "pwd|pwd |pwd | pwd| pwd"
    tokenized = parser.parse(command).to_list()

    expected = [
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.SIMPLE, "pwd"),
        Token(TokenType.PIPE, "|"),
        Token(TokenType.EMPTY, " "),
        Token(TokenType.SIMPLE, "pwd")
    ]

    assert len(tokenized) == len(expected)
    for i, token in enumerate(tokenized):
        __check_token(expected[i], token)
Example 11
def test_parse_empty():
    parser = Parser()
    command = ""
    tokenized = parser.parse(command).to_list()

    assert len(tokenized) == 0