Example #1
def _add_scene(tokens):
    tokens.extend([
        Token(TokenTypes.Scene),
        Token(TokenTypes.Numeral),
        Token(TokenTypes.Colon),
        Token(TokenTypes.EndLine),
    ])
    return tokens
Example #2
def _add_act(tokens):
    tokens.extend([
        Token(TokenTypes.Act),
        Token(TokenTypes.Numeral),
        Token(TokenTypes.Colon),
        Token(TokenTypes.EndLine),
    ])
    return tokens
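The two helpers above build token lists for the parser tests that follow. They assume a small Token value type; a minimal sketch, assuming a Token carries a TokenTypes member plus an optional value and compares by both fields (the project's real class may differ):

from enum import Enum, auto


class TokenTypes(Enum):
    # Hypothetical subset; the project defines many more token types.
    Act = auto()
    Scene = auto()
    Numeral = auto()
    Colon = auto()
    EndLine = auto()
    Eof = auto()


class Token:
    def __init__(self, type, value=None):
        self.type = type
        self.value = value

    def __eq__(self, other):
        # Compare by type and value so tests can assert on whole token lists.
        return (isinstance(other, Token)
                and self.type == other.type
                and self.value == other.value)

    def __repr__(self):
        return "Token({}, {!r})".format(self.type.name, self.value)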
Example #3
    def test_GIVEN_two_characters_on_stage_WHEN_one_speaking_THEN_2nd_person_pronoun_gets_not_speaking_character(
            self):
        tokens = [Token(TokenTypes.SecondPronoun), Token(TokenTypes.Eof)]

        parser = Parser(t for t in tokens)

        parser.onstage = ["A", "B"]
        parser.speaking = "A"

        self.assertEqual(parser.character_name(), "B")
Example #4
    def test_GIVEN_two_characters_on_stage_WHEN_get_character_by_name_THEN_can_get_that_character(
            self):
        tokens = [Token(TokenTypes.Name, "C"), Token(TokenTypes.Eof)]

        parser = Parser(t for t in tokens)

        parser.onstage = ["A", "B"]
        parser.speaking = "A"

        self.assertEqual(parser.character_name(), "C")
Example #5
    def test_GIVEN_trying_to_parse_play_WHEN_minimal_valid_play_THEN_no_errors_raised(
            self):
        tokens = [Token(TokenTypes.EndLine)]  # Have to have an initial line.
        tokens = _add_act(tokens)
        tokens = _add_scene(tokens)
        tokens.append(Token(TokenTypes.Eof))

        parser = Parser(t for t in tokens)

        parser.play()
Example #6
    def test_GIVEN_nonsense_token_WHEN_get_character_by_name_THEN_parse_error(
            self):
        tokens = [Token(TokenTypes.Numeral), Token(TokenTypes.Eof)]

        parser = Parser(t for t in tokens)

        parser.onstage = ["A", "B"]
        parser.speaking = "A"

        with self.assertRaises(SPLSyntaxError):
            parser.character_name()
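Taken together, Examples #3, #4 and #6 pin down the contract of Parser.character_name: a second-person pronoun resolves to the on-stage character who is not speaking, a Name token resolves to that name, and any other token is a syntax error. A hedged reconstruction, assuming the parser exposes the pending token as current_token (an assumption; the real parser may consume from its generator differently):

    def character_name(self):
        token = self.current_token
        if token.type is TokenTypes.SecondPronoun:
            # "You" refers to the on-stage character who is not speaking.
            others = [c for c in self.onstage if c != self.speaking]
            return others[0]
        if token.type is TokenTypes.Name:
            # An explicit name refers to that character directly.
            return token.value
        raise SPLSyntaxError("Expected a character name or pronoun")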
Example #7
    def test_GIVEN_exeunt_statement_WHEN_parsing_stage_control_THEN_no_parse_error(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Exeunt),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        parser.stagecontrol()
Example #8
    def test_GIVEN_a_scene_declaration_WHEN_tokenizing_THEN_tokens_contain_necessary_information(self):
        lexer = Lexer("Scene I: Description.")

        expected_tokens = [
            Token(TokenTypes.Scene),
            Token(TokenTypes.Numeral, "i"),
            Token(TokenTypes.Colon),
            Token(TokenTypes.EndLine),
            Token(TokenTypes.Eof),
        ]

        tokens = [t for t in lexer.token_generator()]
        self.assertEqual(tokens, expected_tokens)
Example #9
    def test_GIVEN_an_act_declaration_WHEN_tokenizing_THEN_tokens_contain_necessary_information(self):
        lexer = Lexer("Act IV: Description.")

        expected_tokens = [
            Token(TokenTypes.Act),
            Token(TokenTypes.Numeral, "iv"),
            Token(TokenTypes.Colon),
            Token(TokenTypes.EndLine),
            Token(TokenTypes.Eof),
        ]

        tokens = [t for t in lexer.token_generator()]
        self._assert_tokens_equal(tokens, expected_tokens)
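Example #9 onwards uses a _assert_tokens_equal helper instead of a plain assertEqual. A plausible sketch, assuming it compares the token streams pairwise so a failure reports the offending index rather than dumping two long lists (the test class's actual helper is not shown):

    def _assert_tokens_equal(self, tokens, expected_tokens):
        self.assertEqual(len(tokens), len(expected_tokens))
        for i, (actual, expected) in enumerate(zip(tokens, expected_tokens)):
            # Compare type and value separately for a pinpointed message.
            self.assertEqual(actual.type, expected.type,
                             "token type mismatch at index {}".format(i))
            self.assertEqual(actual.value, expected.value,
                             "token value mismatch at index {}".format(i))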
Example #10
    def test_GIVEN_an_exit_statement_WHEN_tokenizing_THEN_tokens_contain_necessary_information(self):
        lexer = Lexer("[Exit Hamlet]")

        expected_tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Exit),
            Token(TokenTypes.Name, "hamlet"),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        tokens = [t for t in lexer.token_generator()]
        self._assert_tokens_equal(tokens, expected_tokens)
Example #11
    def test_GIVEN_enter_statement_with_one_character_WHEN_parsing_stage_control_THEN_no_parse_error(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Enter),
            Token(TokenTypes.Name, "A"),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        parser.stagecontrol()
Example #12
    def test_GIVEN_enter_statement_with_no_characters_WHEN_parsing_stage_control_THEN_parse_error(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Enter),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        with self.assertRaises(SPLSyntaxError):
            parser.stagecontrol()
Example #13
    def test_GIVEN_variable_does_not_exist_WHEN_declaring_variable_THEN_no_parse_error(
            self):
        tokens = [
            Token(TokenTypes.Name, "A"),
            Token(TokenTypes.Comma),
            Token(TokenTypes.Noun, 1),
            Token(TokenTypes.EndLine),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        parser.var_assignment()
Example #14
    def test_GIVEN_a_character_not_on_stage_WHEN_that_character_leaves_THEN_parse_error(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Exit),
            Token(TokenTypes.Name, "A"),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        with self.assertRaises(SPLSyntaxError):
            parser.stagecontrol()
Example #15
    def test_GIVEN_a_character_on_stage_WHEN_that_character_leaves_THEN_no_parse_error(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Exit),
            Token(TokenTypes.Name, "A"),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)
        parser.onstage = ["A"]

        parser.stagecontrol()
Example #16
    def test_GIVEN_trying_to_parse_play_WHEN_eof_is_the_only_token_THEN_syntax_error_is_raised(
            self):
        tokens = [Token(TokenTypes.Eof)]
        parser = Parser(t for t in tokens)

        with self.assertRaises(SPLSyntaxError):
            parser.play()
Example #17
    def test_GIVEN_many_characters_on_stage_WHEN_exeunt_THEN_noone_left_on_stage(
            self):
        tokens = [
            Token(TokenTypes.OpenSqBracket),
            Token(TokenTypes.Exeunt),
            Token(TokenTypes.CloseSqBracket),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        parser.onstage = ["A", "B", "C", "D"]

        parser.stagecontrol()

        self.assertEqual(parser.onstage, [])
Example #18
    def test_GIVEN_variable_already_exists_WHEN_declaring_variable_THEN_parse_error(
            self):
        tokens = [
            Token(TokenTypes.Name, "A"),
            Token(TokenTypes.Comma),
            Token(TokenTypes.Noun, 1),
            Token(TokenTypes.EndLine),
            Token(TokenTypes.Eof),
        ]

        parser = Parser(t for t in tokens)

        parser.vars_table = ["A"]

        with self.assertRaises(SPLSyntaxError):
            parser.var_assignment()
Example #19
    def test_GIVEN_an_assignment_statement_WHEN_tokenizing_THEN_tokens_contain_necessary_information(self):
        lexer = Lexer("Romeo: You are a stupid pig!")

        expected_tokens = [
            Token(TokenTypes.Name, "romeo"),
            Token(TokenTypes.Colon),
            Token(TokenTypes.SecondPronoun),
            Token(TokenTypes.Adj, 2),
            Token(TokenTypes.Noun, -1),
            Token(TokenTypes.EndLine),
            Token(TokenTypes.Eof),
        ]

        tokens = [t for t in lexer.token_generator()]
        self._assert_tokens_equal(tokens, expected_tokens)
Example #20
    def test_GIVEN_a_conditional_statement_WHEN_tokenizing_THEN_tokens_contain_necessary_information(self):
        lexer = Lexer("Romeo: Am I equal to Lady Macbeth?")

        expected_tokens = [
            Token(TokenTypes.Name, "romeo"),
            Token(TokenTypes.Colon),
            Token(TokenTypes.QuestionStart),
            Token(TokenTypes.FirstPronoun),
            Token(TokenTypes.Name, "lady macbeth"),
            Token(TokenTypes.QuestionMark),
            Token(TokenTypes.Eof),
        ]

        tokens = [t for t in lexer.token_generator()]
        self._assert_tokens_equal(tokens, expected_tokens)
Example #21
    def token_generator(self):
        while self.pos < len(self.text):
            token = self.get_next_token()
            # Suppress the NoOp tokens emitted for unmatched characters.
            if token.type is not TokenTypes.NoOp:
                yield token
        yield Token(TokenTypes.Eof)
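The generator filters out NoOp tokens (which get_next_token emits for unmatched characters such as whitespace) and appends a single Eof token once the input is exhausted, which is why every expected_tokens list in the lexer tests ends with Token(TokenTypes.Eof). A short usage sketch, reusing the Lexer from the tests above:

lexer = Lexer("Scene I: Description.")
tokens = list(lexer.token_generator())
# tokens is [Scene, Numeral "i", Colon, EndLine, Eof]; the NoOp tokens
# produced for spaces and unmatched words never reach the caller.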
Example #22
    def get_next_token(self):
        # Patterns are tried in insertion order, so keywords and multi-word
        # phrases must appear before the generic word-list patterns.
        mapping = OrderedDict([
            ("(act)",
                lambda: Token(TokenTypes.Act)),
            ("(scene)",
                lambda: Token(TokenTypes.Scene)),
            ("(speak your mind)",
                lambda: Token(TokenTypes.Print, True)),
            ("(open your heart)",
                lambda: Token(TokenTypes.Print, False)),
            ("(open your mind)",
                lambda: Token(TokenTypes.Input, True)),
            ("(listen to your heart)",
                lambda: Token(TokenTypes.Input, False)),
            ("(let us proceed to |let us return to )",
                lambda: Token(TokenTypes.Goto, text)),
            (regex_from_words(self.names),
                lambda: Token(TokenTypes.Name, text)),
            (regex_from_words(self.adjectives),
                lambda: Token(TokenTypes.Adj, 2)),
            (regex_from_words(self.nouns),
                lambda: Token(TokenTypes.Noun, 1)),
            (regex_from_words(self.negative_nouns),
                lambda: Token(TokenTypes.Noun, -1)),
            (regex_from_words(["with", "and"]),
                lambda: Token(TokenTypes.Add, operators.Operators.ADD)),
            ("(\.|!)",
                lambda: Token(TokenTypes.EndLine)),
            ("(\?)",
                lambda: Token(TokenTypes.QuestionMark)),
            ("(,)",
                lambda: Token(TokenTypes.Comma)),
            ("(\[)",
                lambda: Token(TokenTypes.OpenSqBracket)),
            ("(\])",
                lambda: Token(TokenTypes.CloseSqBracket)),
            ("(:)",
                lambda: Token(TokenTypes.Colon)),
            ("({})".format("|".join(SECOND_PERSON_PRONOUNS)),
                lambda: Token(TokenTypes.SecondPronoun)),
            ("({})".format("|".join(FIRST_PERSON_PRONOUNS)),
                lambda: Token(TokenTypes.FirstPronoun)),
            ("(enter)",
                lambda: Token(TokenTypes.Enter)),
            ("(exit)",
                lambda: Token(TokenTypes.Exit)),
            ("(exeunt)",
                lambda: Token(TokenTypes.Exeunt)),
            ("(if so)",
                lambda: Token(TokenTypes.IfSo)),
            (" ([ivx]+)[.:]",
                lambda: Token(TokenTypes.Numeral, text)),
            ("(are|is|am) (?:{0}|{1}|{2}) ?(?:equal to) ?(?:{0}|{1}|{2})\?".format(
                regex_from_words(FIRST_PERSON_PRONOUNS),
                regex_from_words(SECOND_PERSON_PRONOUNS),
                regex_from_words(self.names)
                ),
                lambda: Token(TokenTypes.QuestionStart)),
        ])

        for regexp in mapping:
            # `text` is rebound on each iteration; the lambdas above close
            # over it and see the matched text when invoked below.
            match, text = self.text_starts_with_regex(regexp)
            if match:
                self.pos += len(text)
                return mapping[regexp]()

        # Nothing matched: consume one character and emit a NoOp token,
        # which token_generator filters out.
        self.pos += 1
        return Token(TokenTypes.NoOp)
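get_next_token leans on two helpers not shown in these examples: regex_from_words, which turns a word list into an alternation pattern, and text_starts_with_regex, which tries a pattern against the input at the current position. A speculative sketch of both, assuming case-insensitive matching and that the lowercased first capture group is returned as the token text (consistent with the lowercase values "iv", "hamlet" and "lady macbeth" expected by the lexer tests; the project's real helpers may differ):

import re


def regex_from_words(words):
    # Longest alternatives first, so "lady macbeth" wins over a bare "lady".
    return "({})".format("|".join(sorted(words, key=len, reverse=True)))

    def text_starts_with_regex(self, regexp):
        match = re.match(regexp, self.text[self.pos:], re.IGNORECASE)
        if match:
            # Return the lowercased first capture group so token values
            # come out normalized regardless of the input's casing.
            return True, match.group(1).lower()
        return False, ""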