def test_valid_token(self):
    """Valid tokens are parsed by parse_token_on_current_line.

    Each case is (first line of source, assertion on the parsed token,
    assertion on the source after parsing).
    """

    def at_eol_of_line_1():
        # Parsing consumed the whole line: source is at EOL of line 1.
        return assert_source(is_at_eol=asrt.is_true,
                             has_current_line=asrt.is_true,
                             current_line_number=asrt.equals(1))

    def line_1_with_remaining(remaining: str):
        # Parsing stopped with un-consumed text left on line 1.
        return source_is_not_at_end(
            remaining_part_of_current_line=asrt.equals(remaining),
            current_line_number=asrt.equals(1))

    test_cases = [
        ('token',
         assert_plain('token'),
         at_eol_of_line_1()),
        (' token_preceded_by_space',
         assert_plain('token_preceded_by_space'),
         at_eol_of_line_1()),
        ('token_followed_by_space ',
         assert_plain('token_followed_by_space'),
         line_1_with_remaining(' ')),
        ('token_followed_by_other_token other_token',
         assert_plain('token_followed_by_other_token'),
         line_1_with_remaining(' other_token')),
        ('<<->> other_token',
         assert_plain('<<->>'),
         line_1_with_remaining(' other_token')),
        ("'single quoted'",
         assert_quoted('single quoted', "'single quoted'"),
         at_eol_of_line_1()),
        ('"double quoted"',
         assert_quoted('double quoted', '"double quoted"'),
         at_eol_of_line_1()),
        (" 'quoted preceded by space'",
         assert_quoted('quoted preceded by space',
                       "'quoted preceded by space'"),
         at_eol_of_line_1()),
        (" 'quoted followed by space' ",
         assert_quoted('quoted followed by space',
                       "'quoted followed by space'"),
         line_1_with_remaining(' ')),
        (" 'quoted token followed by other token' 'other_token'",
         assert_quoted('quoted token followed by other token',
                       "'quoted token followed by other token'"),
         line_1_with_remaining(" 'other_token'")),
    ]
    for first_line, token_assertion, source_assertion in test_cases:
        with self.subTest(msg=repr(first_line)):
            source = remaining_source(first_line)
            actual = sut.parse_token_on_current_line(source)
            token_assertion.apply_with_message(self, actual, 'token')
            source_assertion.apply_with_message(self, source, 'source')
def test_valid_token(self):
    """Check that parse_token_on_current_line accepts valid plain and
    quoted tokens and leaves the source at the expected position."""
    test_cases = [
        ('token',
         assert_plain('token'),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        (' token_preceded_by_space',
         assert_plain('token_preceded_by_space'),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        ('token_followed_by_space ',
         assert_plain('token_followed_by_space'),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' '),
             current_line_number=asrt.equals(1))),
        ('token_followed_by_other_token other_token',
         assert_plain('token_followed_by_other_token'),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' other_token'),
             current_line_number=asrt.equals(1))),
        ('<<->> other_token',
         assert_plain('<<->>'),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' other_token'),
             current_line_number=asrt.equals(1))),
        ("'single quoted'",
         assert_quoted('single quoted', "'single quoted'"),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        ('"double quoted"',
         assert_quoted('double quoted', '"double quoted"'),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        (" 'quoted preceded by space'",
         assert_quoted('quoted preceded by space',
                       "'quoted preceded by space'"),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        (" 'quoted followed by space' ",
         assert_quoted('quoted followed by space',
                       "'quoted followed by space'"),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' '),
             current_line_number=asrt.equals(1))),
        (" 'quoted token followed by other token' 'other_token'",
         assert_quoted('quoted token followed by other token',
                       "'quoted token followed by other token'"),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(" 'other_token'"),
             current_line_number=asrt.equals(1))),
    ]
    for line, expect_token, expect_source in test_cases:
        with self.subTest(msg=repr(line)):
            source = remaining_source(line)
            parsed = sut.parse_token_on_current_line(source)
            expect_token.apply_with_message(self, parsed, 'token')
            expect_source.apply_with_message(self, source, 'source')
def test_valid_token(self):
    """parse_token_or_none_on_current_line should parse a valid token and
    leave any following text of the line un-consumed."""
    cases = [
        ('token',
         assert_plain('token'),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        ('<<->> other_token',
         assert_plain('<<->>'),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' other_token'),
             current_line_number=asrt.equals(1))),
    ]
    for src_line, tok_assertion, src_assertion in cases:
        with self.subTest(msg=repr(src_line)):
            source = remaining_source(src_line)
            parsed = sut.parse_token_or_none_on_current_line(source)
            tok_assertion.apply_with_message(self, parsed, 'token')
            src_assertion.apply_with_message(self, source, 'source')
def test_valid_token(self):
    """A plain token followed by EOL or by another token is parsed
    successfully by parse_token_or_none_on_current_line."""
    test_data = [
        ('token',
         assert_plain('token'),
         assert_source(is_at_eol=asrt.is_true,
                       has_current_line=asrt.is_true,
                       current_line_number=asrt.equals(1))),
        ('<<->> other_token',
         assert_plain('<<->>'),
         source_is_not_at_end(
             remaining_part_of_current_line=asrt.equals(' other_token'),
             current_line_number=asrt.equals(1))),
    ]
    for input_line, check_token, check_source in test_data:
        with self.subTest(msg=repr(input_line)):
            the_source = remaining_source(input_line)
            result = sut.parse_token_or_none_on_current_line(the_source)
            check_token.apply_with_message(self, result, 'token')
            check_source.apply_with_message(self, the_source, 'source')
def test_valid_token(self):
    """head exposes the first token without consuming any input:
    position must still point at the very start of the source."""
    cases = [
        ('token', assert_plain('token'),),
        ('<<->> other_token', assert_plain('<<->>'),),
    ]
    for first_line, expect_head in cases:
        with self.subTest(msg=repr(first_line)):
            token_stream = sut.TokenStream(first_line)
            expect_head.apply_with_message(self, token_stream.head, 'token')
            assert_is_not_null(self, token_stream)
            unconsumed = first_line[token_stream.position:]
            self.assertEqual(first_line,
                             unconsumed,
                             'remaining source according to the "position" attribute')
def test_single_token(self):
    """Consuming the only token of the stream should leave it null.

    NOTE(review): runs of spaces inside the case literals may have been
    collapsed by whitespace mangling of this file — confirm against VCS.
    """
    cases = [
        ('a', assert_plain('a'), ''),
        ('b ', assert_plain('b'), ''),
        ('c ', assert_plain('c'), ' '),
        ('x \n', assert_plain('x'), '\n'),
        ('x\n', assert_plain('x'), '\n'),
    ]
    for source, expect_token, rest in cases:
        with self.subTest(msg=repr(source)):
            stream = sut.TokenStream(source)
            # ACT #
            token = stream.consume()
            # ASSERT #
            expect_token.apply_with_message(self, token, 'token')
            assert_is_null(self, stream)
            self.assertEqual(rest, stream.remaining_source, 'remaining_source')
def test_single_token(self):
    """After consume() of a one-token source, the stream is null and
    remaining_source holds whatever follows the current line's tokens."""
    test_data = [
        ('a', assert_plain('a'), ''),
        ('b ', assert_plain('b'), ''),
        ('c ', assert_plain('c'), ' '),
        ('x \n', assert_plain('x'), '\n'),
        ('x\n', assert_plain('x'), '\n'),
    ]
    for src, token_check, expected_rest in test_data:
        with self.subTest(msg=repr(src)):
            token_stream = sut.TokenStream(src)
            # ACT #
            consumed = token_stream.consume()
            # ASSERT #
            token_check.apply_with_message(self, consumed, 'token')
            assert_is_null(self, token_stream)
            self.assertEqual(expected_rest,
                             token_stream.remaining_source,
                             'remaining_source')
def test_valid_token(self):
    """Reading head must be a pure look-ahead: the stream stays non-null
    and its position attribute is unchanged."""
    test_data = [
        ('token', assert_plain('token'),),
        ('<<->> other_token', assert_plain('<<->>'),),
    ]
    for line, head_check in test_data:
        with self.subTest(msg=repr(line)):
            stream = sut.TokenStream(line)
            head_check.apply_with_message(self, stream.head, 'token')
            assert_is_not_null(self, stream)
            # Nothing consumed: slicing from position gives back the input.
            rest_via_position = line[stream.position:]
            self.assertEqual(line,
                             rest_via_position,
                             'remaining source according to the "position" attribute')
def test_multiple_tokens(self):
    """Consuming the first of several tokens leaves the second as head.

    NOTE(review): runs of spaces in the case literals may have been
    collapsed by whitespace mangling of this file — confirm against VCS.
    """
    cases = [
        ('a A', assert_plain('a'), 'A', 'A'),
        ('b B', assert_plain('b'), 'B', ' B'),
        ('c C ', assert_plain('c'), 'C', ' C '),
        ('d D ', assert_plain('d'), 'D', ' D '),
        ('a A\n_', assert_plain('a'), 'A', 'A\n_'),
    ]
    for source, expect_first, second, rest in cases:
        with self.subTest(msg=repr(source)):
            # ACT #
            stream = sut.TokenStream(source)
            first = stream.consume()
            # ASSERT #
            assert_is_not_null(self, stream)
            expect_first.apply_with_message(self, first, 'consumed token')
            self.assertEqual(second, stream.head.string, 'second token')
            self.assertEqual(rest, stream.remaining_source, 'remaining_source')
def test_multiple_tokens(self):
    """After one consume() on a multi-token source, the next token is
    available via head and remaining_source reflects the un-consumed tail."""
    test_data = [
        ('a A', assert_plain('a'), 'A', 'A'),
        ('b B', assert_plain('b'), 'B', ' B'),
        ('c C ', assert_plain('c'), 'C', ' C '),
        ('d D ', assert_plain('d'), 'D', ' D '),
        ('a A\n_', assert_plain('a'), 'A', 'A\n_'),
    ]
    for src, first_check, expected_second, expected_rest in test_data:
        with self.subTest(msg=repr(src)):
            # ACT #
            token_stream = sut.TokenStream(src)
            first_token = token_stream.consume()
            # ASSERT #
            assert_is_not_null(self, token_stream)
            first_check.apply_with_message(self, first_token, 'consumed token')
            self.assertEqual(expected_second,
                             token_stream.head.string,
                             'second token')
            self.assertEqual(expected_rest,
                             token_stream.remaining_source,
                             'remaining_source')
def test_syntax_error_of_look_ahead_token(self):
    """A malformed look-ahead token (unterminated quote) must put the
    stream in the SYNTAX_ERROR look-ahead state and make it null."""
    cases = [
        ('a "A', assert_plain('a'), '"A'),
    ]
    for source, expect_token, rest in cases:
        with self.subTest(msg=repr(source)):
            # ACT #
            stream = sut.TokenStream(source)
            token = stream.consume()
            # ASSERT #
            self.assertTrue(stream.is_null, 'is_null')
            self.assertIs(LookAheadState.SYNTAX_ERROR,
                          stream.look_ahead_state,
                          'look_ahead_state')
            expect_token.apply_with_message(self, token, 'consumed token')
            self.assertEqual(rest, stream.remaining_source, 'remaining_source')
def test_valid_token(self):
    """head reports the first token of the line while remaining_source
    still holds the complete, un-consumed input.

    NOTE(review): the '..._2_space...' case names imply double spaces in
    the literals below; the source was whitespace-mangled, so the double
    spaces are reconstructed from the names — confirm against VCS.
    """
    cases = [
        ('token',
         assert_plain('token'),),
        (' token_preceded_by_space',
         assert_plain('token_preceded_by_space'),),
        ('token_followed_by_space ',
         assert_plain('token_followed_by_space'),),
        ('token_followed_by_2_space  ',
         assert_plain('token_followed_by_2_space'),),
        ('token_followed_by_other_token other_token',
         assert_plain('token_followed_by_other_token'),),
        ('token_followed_by_other_token_with_2_space_between  other_token',
         assert_plain('token_followed_by_other_token_with_2_space_between'),),
        ('<<->> other_token',
         assert_plain('<<->>'),),
        ("'single quoted'",
         assert_quoted('single quoted', "'single quoted'"),),
        ('"double quoted"',
         assert_quoted('double quoted', '"double quoted"'),),
        (" 'quoted preceded by space'",
         assert_quoted('quoted preceded by space',
                       "'quoted preceded by space'"),),
        (" 'quoted followed by space' ",
         assert_quoted('quoted followed by space',
                       "'quoted followed by space'"),),
        (" 'quoted token followed by other token' 'other_token'",
         assert_quoted('quoted token followed by other token',
                       "'quoted token followed by other token'"),),
    ]
    for first_line, expect_head in cases:
        with self.subTest(msg=repr(first_line)):
            stream = sut.TokenStream(first_line)
            expect_head.apply_with_message(self, stream.head, 'token')
            self.assertEqual(first_line,
                             stream.remaining_source,
                             'remaining source')
            assert_is_not_null(self, stream)
def test_syntax_error_of_look_ahead_token(self):
    """Consuming a valid token whose successor is syntactically invalid
    should succeed, but flag SYNTAX_ERROR on the look-ahead."""
    test_data = [
        ('a "A', assert_plain('a'), '"A'),
    ]
    for src, token_check, expected_rest in test_data:
        with self.subTest(msg=repr(src)):
            # ACT #
            token_stream = sut.TokenStream(src)
            consumed = token_stream.consume()
            # ASSERT #
            self.assertTrue(token_stream.is_null, 'is_null')
            self.assertIs(LookAheadState.SYNTAX_ERROR,
                          token_stream.look_ahead_state,
                          'look_ahead_state')
            token_check.apply_with_message(self, consumed, 'consumed token')
            self.assertEqual(expected_rest,
                             token_stream.remaining_source,
                             'remaining_source')
def test_valid_token(self):
    """Looking at head must not consume: the stream stays non-null and
    remaining_source equals the full original line.

    NOTE(review): the '..._2_space...' case names imply double spaces in
    the literals below; the source was whitespace-mangled, so the double
    spaces are reconstructed from the names — confirm against VCS.
    """
    test_data = [
        ('token',
         assert_plain('token'),),
        (' token_preceded_by_space',
         assert_plain('token_preceded_by_space'),),
        ('token_followed_by_space ',
         assert_plain('token_followed_by_space'),),
        ('token_followed_by_2_space  ',
         assert_plain('token_followed_by_2_space'),),
        ('token_followed_by_other_token other_token',
         assert_plain('token_followed_by_other_token'),),
        ('token_followed_by_other_token_with_2_space_between  other_token',
         assert_plain('token_followed_by_other_token_with_2_space_between'),),
        ('<<->> other_token',
         assert_plain('<<->>'),),
        ("'single quoted'",
         assert_quoted('single quoted', "'single quoted'"),),
        ('"double quoted"',
         assert_quoted('double quoted', '"double quoted"'),),
        (" 'quoted preceded by space'",
         assert_quoted('quoted preceded by space',
                       "'quoted preceded by space'"),),
        (" 'quoted followed by space' ",
         assert_quoted('quoted followed by space',
                       "'quoted followed by space'"),),
        (" 'quoted token followed by other token' 'other_token'",
         assert_quoted('quoted token followed by other token',
                       "'quoted token followed by other token'"),),
    ]
    for line, head_check in test_data:
        with self.subTest(msg=repr(line)):
            token_stream = sut.TokenStream(line)
            head_check.apply_with_message(self, token_stream.head, 'token')
            self.assertEqual(line,
                             token_stream.remaining_source,
                             'remaining source')
            assert_is_not_null(self, token_stream)
def test(self):
    """consume_remaining_part_of_current_line_as_string: verify both the
    returned string and the resulting stream state for single-token,
    multi-token and token-free sources."""

    def exhausted_at(pos, rest):
        # Stream with no further tokens after consuming the line.
        return assert_token_stream(
            is_null=asrt.is_true,
            look_ahead_state=asrt.is_(LookAheadState.NULL),
            position=asrt.equals(pos),
            remaining_source=asrt.equals(rest))

    cases = [
        # Single token
        ('a', 'a', exhausted_at(1, '')),
        ('b ', 'b ', exhausted_at(2, '')),
        ('x \n', 'x ', exhausted_at(2, '\n')),
        ('x\ny', 'x',
         assert_token_stream(head_token=assert_plain('y'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\ny'))),
        ('x\n y', 'x',
         assert_token_stream(head_token=assert_plain('y'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n y'))),
        # Multiple tokens
        ('a A', 'a A', exhausted_at(3, '')),
        ('a A\nb B', 'a A',
         assert_token_stream(head_token=assert_plain('b'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(3),
                             remaining_source=asrt.equals('\nb B'))),
        ('a A\ninvalid_token"', 'a A',
         assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                             is_null=asrt.is_true,
                             position=asrt.equals(3),
                             remaining_source=asrt.equals('\ninvalid_token"'))),
        # No tokens
        ('', '', exhausted_at(0, '')),
        (' ', ' ', exhausted_at(1, '')),
        (' \n', ' ', exhausted_at(1, '\n')),
        (' \n ', ' ', exhausted_at(1, '\n ')),
        (' \n"invalid quoting', ' ',
         assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n"invalid quoting'))),
    ]
    for source, expected_consumed, stream_assertion in cases:
        with self.subTest(msg=repr(source)):
            stream = sut.TokenStream(source)
            # ACT #
            consumed = stream.consume_remaining_part_of_current_line_as_string()
            # ASSERT #
            self.assertEqual(expected_consumed, consumed, 'consumed string')
            stream_assertion.apply_with_message(self, stream,
                                                'token stream after parse')
def test(self):
    """The rest of the current line is returned as a string, and the
    stream advances past it — covering single-token, multi-token,
    empty and syntax-error look-ahead sources."""
    test_data = [
        # Single token
        ('a', 'a',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals(''))),
        ('b ', 'b ',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(2),
                             remaining_source=asrt.equals(''))),
        ('x \n', 'x ',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(2),
                             remaining_source=asrt.equals('\n'))),
        ('x\ny', 'x',
         assert_token_stream(head_token=assert_plain('y'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\ny'))),
        ('x\n y', 'x',
         assert_token_stream(head_token=assert_plain('y'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n y'))),
        # Multiple tokens
        ('a A', 'a A',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(3),
                             remaining_source=asrt.equals(''))),
        ('a A\nb B', 'a A',
         assert_token_stream(head_token=assert_plain('b'),
                             look_ahead_state=asrt.is_not(LookAheadState.NULL),
                             position=asrt.equals(3),
                             remaining_source=asrt.equals('\nb B'))),
        ('a A\ninvalid_token"', 'a A',
         assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                             is_null=asrt.is_true,
                             position=asrt.equals(3),
                             remaining_source=asrt.equals('\ninvalid_token"'))),
        # No tokens
        ('', '',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(0),
                             remaining_source=asrt.equals(''))),
        (' ', ' ',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals(''))),
        (' \n', ' ',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n'))),
        (' \n ', ' ',
         assert_token_stream(is_null=asrt.is_true,
                             look_ahead_state=asrt.is_(LookAheadState.NULL),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n '))),
        (' \n"invalid quoting', ' ',
         assert_token_stream(look_ahead_state=asrt.is_(LookAheadState.SYNTAX_ERROR),
                             position=asrt.equals(1),
                             remaining_source=asrt.equals('\n"invalid quoting'))),
    ]
    for src, expected_string, stream_check in test_data:
        with self.subTest(msg=repr(src)):
            token_stream = sut.TokenStream(src)
            # ACT #
            actual_string = token_stream.consume_remaining_part_of_current_line_as_string()
            # ASSERT #
            self.assertEqual(expected_string, actual_string, 'consumed string')
            stream_check.apply_with_message(self, token_stream,
                                            'token stream after parse')