def test_tokens_of_different_classes_are_not_equal(self):
    # see http://bugs.python.org/issue16279
    self.assertFalse(
        condent.NonDelimiter(content=12) ==
        condent.RightDelimiter(delimiter=12)
    )
    self.assertNotEqual(
        condent.NonDelimiter(content=12),
        condent.RightDelimiter(delimiter=12),
    )

def test_it_creates_right_delimiter_tokens(self):
    one, another = mock.Mock(), mock.Mock()
    parsed = iter([one, ">", another])
    tokens = condent.tokenize(parsed, self.left_delims, self.right_delims)
    self.assertEqual(list(tokens), [
        condent.NonDelimiter(content=one),
        condent.RightDelimiter(delimiter=">"),
        condent.NonDelimiter(content=another),
    ])

def test_it_empties_the_stack_when_done(self):
    tokens = [
        condent.LeftDelimiter(before="foo", delimiter="("),
        condent.NonDelimiter(content="bar"),
        condent.LeftDelimiter(before="", delimiter="("),
        condent.NonDelimiter(content="20"),
    ]
    got = self.condenter.redent([tokens])
    self.assertEqual("".join(got), "foo(bar(20")

def test_it_empties_the_stack_when_partially_done(self):
    def side_effect(before, left, items, right):
        return before + left + "".join(items) + right
    self.builder.build.side_effect = side_effect

    tokens = [
        condent.LeftDelimiter(before="foo", delimiter="("),
        condent.NonDelimiter(content="bar"),
        condent.LeftDelimiter(before="", delimiter="("),
        condent.NonDelimiter(content="30"),
        condent.RightDelimiter(delimiter=")"),
    ]
    got = self.condenter.redent([tokens])
    self.assertEqual("".join(got), "foo(bar(30)")

def test_it_creates_non_delimiter_tokens(self):
    content = mock.Mock()
    parsed = iter([content])
    tokens = condent.tokenize(parsed, self.left_delims, self.right_delims)
    self.assertEqual(list(tokens), [
        condent.NonDelimiter(content=content),
    ])

def test_it_creates_left_delimiter_tokens_with_before(self):
    before, content = mock.Mock(), mock.Mock()
    parsed = iter([before, "<", content])
    tokens = condent.tokenize(parsed, self.left_delims, self.right_delims)
    self.assertEqual(list(tokens), [
        condent.LeftDelimiter(before=before, delimiter="<"),
        condent.NonDelimiter(content=content),
    ])

def test_it_can_tokenize_repeated_right_delimiters(self):
    parsed = iter(["foo", "<", "bar, baz", "<", "quux", ">", ">"])
    tokens = condent.tokenize(parsed, self.left_delims, self.right_delims)
    self.assertEqual(
        list(tokens), [
            condent.LeftDelimiter(before="foo", delimiter="<"),
            condent.LeftDelimiter(before="bar, baz", delimiter="<"),
            condent.NonDelimiter(content="quux"),
            condent.RightDelimiter(delimiter=">"),
            condent.RightDelimiter(delimiter=">"),
        ]
    )