Example #1
0
    def test_it_redents_the_examples(self):
        """Redent every example in EXAMPLE_FILE and compare the result to
        the corresponding expected output in REDENTED_FILE."""

        config = Config()
        builder = LiteralBuilder(config)
        # Wrap the views in list() so the concatenation also works on
        # Python 3, where dict.keys()/dict.values() no longer support `+`.
        delimiters = list(DELIMITERS.keys()) + list(DELIMITERS.values())
        parser = ParsesDelimiters(delimiters)

        with open(EXAMPLE_FILE) as examples, open(REDENTED_FILE) as expected:
            examples.readline(), expected.readline()  # remove modeline

            examples = [e.splitlines(True) for e in examples.read().split(SEP)]
            expected = [e.splitlines(True) for e in expected.read().split(SEP)]

            for example, expect in zip(examples, expected):
                tokens = (tokenize(parser.parse(line), DELIMITERS,
                                   DELIMITERS.values()) for line in example)

                # Bind the redented output to a fresh name instead of
                # clobbering `example`, so the source lines stay available.
                got = "".join(Condenter(builder, config).redent(tokens))
                expect = "".join(expect)

                try:
                    self.assertEqual(got, expect)
                except Exception:
                    self.dump(got, expect)
                    raise
Example #2
0
    def test_it_redents_the_examples(self):
        """Redent every example in EXAMPLE_FILE and compare the result to
        the corresponding expected output in REDENTED_FILE."""

        config = Config()
        builder = LiteralBuilder(config)
        # Wrap the views in list() so the concatenation also works on
        # Python 3, where dict.keys()/dict.values() no longer support `+`.
        delimiters = list(DELIMITERS.keys()) + list(DELIMITERS.values())
        parser = ParsesDelimiters(delimiters)

        with open(EXAMPLE_FILE) as examples, open(REDENTED_FILE) as expected:
            examples.readline(), expected.readline()  # remove modeline

            examples = [e.splitlines(True) for e in examples.read().split(SEP)]
            expected = [e.splitlines(True) for e in expected.read().split(SEP)]

            for example, expect in zip(examples, expected):
                tokens = (
                    tokenize(parser.parse(line), DELIMITERS, DELIMITERS.values())
                    for line in example
                )

                got = "".join(Condenter(builder, config).redent(tokens))
                expect = "".join(expect)

                try:
                    self.assertEqual(got, expect)
                except Exception:
                    # Dump the untouched source lines next to got/expected
                    # so a failing example is easy to reproduce.
                    self.dump(example, got, expect)
                    raise
Example #3
0
    def test_it_creates_non_delimiter_tokens(self):
        """Content containing no delimiter yields one NonDelimiter token."""
        chunk = mock.Mock()
        stream = iter([chunk])

        tokens = condent.tokenize(stream, self.left_delims, self.right_delims)

        expected = [condent.NonDelimiter(content=chunk)]
        self.assertEqual(list(tokens), expected)
Example #4
0
    def test_it_creates_left_delimiter_tokens_with_before(self):
        """A left delimiter carries the content that preceded it."""
        prefix, chunk = mock.Mock(), mock.Mock()
        stream = iter([prefix, "<", chunk])

        tokens = condent.tokenize(stream, self.left_delims, self.right_delims)

        expected = [
            condent.LeftDelimiter(before=prefix, delimiter="<"),
            condent.NonDelimiter(content=chunk),
        ]
        self.assertEqual(list(tokens), expected)
Example #5
0
    def test_it_creates_non_delimiter_tokens(self):
        """A lone piece of content becomes a single NonDelimiter token."""
        piece = mock.Mock()
        source = iter([piece])

        result = condent.tokenize(source, self.left_delims, self.right_delims)

        want = [condent.NonDelimiter(content=piece)]
        self.assertEqual(list(result), want)
Example #6
0
    def test_it_creates_right_delimiter_tokens(self):
        """A right delimiter separates the surrounding content tokens."""
        first, second = mock.Mock(), mock.Mock()
        stream = iter([first, ">", second])

        tokens = condent.tokenize(stream, self.left_delims, self.right_delims)

        expected = [
            condent.NonDelimiter(content=first),
            condent.RightDelimiter(delimiter=">"),
            condent.NonDelimiter(content=second),
        ]
        self.assertEqual(list(tokens), expected)
Example #7
0
    def test_it_creates_left_delimiter_tokens_with_before(self):
        """The text preceding a left delimiter rides along on its token."""
        leading, body = mock.Mock(), mock.Mock()
        source = iter([leading, "<", body])

        result = condent.tokenize(source, self.left_delims, self.right_delims)

        want = [
            condent.LeftDelimiter(before=leading, delimiter="<"),
            condent.NonDelimiter(content=body),
        ]
        self.assertEqual(list(result), want)
Example #8
0
    def test_it_creates_right_delimiter_tokens(self):
        """A right delimiter yields its own token between content tokens."""
        head, tail = mock.Mock(), mock.Mock()
        source = iter([head, ">", tail])

        result = condent.tokenize(source, self.left_delims, self.right_delims)

        want = [
            condent.NonDelimiter(content=head),
            condent.RightDelimiter(delimiter=">"),
            condent.NonDelimiter(content=tail),
        ]
        self.assertEqual(list(result), want)
Example #9
0
    def test_it_can_tokenize_repeated_right_delimiters(self):
        """Back-to-back closing delimiters each produce a RightDelimiter."""
        source = iter(["foo", "<", "bar, baz", "<", "quux", ">", ">"])

        result = condent.tokenize(source, self.left_delims, self.right_delims)

        want = [
            condent.LeftDelimiter(before="foo", delimiter="<"),
            condent.LeftDelimiter(before="bar, baz", delimiter="<"),
            condent.NonDelimiter(content="quux"),
            condent.RightDelimiter(delimiter=">"),
            condent.RightDelimiter(delimiter=">"),
        ]
        self.assertEqual(list(result), want)