def test_unordered_line_items():
    tokenized_line = TokenizeLine(unordered_line)
    assert isinstance(tokenized_line, UnorderedItemLine)
    assert tokenized_line.nested == 0

    tokenized_line = TokenizeLine(nested_unordered_line)
    assert isinstance(tokenized_line, UnorderedItemLine)
    assert tokenized_line.nested == 2

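# Note: the unordered-item fixtures are defined elsewhere in the suite; based on
# the assertions above they are presumably shaped like (hypothetical values):
#   unordered_line = "- Hello World"           # no indent   -> nested == 0
#   nested_unordered_line = "  - Hello World"  # two spaces  -> nested == 2
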
def test_all_nested_headers():
    for position, test_str in enumerate(nested_tests):
        token = TokenizeLine(test_str)
        assert isinstance(token, HeaderLine)
        assert token.level == position + 1, (
            f"Expected {position + 1} for level but got {token.level} with {test_str!r}"
        )
        assert token.nested == 2

def test_tokenize_line_recognizes_html():
    blocks = Blocker(test_document)
    for block in blocks:
        for line in block:
            product = TokenizeLine(line)
            assert isinstance(product, (HTMLLine, CodeLine))

def test__line_tokens__OrderedItemLine__testconsume_detects_ordered_items_correctly():
    normal_test = "123. Hello World"
    expected = OrderedItemLine("Hello World", False)
    _, actual = OrderedItemLine.TestAndConsume(normal_test)
    assert actual is not None
    assert actual.content == expected.content
    assert actual.nested == 0

    nested_test = "  123. Hello World"
    expected = OrderedItemLine("Hello World", True)
    actual = TokenizeLine(nested_test)
    assert isinstance(actual, OrderedItemLine)
    assert actual.content == expected.content
    assert actual.nested == 2

def test_feeding_lines_to_tokenizeline_does_not_break():
    tokenized_blocks = []
    for raw_block in Blocker(test):
        tokenized_lines = []
        for raw_line in raw_block:
            token = TokenizeLine(raw_line, None, [Line])
            assert isinstance(token, Line)
            tokenized_lines.append(token)
        tokenized_blocks.append(tokenized_lines)

    assert len(tokenized_blocks) == 1
    assert len(tokenized_blocks[0]) == 12

def test_header_content_is_correct():
    for test_str in tests:
        token = TokenizeLine(test_str)
        assert token.content == f"h{token.level}"

def test_blockquote_detected_and_is_nested():
    tokenized_line = TokenizeLine(nested_blockquote, None, [])
    assert tokenized_line.nested is True

def test_detects_blockquote():
    tokenized_line = TokenizeLine(blockquote, None, [])
    assert isinstance(tokenized_line, QuotedLine)
    assert tokenized_line.nested is False

def test_detects_tabbed_codeline():
    tokenized_line = TokenizeLine(tabbed_code, BlankLine, [])
    assert isinstance(tokenized_line, CodeLine)

def test_nested_spaced_code():
    tokenized_line = TokenizeLine(nested_spaced_code, BlankLine, [])
    assert isinstance(tokenized_line, CodeLine)
    assert tokenized_line.nested == 2