def test_insert(self):
    """parser.insert should splice an info block in after the given node,
    leaving the rest of the token chain equal to the expected layout."""
    expected_md = [
        {'h2': '##'},
        {'text_line': 'this is h2'},
        {'code_start': '```c'},
        {'text_line': 'int main() {'},
        {'text_line': '    return 0'},
        {'text_line': '}'},
        {'code_end': '```'},
        {'text_line': ''},
        {'h2': '##'},
        {'text_line': 'error info'},
        {'code_start': '```'},
        {'text_line': 'this is error info'},
        {'code_end': '```'},
        {'text_line': ''},
        {'text_line': 'just a comment'},
    ]
    token_list = cga.make_token_list(self.md)
    expected_token_list = cga.make_token_list(expected_md)
    info_block = ca.build_block('this is error info')
    # Insert right after the closing fence of the first code block.
    parser.insert(token_list[6], info_block)
    self.assertEqual(expected_token_list[6], token_list[6])
    self.assertEqual(expected_token_list[0], token_list[0])
def test_make_token_list(self):
    """make_token_list should produce TokenNodes chained via .next in order."""
    tokens = [
        {'h2': '##'},
        {'text_line': 'this is h2'},
        {'code_start': '```c'},
        {'text_line': 'int main() {'},
        {'text_line': '    return 0;'},
        {'text_line': '}'},
        {'code_end': '```'},
    ]
    expected = [cga.TokenNode(tok) for tok in tokens]
    # Link each node to its successor (last node keeps its default next).
    for current, successor in zip(expected, expected[1:]):
        current.next = successor
    nodelist = cga.make_token_list(tokens)
    self.assertListEqual(expected, nodelist)
def test_head_and_end(self):
    """CodeBlock.head/.end should track the first and last appended nodes."""
    path = write2file(self.md)
    node_list = cga.make_token_list(lex(path))
    code_block = parser.CodeBlock(parser.CODE_TYPE.C)
    for node in node_list:
        code_block.append(node)
    self.assertEqual(node_list[0], code_block.head)
    self.assertEqual(node_list[-1], code_block.end)
def execute_cga(i_file_path: str, o_file_path: str) -> None:
    """Run the full pipeline on one markdown file.

    Lexes *i_file_path*, hunts down every code block, compiles each one
    with gcc, splices the compiler output in right after the block, and
    writes the augmented document to *o_file_path*.
    """
    node_list = make_token_list(ds.lex(i_file_path))
    for code_block in parser.hunt(node_list):
        info_block = ca.build_block(ca.gcc(code_block.code))
        parser.insert(code_block.end, info_block)
    gener.write_to_md(node_list, o_file_path)
def test_write_to_md_without_inserted(self):
    """write_to_md should render an untouched token list back to markdown."""
    node_list = cga.make_token_list(self.md)
    target = util.write2file('')
    gener.write_to_md(node_list, target)
    # The `with` statement closes the file on exit; the original's explicit
    # f.close() inside the block was redundant and has been removed.
    with open(target, mode='r', encoding='utf-8') as f:
        res = f.read()
    expected = [
        '## this is h2',
        '```c',
        'int main() {',
        '    return 0',
        '}',
        '```',
    ]
    self.assertEqual('\n'.join(expected) + '\n', res)
def test_hunt(self):
    """hunt should find exactly one C code block in self.md."""
    node_list = cga.make_token_list(self.md)
    # Materialize in case hunt returns a lazy iterable, so len() works.
    prey = list(parser.hunt(node_list))
    block = parser.CodeBlock(parser.CODE_TYPE.C)
    for token in [
        {'code_start': '```c'},
        {'text_line': 'int main() {'},
        {'text_line': '    return 0'},
        {'text_line': '}'},
        {'code_end': '```'},
    ]:
        block.append(cga.TokenNode(token))
    expect = [block]
    # zip() alone silently truncates to the shorter sequence, so extra
    # (unexpected) blocks in `prey` would go unchecked — compare lengths too.
    self.assertEqual(len(expect), len(prey))
    for f, s in zip(expect, prey):
        self.assertEqual(f, s)
def test_code(self):
    """CodeBlock.code should join the inner text lines, excluding fences."""
    md_lines = [
        '```c',
        'int main() {',
        '    printf("hello!");',
        'return 0;',
        '}',
        '```',
    ]
    path = write2file('\n'.join(md_lines) + '\n')
    code_block = parser.CodeBlock(parser.CODE_TYPE.C)
    for node in cga.make_token_list(lex(path)):
        code_block.append(node)
    expected = 'int main() {\n    printf("hello!");\nreturn 0;\n}\n'
    self.assertEqual(expected, code_block.code)
def test_insert_tokens(self):
    """insert_tokens should splice a run of raw tokens in after a node and
    keep the chain terminated (last node's .next stays None)."""
    token_list = cga.make_token_list(self.md)
    expected_tokens = [
        {'h2': '##'},
        {'text_line': 'this is h2'},
        {'code_start': '```c'},
        {'text_line': 'int main() {'},
        {'h2': 'error info'},
        {'code_start': '```'},
        {'text_line': 'this is error info'},
        {'code_end': '```'},
        {'text_line': '    return 0'},
        {'text_line': '}'},
        {'code_end': '```'},
        {'text_line': 'just a comment'},
    ]
    expected_token_list = cga.make_token_list(expected_tokens)
    # Before the insert the chains must differ.
    self.assertNotEqual(expected_token_list[0], token_list[0])
    tokens_for_inserting = [
        {'h2': 'error info'},
        {'code_start': '```'},
        {'text_line': 'this is error info'},
        {'code_end': '```'},
    ]
    parser.insert_tokens(token_list[3], tokens_for_inserting)
    # After the insert the chains must match from the head.
    self.assertEqual(expected_token_list[0], token_list[0])
    # Walk past the insertion point: one linked node per inserted token.
    cursor = token_list[3]
    for _ in tokens_for_inserting:
        self.assertIsNotNone(cursor.next)
        cursor = cursor.next
    self.assertIsNone(token_list[-1].next)