def annotate_file(config, infile, outfile, outfmt=None):
    """Parse the input cmake file and write an HTML-annotated rendering
    of it to the output file.

    ``outfmt`` selects the rendering: "page" for a complete HTML page,
    "stub" for an embeddable fragment. Any other value raises
    ``ValueError``.
    """
    content = infile.read()

    # Resolve 'auto' line endings from the actual content; mutate a
    # clone so the caller's config object is left untouched.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(__main__.detect_line_endings(content))

    tokens = lexer.tokenize(content)
    config.first_token = lexer.get_first_non_whitespace_token(tokens)

    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    parse_tree = parser.parse(tokens, parse_db)

    # Map each accepted format to the corresponding render flag.
    fullpage_by_fmt = {"page": True, "stub": False}
    if outfmt not in fullpage_by_fmt:
        raise ValueError("Invalid output format: {}".format(outfmt))
    outfile.write(
        render.get_html(parse_tree, fullpage=fullpage_by_fmt[outfmt]))
def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the
    output file.

    If ``dump`` is one of "lex", "parse", "markup" or "layout", the
    corresponding intermediate representation is written instead of the
    formatted text.
    """
    content = infile.read()

    # Resolve 'auto' line endings from the content itself; clone first
    # so the caller's config is not modified.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    tokens = lexer.tokenize(content)
    if dump == "lex":
        for tok in tokens:
            outfile.write("{}\n".format(tok))
        return
    config.first_token = lexer.get_first_non_whitespace_token(tokens)

    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    parse_tree = parser.parse(tokens, parse_db)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        formatter.dump_tree([box_tree], outfile)
        return

    # Emit a byte-order mark first when configured, then the text.
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(formatter.write_tree(box_tree, config, content))
def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the
    output file.

    If ``dump`` is 'lex', 'parse' or 'layout', that intermediate
    representation is written instead of the formatted text.
    """
    content = infile.read()

    # Resolve 'auto' line endings from the content itself; clone first
    # so the caller's config is not modified.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    tokens = lexer.tokenize(content)
    if dump == 'lex':
        for tok in tokens:
            outfile.write('{}\n'.format(tok))
        return
    config.first_token = lexer.get_first_non_whitespace_token(tokens)

    parse_tree = parser.parse(tokens, config.fn_spec)
    if dump == 'parse':
        parser.dump_tree([parse_tree], outfile)
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == 'layout':
        formatter.dump_tree([box_tree], outfile)
        return

    outfile.write(formatter.write_tree(box_tree, config, content))
def do_type_test(self, input_str, expect_tree):
    """Parse ``input_str`` into an fst and assert that the node types
    match the ``expect_tree`` tuple tree.
    """
    stream = lexer.tokenize(input_str)
    root = parser.parse(stream, self.config.fn_spec)
    assert_tree_type(self, [root], expect_tree)
def assert_parse(test, input_str, expect_tree):
    """Parse ``input_str`` into an fst and assert that the result
    matches the ``expect_tree`` tuple tree.
    """
    stream = lexer.tokenize(input_str)
    root = parser.parse(stream, test.parse_db)
    assert_parse_tree(test, [root], expect_tree)
def do_layout_test(self, input_str, expect_tree, strip_len=6):
    """Run the formatter pipeline on ``input_str`` (after stripping
    ``strip_len`` columns of indent) and assert that the resulting
    layout tree matches ``expect_tree``.
    """
    source = strip_indent(input_str, strip_len)
    parse_tree = parser.parse(lexer.tokenize(source), self.config.fn_spec)
    box_tree = formatter.layout_tree(parse_tree, self.config)
    assert_tree(self, [box_tree], expect_tree)
def assert_layout(test, input_str, expect_tree, strip_len=0):
    """Run the formatter pipeline on ``input_str`` (after stripping
    ``strip_len`` columns of indent) and assert that the resulting
    layout tree matches ``expect_tree``.
    """
    source = strip_indent(input_str, strip_len)
    parse_tree = parser.parse(lexer.tokenize(source), test.parse_db)
    box_tree = formatter.layout_tree(parse_tree, test.config)
    assert_layout_tree(test, [box_tree], expect_tree)
def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the
    output file.

    ``dump`` selects an intermediate representation to emit instead of
    the formatted text: "lex", "parse", "markup", "html-page",
    "html-stub" or "layout".
    """
    content = infile.read()

    # Resolve 'auto' line endings from the content itself; clone first
    # so the caller's config is not modified.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    tokens = lexer.tokenize(content)
    if dump == "lex":
        for tok in tokens:
            outfile.write("{}\n".format(tok))
        return
    config.first_token = lexer.get_first_non_whitespace_token(tokens)

    parse_tree = parser.parse(tokens, config.fn_spec)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return
    if dump in ("html-page", "html-stub"):
        html = render.get_html(parse_tree, fullpage=(dump == "html-page"))
        outfile.write(html)
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        # NOTE(review): nothing reads from infile after this rewind in
        # this function — presumably a leftover; confirm with callers
        # before removing.
        infile.seek(0)
        formatter.dump_tree([box_tree], outfile)
        return

    # Emit a byte-order mark first when configured, then the text.
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(formatter.write_tree(box_tree, config, content))
def process_file(config, infile_content, dump=None, extra=None):
    """Parse the input cmake listfile content, re-format it, and return
    the result as a string.

    ``dump`` selects an intermediate representation to return instead
    of the formatted text: "lex", "parse", "markup" or "layout". If
    ``extra`` is a dict, the layout tree's ``reflow_valid`` flag is
    stored under the key "reflow_valid".
    """
    # Collect dump output in a string buffer; newline='' preserves the
    # configured line endings verbatim.
    sink = io.StringIO(newline='')

    # Resolve 'auto' line endings from the content itself; clone first
    # so the caller's config is not modified.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(infile_content))

    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for tok in tokens:
            sink.write("{}\n".format(tok))
        return sink.getvalue()
    config.first_token = lexer.get_first_non_whitespace_token(tokens)

    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    parse_tree = parser.parse(tokens, parse_db)
    if dump == "parse":
        parser.dump_tree([parse_tree], sink)
        return sink.getvalue()
    if dump == "markup":
        dump_markup([parse_tree], config, sink)
        return sink.getvalue()

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        formatter.dump_tree([box_tree], sink)
        return sink.getvalue()

    if extra is not None:
        extra["reflow_valid"] = box_tree.reflow_valid

    outstr = formatter.write_tree(box_tree, config, infile_content)
    # Prepend a byte-order mark when configured.
    if config.emit_byteorder_mark:
        return "\ufeff" + outstr
    return outstr