def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the output file.

    When *dump* names an intermediate stage ("lex", "parse", "markup",
    "layout"), write that stage's representation to *outfile* and stop
    instead of producing formatted text.
    """
    content = infile.read()

    # Resolve 'auto' line endings from the actual file content; clone so the
    # caller's config object is not mutated.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    token_stream = lexer.tokenize(content)
    if dump == "lex":
        for tok in token_stream:
            outfile.write("{}\n".format(tok))
        return

    config.first_token = lexer.get_first_non_whitespace_token(token_stream)

    # Build the parse database, folding in legacy per-function kwargs.
    db = parse_funs.get_parse_db()
    db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    tree = parser.parse(token_stream, db)

    if dump == "parse":
        parser.dump_tree([tree], outfile)
        return
    if dump == "markup":
        dump_markup([tree], config, outfile)
        return

    layout = formatter.layout_tree(tree, config)
    if dump == "layout":
        formatter.dump_tree([layout], outfile)
        return

    rendered = formatter.write_tree(layout, config, content)
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(rendered)
def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the output file.

    If *dump* is "lex", "parse", or "layout", emit that intermediate
    representation to *outfile* and return early.
    """
    content = infile.read()

    # 'auto' means: detect line endings from the input; work on a clone so the
    # caller's config is left untouched.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    token_stream = lexer.tokenize(content)
    if dump == 'lex':
        for tok in token_stream:
            outfile.write('{}\n'.format(tok))
        return

    config.first_token = lexer.get_first_non_whitespace_token(token_stream)
    tree = parser.parse(token_stream, config.fn_spec)
    if dump == 'parse':
        parser.dump_tree([tree], outfile)
        return

    layout = formatter.layout_tree(tree, config)
    if dump == 'layout':
        formatter.dump_tree([layout], outfile)
        return

    outfile.write(formatter.write_tree(layout, config, content))
def process_file(config, infile, outfile, dump=None):
    """Parse the input cmake file, re-format it, and print to the output file.

    If *dump* names an intermediate stage ("lex", "parse", "markup",
    "html-page", "html-stub", "layout"), write that stage's representation to
    *outfile* and return without emitting formatted text.
    """
    infile_content = infile.read()

    # Resolve 'auto' line endings from the file content; clone so the caller's
    # config object is not mutated.
    if config.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)

    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return

    config.first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_tree = parser.parse(tokens, config.fn_spec)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return
    if dump == "html-page":
        outfile.write(render.get_html(parse_tree, fullpage=True))
        return
    if dump == "html-stub":
        outfile.write(render.get_html(parse_tree, fullpage=False))
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        # NOTE: removed a stray `infile.seek(0)` here — the input was already
        # fully read into `infile_content` and `infile` is never read again,
        # so the seek was dead code with no effect on the dump.
        formatter.dump_tree([box_tree], outfile)
        return

    text = formatter.write_tree(box_tree, config, infile_content)
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(text)
def process_file(config, infile_content, dump=None, extra=None):
    """Parse the input cmake file, re-format it, and print to the output file.

    Returns the resulting text as a string. If *dump* names an intermediate
    stage ("lex", "parse", "markup", "layout"), the returned string is that
    stage's dump instead of formatted output. If *extra* is a dict, the
    layout tree's ``reflow_valid`` flag is stored under "reflow_valid".
    """
    # Dump targets write into an in-memory buffer; newline='' preserves the
    # configured line endings verbatim.
    buffer = io.StringIO(newline='')

    # 'auto' line endings: detect from content, on a clone so the caller's
    # config object stays untouched.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(infile_content))

    token_stream = lexer.tokenize(infile_content)
    if dump == "lex":
        for tok in token_stream:
            buffer.write("{}\n".format(tok))
        return buffer.getvalue()

    config.first_token = lexer.get_first_non_whitespace_token(token_stream)

    # Build the parse database, folding in legacy per-function kwargs.
    db = parse_funs.get_parse_db()
    db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    tree = parser.parse(token_stream, db)

    if dump == "parse":
        parser.dump_tree([tree], buffer)
        return buffer.getvalue()
    if dump == "markup":
        dump_markup([tree], config, buffer)
        return buffer.getvalue()

    layout = formatter.layout_tree(tree, config)
    if dump == "layout":
        formatter.dump_tree([layout], buffer)
        return buffer.getvalue()

    if extra is not None:
        extra["reflow_valid"] = layout.reflow_valid

    rendered = formatter.write_tree(layout, config, infile_content)
    return "\ufeff" + rendered if config.emit_byteorder_mark else rendered