def setUp(self):
    self.config.fn_spec.add(
        'foo',
        flags=['BAR', 'BAZ'],
        kwargs={
            "HEADERS": '*',
            "SOURCES": '*',
            "DEPENDS": '*'
        })
    self.parse_db.update(
        parse_funs.get_legacy_parse(self.config.fn_spec).kwargs)

def setUp(self):
    self.config = configuration.Configuration()
    parse_db = parse_funs.get_parse_db()
    self.parse_ctx = parse.ParseContext(parse_db)
    self.config.parse.fn_spec.add(
        'foo',
        flags=['BAR', 'BAZ'],
        kwargs={
            "HEADERS": '*',
            "SOURCES": '*',
            "DEPENDS": '*'
        })
    self.parse_ctx.parse_db.update(
        parse_funs.get_legacy_parse(self.config.parse.fn_spec).kwargs)

def setUp(self):
    self.config.fn_spec.add(
        'foo',
        flags=['BAR', 'BAZ'],
        kwargs={
            "HEADERS": '*',
            "SOURCES": '*',
            "DEPENDS": '*'
        })
    self.parse_db.update(
        parse_funs.get_legacy_parse(self.config.fn_spec).kwargs)

    # Wrap every test_XXX method with the runner helper. Test methods live on
    # the class, not the instance, so introspect with dir()/getattr() rather
    # than vars(self), which would miss them.
    for name in dir(self):
        if not name.startswith("test_"):
            continue
        value = getattr(self, name)
        if callable(value):
            setattr(self, name, WrapTestWithRunFun(self, value))

def process_file(config, infile_content):
    """
    Parse the input cmake file, return the parse tree
    """
    if config.format.line_ending == 'auto':
        detected = __main__.detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)

    tokens = lexer.tokenize(infile_content)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
    ctx = parse.ParseContext(parse_db, config=config)
    parse_tree = parse.parse(tokens, ctx)
    parse_tree.build_ancestry()
    return parse_tree

def process_file(config, local_ctx, infile_content):
    """
    Parse the input cmake file and run lint checks over its content and
    parse tree.
    """
    if config.format.line_ending == 'auto':
        detected = __main__.detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)

    basic_checker.check_basics(config, local_ctx, infile_content)
    tokens = lexer.tokenize(infile_content)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
    ctx = parse.ParseContext(parse_db, local_ctx, config)
    parse_tree = parse.parse(tokens, ctx)
    parse_tree.build_ancestry()
    basic_checker.check_parse_tree(config, local_ctx, parse_tree)

def test_all_commands_in_db(self):
    missing_commands = []
    proc = subprocess.Popen(
        ["cmake", "--help-command-list"], stdout=subprocess.PIPE)

    parse_db = parse_funs.get_parse_db()
    parse_db.update(
        parse_funs.get_legacy_parse(commands.get_fn_spec()).kwargs)
    ignore = IGNORE_LIST

    # Collect every cmake command that has neither a parse specification nor
    # an entry in the ignore list.
    with proc.stdout as infile:
        for line in infile:
            command = line.strip().decode("utf-8")
            if command not in parse_db and command not in ignore:
                missing_commands.append(command)
    proc.wait()

    message = "Missing commands:\n " + "\n ".join(sorted(missing_commands))
    self.assertFalse(bool(missing_commands), msg=message)

def process_file(config, infile_content, dump=None):
    """
    Parse the input cmake file, re-format it, and return the formatted text
    (or the requested intermediate dump) along with the reflow-validity flag.
    """
    outfile = io.StringIO(newline='')
    if config.format.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.format.set_line_ending(detected)

    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return outfile.getvalue(), True

    first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
    ctx = parse.ParseContext(parse_db, config=config)
    parse_tree = parse.parse(tokens, ctx)

    if dump == "parse":
        dump_parse([parse_tree], outfile)
        return outfile.getvalue(), True
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return outfile.getvalue(), True

    box_tree = formatter.layout_tree(
        parse_tree, config, first_token=first_token)
    if dump == "layout":
        formatter.dump_tree([box_tree], outfile)
        return outfile.getvalue(), True

    outstr = formatter.write_tree(box_tree, config, infile_content)
    if config.encode.emit_byteorder_mark:
        outstr = "\ufeff" + outstr
    return (outstr, box_tree.reflow_valid)

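# A minimal usage sketch (not from the source) showing how the dump-aware
# process_file() above might be driven. The import location below is an
# assumption about the package layout; only the process_file() signature and
# configuration.Configuration() are taken from the snippets in this section.
import io

from cmake_format import configuration  # assumed module path

with io.open("CMakeLists.txt", encoding="utf-8") as infile:
    infile_content = infile.read()

config = configuration.Configuration()

# dump=None runs the whole pipeline (lex, parse, layout, write); the second
# element of the return value is the formatter's box_tree.reflow_valid flag.
outstr, reflow_valid = process_file(config, infile_content, dump=None)
print(outstr)
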
def annotate_file(config, infile, outfile, outfmt=None):
    """
    Parse the input cmake file and write an annotated HTML rendering of it to
    the output file.
    """
    infile_content = infile.read()
    if config.format.line_ending == 'auto':
        detected = __main__.detect_line_endings(infile_content)
        config = config.clone()
        config.format.set_line_ending(detected)

    tokens = lexer.tokenize(infile_content)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
    ctx = parse.ParseContext(parse_db)
    parse_tree = parse.parse(tokens, ctx)

    if outfmt == "page":
        html_content = render.get_html(parse_tree, fullpage=True)
        outfile.write(html_content)
        return
    if outfmt == "stub":
        html_content = render.get_html(parse_tree, fullpage=False)
        outfile.write(html_content)
        return
    if outfmt == "iframe":
        # Wrap the rendered page in the embed template: emit the template
        # lines before the content slot, then the content, then the rest.
        html_content = render.get_html(parse_tree, fullpage=True)
        wrap_lines = EMBED_TPL.split("\n")
        for line in wrap_lines[:2]:
            outfile.write(line)
            outfile.write("\n")
        outfile.write(html_content)
        for line in wrap_lines[3:]:
            outfile.write(line)
            outfile.write("\n")
        return
    raise ValueError("Invalid output format: {}".format(outfmt))

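# A similar sketch (again, not from the source) exercising annotate_file()
# entirely in memory via io.StringIO. Configuration() and the outfmt values
# come from the snippets above; the import path and input are illustrative.
import io

from cmake_format import configuration  # assumed module path

config = configuration.Configuration()
infile = io.StringIO("add_library(foo STATIC foo.cc)\n")
outfile = io.StringIO()

# "stub" writes just the annotated markup; "page" wraps it in a full HTML
# document, and "iframe" additionally wraps it in the embed template.
annotate_file(config, infile, outfile, outfmt="stub")
html_fragment = outfile.getvalue()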