示例#1
0
def process_file(config, infile, outfile, dump=None):
    """Reformat a cmake listfile read from `infile` and write the result
    to `outfile`.

    When `dump` names an intermediate stage ("lex", "parse", "markup",
    or "layout") that stage is written to `outfile` instead and the
    function returns early.
    """
    content = infile.read()
    # Resolve 'auto' line endings against the actual input content; clone
    # the config so the caller's copy is not mutated.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(content))

    token_stream = lexer.tokenize(content)
    if dump == "lex":
        for tok in token_stream:
            outfile.write("{}\n".format(tok))
        return

    config.first_token = lexer.get_first_non_whitespace_token(token_stream)
    database = parse_funs.get_parse_db()
    database.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    tree = parser.parse(token_stream, database)
    if dump == "parse":
        parser.dump_tree([tree], outfile)
        return
    if dump == "markup":
        dump_markup([tree], config, outfile)
        return

    layout = formatter.layout_tree(tree, config)
    if dump == "layout":
        formatter.dump_tree([layout], outfile)
        return

    rendered = formatter.write_tree(layout, config, content)
    # Prepend a BOM if the configuration asks for one.
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(rendered)
示例#2
0
def annotate_file(config, infile, outfile, outfmt=None):
    """Parse the cmake listfile from `infile` and write an HTML rendering
    of it to `outfile`.

    `outfmt` selects the flavor: "page" for a standalone document or
    "stub" for an embeddable fragment.  Anything else raises ValueError.
    """
    content = infile.read()
    # Resolve 'auto' line endings from the input; clone so the caller's
    # config object is left untouched.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(__main__.detect_line_endings(content))

    token_stream = lexer.tokenize(content)
    config.first_token = lexer.get_first_non_whitespace_token(token_stream)
    database = parse_funs.get_parse_db()
    database.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    tree = parser.parse(token_stream, database)

    # Map the output format onto the renderer's fullpage flag.
    fullpage_by_fmt = {"page": True, "stub": False}
    if outfmt not in fullpage_by_fmt:
        raise ValueError("Invalid output format: {}".format(outfmt))
    outfile.write(render.get_html(tree, fullpage=fullpage_by_fmt[outfmt]))
示例#3
0
def main():
    """Ask cmake for its command list and emit parser stubs (plus a
    populate_db() registration function) for any commands missing from
    the parse database."""
    # NOTE: local renamed from `parser` to avoid shadowing module names.
    argparser = argparse.ArgumentParser(description=__doc__)
    argparser.add_argument("outfile", nargs="?", default="-")
    args = argparser.parse_args()

    proc = subprocess.Popen(["cmake", "--help-command-list"],
                            stdout=subprocess.PIPE)

    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(commands.get_fn_spec()).kwargs)

    # Collect commands cmake knows about but the parse db does not,
    # skipping the end*() closers.
    missing = []
    with proc.stdout as cmdlist:
        for raw_line in cmdlist:
            name = raw_line.strip().decode("utf-8")
            if name not in parse_db and not name.startswith("end"):
                missing.append(name)
    proc.wait()

    outpath = args.outfile
    if outpath == "-":
        # dup() so closing our wrapper does not close the real stdout.
        outpath = os.dup(sys.stdout.fileno())

    with io.open(outpath, "w", encoding="utf-8") as outfile:
        for name in missing:
            outfile.write(TEMPLATE.format(name))

        outfile.write("\n\ndef populate_db(parse_db):\n")
        for name in missing:
            outfile.write('  parse_db["{0}"] = parse_{0}\n'.format(name))
示例#4
0
def parse(tokens, parse_db=None):
    """Digest `tokens` into blocks and return the result of laying out
    the digested blocks.

    If `parse_db` is omitted the default database is loaded lazily.
    """
    db = parse_db
    if db is None:
        # Deferred import keeps module load light and avoids cycles.
        from cmake_format import parse_funs
        db = parse_funs.get_parse_db()
    return consume_body(tokens, db)
示例#5
0
    def __init__(self, parse_db=None, lint_ctx=None, config=None):
        """Initialize the context, constructing a default for each
        collaborator that was not supplied."""
        if parse_db is None:
            # Deferred import avoids a module-level import cycle.
            from cmake_format import parse_funs
            parse_db = parse_funs.get_parse_db()
        if lint_ctx is None:
            lint_ctx = MockEverything()
        if config is None:
            config = configuration.Configuration()

        self.parse_db = parse_db
        self.lint_ctx = lint_ctx
        self.config = config
示例#6
0
    def setUp(self):
        """Build a fresh configuration and parse context, then register a
        custom 'foo' command for the tests to exercise."""
        self.config = configuration.Configuration()
        self.parse_ctx = parse.ParseContext(parse_funs.get_parse_db())

        # Declare the custom command's flags and kwargs in the fn_spec...
        self.config.parse.fn_spec.add(
            'foo',
            flags=['BAR', 'BAZ'],
            kwargs={"HEADERS": '*', "SOURCES": '*', "DEPENDS": '*'})

        # ...and fold the resulting function tree into the parse db.
        self.parse_ctx.parse_db.update(
            parse_funs.get_funtree(self.config.parse.fn_spec))
示例#7
0
def process_file(config, infile_content):
  """Tokenize and parse the given cmake source text, returning the parse
  tree with ancestry links populated."""
  # Resolve 'auto' line endings; clone so the caller's config is not
  # modified.
  if config.format.line_ending == 'auto':
    config = config.clone()
    config.set_line_ending(__main__.detect_line_endings(infile_content))

  db = parse_funs.get_parse_db()
  db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
  ctx = parse.ParseContext(db, config=config)
  tree = parse.parse(lexer.tokenize(infile_content), ctx)
  tree.build_ancestry()
  return tree
示例#8
0
def process_file(config, local_ctx, infile_content):
    """Run lint checks over the given cmake source text: basic textual
    checks first, then checks against the parse tree.  Findings are
    recorded through `local_ctx`."""
    # Resolve 'auto' line endings; clone so the caller's config is not
    # modified.
    if config.format.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(__main__.detect_line_endings(infile_content))

    basic_checker.check_basics(config, local_ctx, infile_content)

    db = parse_funs.get_parse_db()
    db.update(parse_funs.get_legacy_parse(config.parse.fn_spec).kwargs)
    ctx = parse.ParseContext(db, local_ctx, config)
    tree = parse.parse(lexer.tokenize(infile_content), ctx)
    tree.build_ancestry()
    basic_checker.check_parse_tree(config, local_ctx, tree)
示例#9
0
    def __init__(self, parse_db=None, lint_ctx=None, config=None):
        """Initialize the context, constructing a default for each
        collaborator that was not supplied."""
        if parse_db is None:
            # Deferred imports avoid module-level import cycles.
            from cmake_format import parse_funs
            parse_db = parse_funs.get_parse_db()
        if lint_ctx is None:
            lint_ctx = MockEverything()
        if config is None:
            from cmake_format import configuration
            config = configuration.Configuration()

        self.parse_db = parse_db
        self.lint_ctx = lint_ctx
        self.config = config

        # List of currently open parse nodes. Only used by nodes below
        # the statement level.
        self.argstack = []
示例#10
0
  def test_all_commands_in_db(self):
    """Assert that every command cmake reports is either present in the
    parse database or explicitly listed in IGNORE_LIST."""
    proc = subprocess.Popen(
        ["cmake", "--help-command-list"],
        stdout=subprocess.PIPE)

    db = parse_funs.get_parse_db()

    missing = []
    with proc.stdout as pipe:
      for raw in pipe:
        name = raw.strip().decode("utf-8")
        if name not in db and name not in IGNORE_LIST:
          missing.append(name)
    proc.wait()

    message = "Missing commands:\n  " + "\n  ".join(sorted(missing))
    self.assertFalse(bool(missing), msg=message)
示例#11
0
    def __init__(self, *args, **kwargs):
        """Prepare shared test fixtures and wrap every test_XXX method so
        expectations are checked automatically after it runs."""
        super(TestBase, self).__init__(*args, **kwargs)
        self.config = configuration.Configuration()
        self.parse_db = parse_funs.get_parse_db()
        self.source_str = None
        self.expect_lex = None
        self.expect_parse = None
        self.expect_layout = None
        self.expect_format = None

        # NOTE(josh): hacky introspective way of automatically calling
        # assertExpectations() at the end of every test_XXX() function
        for attr_name in dir(self):
            if not attr_name.startswith("test_"):
                continue
            member = getattr(self, attr_name)
            if callable(member):
                setattr(self, attr_name, WrapTestWithRunFun(self, member))
示例#12
0
def process_file(config, infile_content, dump=None):
    """Reformat cmake source text and return a (text, valid) pair, where
    `valid` reports whether the layout reflow succeeded.

    If `dump` names an intermediate stage ("lex", "parsedb", "parse",
    "markup", "layout"), that stage's dump is returned instead, always
    paired with True.
    """
    buf = io.StringIO(newline='')
    # Resolve 'auto' line endings from the input; clone so the caller's
    # config object is left untouched.
    if config.format.line_ending == 'auto':
        config = config.clone()
        config.format.set_line_ending(detect_line_endings(infile_content))

    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for tok in tokens:
            buf.write("{}\n".format(tok))
        return buf.getvalue(), True

    lead_token = lexer.get_first_non_whitespace_token(tokens)
    db = parse_funs.get_parse_db()
    db.update(parse_funs.get_funtree(config.parse.fn_spec))

    if dump == "parsedb":
        dump_parsedb(db, buf)
        return buf.getvalue(), True

    ctx = parse.ParseContext(db, config=config)
    tree = parse.parse(tokens, ctx)
    if dump == "parse":
        dump_parse([tree], buf)
        return buf.getvalue(), True
    if dump == "markup":
        dump_markup([tree], config, buf)
        return buf.getvalue(), True

    layout = formatter.layout_tree(tree, config, first_token=lead_token)
    if dump == "layout":
        formatter.dump_tree([layout], buf)
        return buf.getvalue(), True

    text = formatter.write_tree(layout, config, infile_content)
    # Prepend a BOM if the configuration asks for one.
    if config.encode.emit_byteorder_mark:
        text = "\ufeff" + text

    return (text, layout.reflow_valid)
示例#13
0
def process_file(config, infile_content, dump=None, extra=None):
    """Reformat cmake source text and return the formatted string.

    If `dump` names an intermediate stage ("lex", "parse", "markup",
    "layout"), that stage's dump is returned instead.  If `extra` is a
    dict, the layout's reflow_valid flag is recorded in it.
    """
    buf = io.StringIO(newline='')
    # Resolve 'auto' line endings from the input; clone so the caller's
    # config object is left untouched.
    if config.line_ending == 'auto':
        config = config.clone()
        config.set_line_ending(detect_line_endings(infile_content))

    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for tok in tokens:
            buf.write("{}\n".format(tok))
        return buf.getvalue()

    config.first_token = lexer.get_first_non_whitespace_token(tokens)
    db = parse_funs.get_parse_db()
    db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    tree = parser.parse(tokens, db)
    if dump == "parse":
        parser.dump_tree([tree], buf)
        return buf.getvalue()
    if dump == "markup":
        dump_markup([tree], config, buf)
        return buf.getvalue()

    layout = formatter.layout_tree(tree, config)
    if dump == "layout":
        formatter.dump_tree([layout], buf)
        return buf.getvalue()

    # Report reflow validity back to the caller through the side channel.
    if extra is not None:
        extra["reflow_valid"] = layout.reflow_valid

    text = formatter.write_tree(layout, config, infile_content)
    # Prepend a BOM if the configuration asks for one.
    if config.emit_byteorder_mark:
        return "\ufeff" + text
    return text
示例#14
0
def annotate_file(config, infile, outfile, outfmt=None):
  """Parse the cmake listfile from `infile` and write an HTML rendering
  of it to `outfile`.

  `outfmt` selects the flavor: "page" (standalone document), "stub"
  (embeddable fragment), or "iframe" (full page wrapped in the embed
  template).  Anything else raises ValueError.
  """
  content = infile.read()
  # Resolve 'auto' line endings; clone so the caller's config is not
  # modified.
  if config.format.line_ending == 'auto':
    config = config.clone()
    config.format.set_line_ending(__main__.detect_line_endings(content))

  db = parse_funs.get_parse_db()
  db.update(parse_funs.get_funtree(config.parse.fn_spec))
  ctx = parse.ParseContext(db)
  tree = parse.parse(lexer.tokenize(content), ctx)

  if outfmt == "page":
    outfile.write(render.get_html(tree, fullpage=True))
    return
  if outfmt == "stub":
    outfile.write(render.get_html(tree, fullpage=False))
    return
  if outfmt == "iframe":
    # Sandwich the full-page rendering between the head and tail of the
    # embed template (the template's third line is the placeholder).
    wrap_lines = EMBED_TPL.split("\n")
    for segment in wrap_lines[:2]:
      outfile.write(segment)
      outfile.write("\n")
    outfile.write(render.get_html(tree, fullpage=True))
    for segment in wrap_lines[3:]:
      outfile.write(segment)
      outfile.write("\n")
    return

  raise ValueError("Invalid output format: {}".format(outfmt))
示例#15
0
def inner_main():
    """Parse arguments, open files, start work.

    Collects variable names from every input listfile via a
    NameCollector, then writes two per-scope reports to the output
    file: a count for each distinct variable name, and a tally of which
    naming-convention pattern each name matched.  Returns a process
    exit code (1 if any input failed to open or decode, else 0).
    """
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(message)s")

    argparser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=USAGE_STRING)

    setup_argparse(argparser)
    args = argparser.parse_args()
    logging.getLogger().setLevel(getattr(logging, args.log_level.upper()))

    # Default the output destination to stdout ("-").
    if args.outfile_path is None:
        args.outfile_path = '-'

    if '-' in args.infilepaths:
        assert len(args.infilepaths) == 1, \
            "You cannot mix stdin as an input with other input files"

    # dup() stdout's descriptor so closing the io wrapper at the end does
    # not close the real stdout.
    if args.outfile_path == '-':
        outfile = io.open(os.dup(sys.stdout.fileno()),
                          mode='w',
                          encoding="utf-8",
                          newline='')
    else:
        outfile = io.open(args.outfile_path, 'w', encoding="utf-8", newline='')

    returncode = 0

    cfg = configuration.Configuration()
    collector = NameCollector()
    for infile_path in args.infilepaths:
        # NOTE(josh): have to load config once for every file, because we may pick
        # up a new config file location for each path
        if infile_path == '-':
            infile_path = os.dup(sys.stdin.fileno())

        try:
            infile = io.open(infile_path,
                             mode='r',
                             encoding=cfg.encode.input_encoding,
                             newline='')
        except (IOError, OSError):
            # Record the failure but keep processing the remaining inputs.
            logger.error("Failed to open %s for read", infile_path)
            returncode = 1
            continue

        try:
            with infile:
                infile_content = infile.read()
        except UnicodeDecodeError:
            logger.error("Unable to read %s as %s", infile_path,
                         cfg.encode.input_encoding)
            returncode = 1
            continue

        # Parse this file and accumulate variable names from its tree.
        tokens = lexer.tokenize(infile_content)
        parse_db = parse_funs.get_parse_db()
        ctx = parse.ParseContext(parse_db, config=cfg)
        parse_tree = parse.parse(tokens, ctx)
        parse_tree.build_ancestry()
        collector.collect_names(parse_tree)

    # Naming-convention patterns used to classify each variable name.
    regexes = [
        re.compile(pattern) for pattern in [
            r"[A-Z][A-Z0-9_]+",  # upper snake-case
            r"[a-z][a-z0-9_]+",  # lower snake-case
            r"_[A-Z0-9_]+",  # upper snake-case with underscore prefix
            r"_[a-z0-9_]+",  # lower snake-case with underscore prefix
        ]
    ]

    # outmap: scope -> {varname: occurrence count}
    # patmap: scope -> {pattern repr (or "other"): count of names matching}
    outmap = {}
    patmap = {}
    for scope, varname in sorted(collector.varnames):
        if scope not in outmap:
            outmap[scope] = {}

        if scope not in patmap:
            patmap[scope] = {}
            for regex in regexes:
                patmap[scope][str(regex)] = 0
            patmap[scope]["other"] = 0

        # Tally the first pattern that matches; "other" if none do.
        for regex in regexes:
            if regex.match(varname):
                patmap[scope][str(regex)] += 1
                break
        else:
            patmap[scope]["other"] += 1

        if varname not in outmap[scope]:
            outmap[scope][varname] = 0
        outmap[scope][varname] += 1

    # Report per-scope variable-name counts.
    for scope, countmap in sorted(outmap.items()):
        outfile.write("\n{}\n{}\n".format(scope.name, "=" * len(scope.name)))
        for varname, count in sorted(countmap.items()):
            outfile.write("{}: {}\n".format(varname, count))

    # Report per-scope pattern-match counts.
    for scope, countmap in sorted(patmap.items()):
        outfile.write("\n{}\n{}\n".format(scope.name, "=" * len(scope.name)))
        for varname, count in sorted(countmap.items()):
            outfile.write("{}: {}\n".format(varname, count))

    outfile.close()
    return returncode
示例#16
0
 def __init__(self, *args, **kwargs):
     """Set up shared fixtures: a default configuration and the default
     parse database."""
     super(TestCanonicalLayout, self).__init__(*args, **kwargs)
     self.parse_db = parse_funs.get_parse_db()
     self.config = configuration.Configuration()
示例#17
0
 def __init__(self, *args, **kwargs):
   """Set up shared fixtures: a default configuration and a parse
   context built over the default parse database."""
   super(TestCanonicalParse, self).__init__(*args, **kwargs)
   self.config = configuration.Configuration()
   self.parse_ctx = parse.ParseContext(parse_funs.get_parse_db())