Code Example #1
File: gen_cmake.py  Project: honza1a/v8
def FormatCMake(contents):
    from cmake_format import configuration, lexer, parse, formatter
    cfg = configuration.Configuration()
    # Tokenize the listfile text, then build a parse tree from the token stream.
    tokens = lexer.tokenize(contents)
    parse_tree = parse.parse(tokens)
    # Lay the parse tree out into a box tree and serialize it back to formatted text.
    box_tree = formatter.layout_tree(parse_tree, cfg)
    return formatter.write_tree(box_tree, cfg, contents)
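A minimal usage sketch for the helper above, assuming the cmake_format package providing these modules is installed; the CMake snippet is a hypothetical example:

# Hypothetical input: a small, inconsistently spaced CMake snippet.
sample = "add_library( mylib   foo.cc bar.cc )\n"
print(FormatCMake(sample))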
Code Example #2
def process_file(config, infile, outfile, dump=None):
    """
  Parse the input cmake file, re-format it, and print to the output file.
  """

    infile_content = infile.read()
    if config.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)
    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return
    config.first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    parse_tree = parser.parse(tokens, parse_db)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        formatter.dump_tree([box_tree], outfile)
        return

    text = formatter.write_tree(box_tree, config, infile_content)
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(text)
Code Example #3
File: __main__.py  Project: vilyukh/cmake_format
def process_file(config, infile, outfile, dump=None):
  """
  Parse the input cmake file, re-format it, and print to the output file.
  """

  infile_content = infile.read()
  if config.line_ending == 'auto':
    detected = detect_line_endings(infile_content)
    config = config.clone()
    config.set_line_ending(detected)
  tokens = lexer.tokenize(infile_content)
  if dump == 'lex':
    for token in tokens:
      outfile.write('{}\n'.format(token))
    return
  config.first_token = lexer.get_first_non_whitespace_token(tokens)
  parse_tree = parser.parse(tokens, config.fn_spec)
  if dump == 'parse':
    parser.dump_tree([parse_tree], outfile)
    return
  box_tree = formatter.layout_tree(parse_tree, config)
  if dump == 'layout':
    formatter.dump_tree([box_tree], outfile)
    return
  text = formatter.write_tree(box_tree, config, infile_content)
  outfile.write(text)
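A sketch of how this variant might be driven with in-memory streams rather than real files. The default Configuration() and the input snippet are assumptions; lexer, parser, formatter and the detect_line_endings helper are module-level names in the surrounding __main__.py:

import io
from cmake_format import configuration

cfg = configuration.Configuration()          # assumed: default settings are acceptable here
infile = io.StringIO("set( FOO  bar )\n")    # hypothetical input
outfile = io.StringIO()
process_file(cfg, infile, outfile)           # dump=None, so the formatted text is written
print(outfile.getvalue())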
Code Example #4
def process_file(config, infile_content, dump=None):
    """
  Parse the input cmake file, re-format it, and print to the output file.
  """

    outfile = io.StringIO(newline='')
    if config.format.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.format.set_line_ending(detected)
    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return outfile.getvalue(), True
    first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_funtree(config.parse.fn_spec))

    if dump == "parsedb":
        dump_parsedb(parse_db, outfile)
        return outfile.getvalue(), True

    ctx = parse.ParseContext(parse_db, config=config)
    parse_tree = parse.parse(tokens, ctx)
    if dump == "parse":
        dump_parse([parse_tree], outfile)
        return outfile.getvalue(), True
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return outfile.getvalue(), True

    box_tree = formatter.layout_tree(parse_tree,
                                     config,
                                     first_token=first_token)
    if dump == "layout":
        formatter.dump_tree([box_tree], outfile)
        return outfile.getvalue(), True

    outstr = formatter.write_tree(box_tree, config, infile_content)
    if config.encode.emit_byteorder_mark:
        outstr = "\ufeff" + outstr

    return (outstr, box_tree.reflow_valid)
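Unlike the earlier variants, this one takes the file content as a string and returns a (text, reflow_valid) tuple instead of writing to a stream. A possible call, assuming a default (nested) Configuration() from this version of the project:

from cmake_format import configuration

cfg = configuration.Configuration()          # assumed: nested config with .format/.parse/.encode
outtext, reflow_valid = process_file(cfg, "project(demo)\n")   # hypothetical input
print(outtext)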
Code Example #5
File: __main__.py  Project: tristan0x/cmake_format
def process_file(config, infile, outfile, dump=None):
    """
  Parse the input cmake file, re-format it, and print to the output file.
  """

    infile_content = infile.read()
    if config.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)
    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return
    config.first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_tree = parser.parse(tokens, config.fn_spec)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return
    if dump == "html-page":
        html_content = render.get_html(parse_tree, fullpage=True)
        outfile.write(html_content)
        return
    if dump == "html-stub":
        html_content = render.get_html(parse_tree, fullpage=False)
        outfile.write(html_content)
        return

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        infile.seek(0)
        formatter.dump_tree([box_tree], outfile)
        return

    text = formatter.write_tree(box_tree, config, infile_content)
    if config.emit_byteorder_mark:
        outfile.write("\ufeff")
    outfile.write(text)
Code Example #6
File: __main__.py  Project: cheshirekow/cmake_format
def process_file(config, infile, outfile, dump=None):
  """
  Parse the input cmake file, re-format it, and print to the output file.
  """

  infile_content = infile.read()
  if config.line_ending == 'auto':
    detected = detect_line_endings(infile_content)
    config = config.clone()
    config.set_line_ending(detected)
  tokens = lexer.tokenize(infile_content)
  if dump == "lex":
    for token in tokens:
      outfile.write("{}\n".format(token))
    return
  config.first_token = lexer.get_first_non_whitespace_token(tokens)
  parse_tree = parser.parse(tokens, config.fn_spec)
  if dump == "parse":
    parser.dump_tree([parse_tree], outfile)
    return
  if dump == "markup":
    dump_markup([parse_tree], config, outfile)
    return
  if dump == "html-page":
    html_content = render.get_html(parse_tree, fullpage=True)
    outfile.write(html_content)
    return
  if dump == "html-stub":
    html_content = render.get_html(parse_tree, fullpage=False)
    outfile.write(html_content)
    return

  box_tree = formatter.layout_tree(parse_tree, config)
  if dump == "layout":
    infile.seek(0)
    formatter.dump_tree([box_tree], outfile)
    return

  text = formatter.write_tree(box_tree, config, infile_content)
  if config.emit_byteorder_mark:
    outfile.write("\ufeff")
  outfile.write(text)
Code Example #7
File: __main__.py  Project: kumekay/cmake_format
def process_file(config, infile_content, dump=None, extra=None):
    """
  Parse the input cmake file, re-format it, and print to the output file.
  """

    outfile = io.StringIO(newline='')
    if config.line_ending == 'auto':
        detected = detect_line_endings(infile_content)
        config = config.clone()
        config.set_line_ending(detected)
    tokens = lexer.tokenize(infile_content)
    if dump == "lex":
        for token in tokens:
            outfile.write("{}\n".format(token))
        return outfile.getvalue()
    config.first_token = lexer.get_first_non_whitespace_token(tokens)
    parse_db = parse_funs.get_parse_db()
    parse_db.update(parse_funs.get_legacy_parse(config.fn_spec).kwargs)
    parse_tree = parser.parse(tokens, parse_db)
    if dump == "parse":
        parser.dump_tree([parse_tree], outfile)
        return outfile.getvalue()
    if dump == "markup":
        dump_markup([parse_tree], config, outfile)
        return outfile.getvalue()

    box_tree = formatter.layout_tree(parse_tree, config)
    if dump == "layout":
        formatter.dump_tree([box_tree], outfile)
        return outfile.getvalue()

    if extra is not None:
        extra["reflow_valid"] = box_tree.reflow_valid

    outstr = formatter.write_tree(box_tree, config, infile_content)
    if config.emit_byteorder_mark:
        return "\ufeff" + outstr
    return outstr
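Here the result is returned as a string, and the optional extra dict receives the reflow_valid flag. A sketch of a call, again assuming a default Configuration() works for this version:

from cmake_format import configuration

cfg = configuration.Configuration()          # assumed defaults
extra = {}
formatted = process_file(cfg, "add_executable(app  main.cc)\n", extra=extra)
print(formatted, extra.get("reflow_valid"))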