# Esempio n. 1
def main():
    """Parse a PEG grammar file and emit a Java parser for it.

    Command line:
        grammar_file  -- path to the grammar to parse
        tokens_file   -- token definitions consumed by the generator
        output_file   -- destination for the generated Java source
        --verbose     -- verbose tokenizer/parser output
        --debug       -- pass debug mode through to the generator

    Exits with an error message if the grammar fails to parse.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("grammar_file")
    arg_parser.add_argument("tokens_file")
    arg_parser.add_argument("output_file")
    arg_parser.add_argument("--verbose", action="store_true")
    arg_parser.add_argument("--debug", action="store_true")

    args = arg_parser.parse_args()

    # Parse the grammar file into a Grammar object.
    with open(args.grammar_file) as file:
        tokenizer = Tokenizer(tokenize.generate_tokens(file.readline),
                              verbose=args.verbose)
        # Distinct name: don't shadow the argparse parser above.
        grammar_parser = GrammarParser(tokenizer, verbose=args.verbose)
        grammar = grammar_parser.start()

    if not grammar:
        sys.exit("Failed to generate grammar")

    with open(args.tokens_file, "r") as tok_file:
        all_tokens, exact_tokens, non_exact_tokens = generate_token_definitions(
            tok_file)

    # Emit the Java parser; the generator records the grammar's path
    # relative to the output file for traceability.
    with open(args.output_file, "w") as file:
        gen = JavaParserGenerator(grammar,
                                  all_tokens,
                                  exact_tokens,
                                  non_exact_tokens,
                                  file,
                                  debug=args.debug)
        gen.generate(
            os.path.relpath(args.grammar_file,
                            os.path.dirname(args.output_file)))
# Esempio n. 2
def build_parser(
    grammar_file: str, verbose_tokenizer: bool = False, verbose_parser: bool = False
) -> Tuple[Grammar, Parser, Tokenizer]:
    """Read *grammar_file* and parse it into a Grammar.

    Returns the parsed grammar along with the parser and tokenizer used,
    so callers can inspect them. Raises a syntax error if parsing fails.
    """
    with open(grammar_file) as stream:
        tok = Tokenizer(tokenize.generate_tokens(stream.readline), verbose=verbose_tokenizer)
        grammar_parser = GrammarParser(tok, verbose=verbose_parser)
        result = grammar_parser.start()

        # A falsy result means the grammar could not be parsed.
        if not result:
            raise grammar_parser.make_syntax_error(grammar_file)

    return result, grammar_parser, tok
# Esempio n. 3
def main():
    """Diff two grammar files.

    Accepts either two file paths, or one path plus a grammar piped on
    stdin. Exits with usage text otherwise, or with an error message if
    either grammar fails to parse.
    """
    if len(sys.argv) == 3:
        grammar_files = map(open, sys.argv[1:])
    elif len(sys.argv) == 2 and not sys.stdin.isatty():
        grammar_files = [sys.stdin, open(sys.argv[1])]
    else:
        sys.exit("\n".join([
            "Usage:", f"\t\t{sys.argv[0]} GRAMMAR_FILE_OLD GRAMMAR_FILE_NEW",
            "\tor",
            f"\t\tcat GRAMMAR_FILE_OLD | {sys.argv[0]} GRAMMAR_FILE_NEW"
        ]))

    grammars = []
    for grammar_file in grammar_files:
        with grammar_file as file:
            tokenizer = Tokenizer(tokenize.generate_tokens(file.readline))
            parser = GrammarParser(tokenizer)
            grammar = parser.start()
            if not grammar:
                # Use the file's name, not the file object: interpolating
                # the object would print its repr, not the path.
                sys.exit(f"Failed to parse {file.name}")
            grammars.append(grammar)

    DiffVisitor(*grammars).diff()