Example 1
0
    def __init__(self,
                 arena,
                 parse_opts,
                 aliases,
                 oil_grammar,
                 trail=None,
                 one_pass_parse=False):
        # type: (Arena, OilParseOptions, Dict[str, Any], Grammar, Optional[_BaseTrail], bool) -> None
        """Collect state shared by the parsers.

        oil_grammar may be None (unit tests do this); then no transformer is
        built and the tree printer gets an empty name table.
        """
        self.arena = arena
        self.parse_opts = parse_opts
        self.aliases = aliases

        self.e_parser = expr_parse.ExprParser(self, oil_grammar)

        # The transformer is really a pure function, so one instance is
        # enough.  With no grammar (unit-test hack) there is none.
        self.tr = expr_to_ast.Transformer(oil_grammar) if oil_grammar else None
        names = MakeGrammarNames(oil_grammar) if oil_grammar else {}

        self.parsing_expr = False  # "single-threaded" state

        # Completion state lives here since it may span multiple parsers.
        self.trail = trail or _NullTrail()
        self.one_pass_parse = one_pass_parse

        self.p_printer = expr_parse.ParseTreePrinter(names)  # print raw nodes
Example 2
0
def main(argv):
  # type: (List[str]) -> int
  """Parse an Oil expression read from stdin and pretty-print its AST.

  Returns 0 on success, 2 on a parse error.
  """
  arena = alloc.Arena()
  arena.PushSource(source.Stdin(''))

  loader = pyutil.GetResourceLoader()
  oil_grammar = meta.LoadOilGrammar(loader)

  # No surrounding ParseContext is needed for this standalone demo.
  parse_ctx = None
  e_parser = expr_parse.ExprParser(parse_ctx, oil_grammar)

  lex = lexer.Lexer(
      lexer.LineLexer('', arena),
      reader.FileLineReader(sys.stdin, arena))

  try:
    pnode, _ = e_parser.Parse(lex, grammar_nt.command_expr)
  except error.Parse as e:
    ui.PrettyPrintError(e, arena)
    return 2

  # Transform the raw parse tree into the typed AST.
  node = expr_to_ast.Transformer(oil_grammar).Expr(pnode)
  assert node is not None

  ast_f = fmt.DetectConsoleOutput(sys.stdout)
  fmt.PrintTree(node.AbbreviatedTree(), ast_f)
  ast_f.write('\n')

  return 0
Example 3
0
    def __init__(self, arena, parse_opts, aliases, oil_grammar):
        # type: (Arena, optview.Parse, Dict[str, str], Grammar) -> None
        """Collect state shared by the parsers.

        oil_grammar may be None (unit-test hack); then no transformer is
        built.  The tree printer exists only under the Python build.
        """
        self.arena = arena
        self.parse_opts = parse_opts
        self.aliases = aliases

        self.e_parser = expr_parse.ExprParser(self, oil_grammar)
        # NOTE: the transformer is really a pure function, so one instance
        # suffices.
        self.tr = expr_to_ast.Transformer(oil_grammar) if oil_grammar else None

        if mylib.PYTHON:  # TODO: Simplify
            names = MakeGrammarNames(oil_grammar) if oil_grammar else {}
            self.p_printer = expr_parse.ParseTreePrinter(
                names)  # print raw nodes

        self.parsing_expr = False  # "single-threaded" state

        # Completion state lives here since it may span multiple parsers.
        self.trail = _NullTrail()  # type: _BaseTrail
        self.one_pass_parse = False
Example 4
0
def ParseDemo(oil_grammar):
    # type: (Grammar) -> None
    """Demo: parse the fixed expression '1 + 2*3' with the arith grammar.

    Wires up an arena, lexer, and expression parser, then parses and
    (optionally, behind disabled blocks) dumps the resulting tree.
    """
    arena = alloc.Arena()
    arena.PushSource(source__Stdin(''))

    # This demo runs without a surrounding parse context.
    parse_ctx = None  # type: ParseContext
    e_parser = expr_parse.ExprParser(parse_ctx, oil_grammar)

    line_reader = reader.StringLineReader('1 + 2*3', arena)
    lex = lexer.Lexer(lexer.LineLexer('', arena), line_reader)

    try:
        pnode, _ = e_parser.Parse(lex, arith_nt.arith_expr)
    except error.Parse as e:
        #ui.PrettyPrintError(e, arena)
        log("Parse Error (TODO: print it)")
        return

    # TODO: Fill this in.  Oil uses parse_lib.MakeGrammarNames()
    #
    # terminals: _Id_str?  Doesn't work in mycpp
    # nonterminals: gr.number2symbol.  Is this ever used at runtime?
    #
    # Dict[int,str] should really be a List[str] then?

    if 0:  # disabled: raw parse-tree dump
        names = {}  # type: Dict[int, str]
        printer = expr_parse.ParseTreePrinter(names)
        printer.Print(pnode)
        # NOTE: Could also transform

    if 0:  # disabled: the transformer only works for the Oil grammar
        tr = expr_to_ast.Transformer(oil_grammar)
        node = tr.Expr(pnode)
        assert node is not None
        fmt.PrintTree(node.AbbreviatedTree(), mylib.Stdout())
Example 5
0
def main(argv):
  """Grammar tool entry point.

  Actions (argv[1]):
    marshal GRAMMAR_PATH OUT_DIR  -- compile a pgen2 grammar, dump the
                                     marshaled tables and nonterminal module
    parse GRAMMAR_PATH START CODE -- parse CODE starting at symbol START
    stdlib-test                   -- show Python's own (deep) parse tree
  """
  action = argv[1]
  argv = argv[2:]

  def _AddIds(triples, dest):
    # Record token string -> Id in dest, refusing duplicate token strings.
    for _, tok_str, tok_id in triples:
      assert tok_str not in dest, tok_str
      dest[tok_str] = tok_id

  # Used at grammar BUILD time.
  OPS = {
      '.': Id.Expr_Dot,
      '->': Id.Expr_RArrow,
      '::': Id.Expr_DColon,

      '@': Id.Expr_At,
      '...': Id.Expr_Ellipsis,

      '$': Id.Expr_Dollar,  # Only for legacy eggex /d+$/
  }

  # Note: We have two lists of ops because Id.Op_Semi is used, not
  # Id.Arith_Semi.
  _AddIds(lex.EXPR_OPS, OPS)

  # Tokens that look like / or ${ or @{
  triples = (
      meta.ID_SPEC.LexerPairs(Kind.Arith) +
      lex.OIL_LEFT_SUBS +
      lex.OIL_LEFT_UNQUOTED +
      lex.EXPR_WORDS
  )
  more_ops = {}
  _AddIds(triples, more_ops)

  # Tokens that look like 'for'
  keyword_ops = {}
  _AddIds(lex.EXPR_WORDS, keyword_ops)  # for, in, etc.

  if 0:  # debugging aid
    from pprint import pprint
    pprint(OPS)
    print('---')
    pprint(more_ops)
    print('---')
    pprint(keyword_ops)
    print('---')

  tok_def = OilTokenDef(OPS, more_ops, keyword_ops)

  if action == 'marshal':  # compile the grammar and serialize it
    grammar_path = argv[0]
    out_dir = argv[1]

    basename, _ = os.path.splitext(os.path.basename(grammar_path))

    # HACK for find:
    if basename == 'find':
      from tools.find import tokenizer as find_tokenizer
      tok_def = find_tokenizer.TokenDef()

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    marshal_path = os.path.join(out_dir, basename + '.marshal')
    with open(marshal_path, 'wb') as out_f:
      gr.dump(out_f)

    nonterm_path = os.path.join(out_dir, basename + '_nt.py')
    with open(nonterm_path, 'w') as out_f:
      gr.dump_nonterminals(out_f)

    log('Compiled %s -> %s and %s', grammar_path, marshal_path, nonterm_path)
    #gr.report()

  elif action == 'parse':  # generate the grammar and parse it
    # Imported lazily to remove a build dependency.
    from frontend import parse_lib
    from oil_lang import expr_parse

    grammar_path, start_symbol, code_str = argv[0], argv[1], argv[2]

    # The grammar name chooses the lexer and semantic actions below.
    grammar_name, _ = os.path.splitext(os.path.basename(grammar_path))

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    arena = alloc.Arena()
    lex_ = MakeOilLexer(code_str, arena)

    is_expr = grammar_name in ('calc', 'grammar')

    parse_opts = parse_lib.OilParseOptions()
    parse_ctx = parse_lib.ParseContext(arena, parse_opts, {}, gr)
    p = expr_parse.ExprParser(parse_ctx, gr)
    try:
      pnode, _ = p.Parse(lex_, gr.symbol2number[start_symbol])
    except parse.ParseError as e:
      log('Parse Error: %s', e)
      return 1

    names = parse_lib.MakeGrammarNames(gr)
    p_printer = expr_parse.ParseTreePrinter(names)  # print raw nodes
    p_printer.Print(pnode)

    if is_expr:
      from oil_lang import expr_to_ast
      tr = expr_to_ast.Transformer(gr)
      # eval_input is an expression; anything else is a var declaration.
      if start_symbol == 'eval_input':
        ast_node = tr.Expr(pnode)
      else:
        ast_node = tr.VarDecl(pnode)
      ast_node.PrettyPrint()
      print()

  elif action == 'stdlib-test':
    # Shows how deep Python's own parse tree is.  It doesn't use semantic
    # actions to prune on the fly!
    import parser  # builtin module
    t = parser.expr('1+2')
    print(t)
    t2 = parser.st2tuple(t)
    print(t2)

  else:
    raise RuntimeError('Invalid action %r' % action)
Example 6
0
def main(argv):
  """Arithmetic-grammar tool entry point.

  Actions (argv[1]): 'marshal' compiles and serializes a pgen2 grammar,
  'parse' parses code with it, 'stdlib-test' dumps Python's own parse tree.
  """
  action = argv[1]
  argv = argv[2:]

  # Common initialization: map each arith token string to its Id.
  arith_ops = {
      tok_str: tok_id
      for _, tok_str, tok_id in meta.ID_SPEC.LexerPairs(Kind.Arith)
  }

  if 0:  # debugging aid
    from pprint import pprint
    pprint(arith_ops)

  tok_def = OilTokenDef(arith_ops)

  if action == 'marshal':  # compile the grammar and serialize it
    grammar_path, out_dir = argv[0], argv[1]

    basename, _ = os.path.splitext(os.path.basename(grammar_path))

    # HACK for find:
    if basename == 'find':
      from tools.find import tokenizer as find_tokenizer
      tok_def = find_tokenizer.TokenDef()

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    marshal_path = os.path.join(out_dir, basename + '.marshal')
    with open(marshal_path, 'wb') as out_f:
      gr.dump(out_f)

    nonterm_path = os.path.join(out_dir, basename + '_nt.py')
    with open(nonterm_path, 'w') as out_f:
      gr.dump_nonterminals(out_f)

    log('Compiled %s -> %s and %s', grammar_path, marshal_path, nonterm_path)
    #gr.report()

  elif action == 'parse':  # generate the grammar and parse it
    # Imported lazily to remove a build dependency.
    from frontend import parse_lib
    from oil_lang import expr_parse

    grammar_path, start_symbol, code_str = argv[0], argv[1], argv[2]

    # The grammar name selects the lexer and semantic actions below.
    grammar_name, _ = os.path.splitext(os.path.basename(grammar_path))

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    arena = alloc.Arena()
    lex = MakeOilLexer(code_str, arena)

    is_expr = grammar_name in ('calc', 'grammar')

    parse_opts = parse_lib.OilParseOptions()
    parse_ctx = parse_lib.ParseContext(arena, parse_opts, {}, gr)
    p = expr_parse.ExprParser(parse_ctx, gr)
    try:
      pnode, _ = p.Parse(lex, gr.symbol2number[start_symbol])
    except parse.ParseError as e:
      log('Parse Error: %s', e)
      return 1

    names = parse_lib.MakeGrammarNames(gr)
    p_printer = expr_parse.ParseTreePrinter(names)  # print raw nodes
    p_printer.Print(pnode)

    if is_expr:
      from oil_lang import expr_to_ast
      tr = expr_to_ast.Transformer(gr)
      # eval_input is an expression; anything else is an assignment.
      if start_symbol == 'eval_input':
        ast_node = tr.Expr(pnode)
      else:
        ast_node = tr.OilAssign(pnode)
      ast_node.PrettyPrint()
      print()

  elif action == 'stdlib-test':
    # Shows how deep Python's own parse tree is.  It doesn't use semantic
    # actions to prune on the fly!
    import parser  # builtin module
    t = parser.expr('1+2')
    print(t)
    t2 = parser.st2tuple(t)
    print(t2)

  else:
    raise RuntimeError('Invalid action %r' % action)