def InitParseContext(arena=None, oil_grammar=None, aliases=None):
  """Construct a ParseContext for tests.

  Defaults: a fresh '<test_lib>' arena, an empty alias map, and every parse
  option turned off.
  """
  arena = arena or MakeArena('<test_lib>')
  alias_map = {} if aliases is None else aliases

  # All parse options start out disabled.
  all_off = [False] * option_i.ARRAY_SIZE
  parse_opts = optview.Parse(all_off)

  return parse_lib.ParseContext(arena, parse_opts, alias_map, oil_grammar)
def __init__(self, script_path=None):
  # type: (Optional[str]) -> None
  """Parse a shell script and record what it references.

  Args:
    script_path: path of the script to analyze; reads stdin when None.

  Raises:
    error.Parse: if the script fails to parse (pretty-printed first).
    ResolutionError: if resolve_records() fails (printed first).
  """
  # generally, defer work until we know the script loaded
  with (open(script_path) if script_path else sys.stdin) as script:
    arena = alloc.Arena()
    parse_ctx = parse_lib.ParseContext(
        arena=arena,
        parse_opts=optview.Parse(NO_OPTIONS),
        aliases={},  # dummy
        oil_grammar=None,
    )
    # One-pass parse: keep extra info needed by analysis tools.
    parse_ctx.Init_OnePassParse(True)
    if script_path:
      # TODO: is there a real difference between using mainfile and
      # sourcedfile? (this gets re-used for sourced scripts)
      arena.PushSource(source.MainFile(script_path))
    else:
      arena.PushSource(source.Stdin())
    try:
      node = main_loop.ParseWholeFile(
          self._make_parser(parse_ctx, script, arena))
    except error.Parse as e:
      # Show a readable error before propagating.
      ui.PrettyPrintError(e, arena)
      raise

  assert node is not None

  # actually initialize
  self.arena = arena

  # TODO: not certain we don't want more, but minimize for now

  # Raw observations collected while visiting the parse tree.
  self.aliases = set()
  self.builtins = defaultdict(list)
  self.commands = defaultdict(list)
  self.sources = defaultdict(list)
  self.funcs_defined = set()

  # Results of resolution, filled in by resolve_records().
  self.resolved_commands = dict()
  self.resolved_functions = dict()
  self.resolved_aliases = dict()
  self.resolved_source = dict()
  self.parsed_source = dict()
  self.unresolved_commands = set()
  # unresolved functions doesn't make sense because we can't disambiguate an
  # unresolved function from an unresolved external command...
  self.unresolved_source = set()
  self.word_obs = dict()

  # "resolve"
  try:
    self.Visit(node)
    self.resolve_records()
  except ResolutionError as e:
    e.print_if_needed()
    raise
def InitWordParser(word_str, oil_at=False, arena=None):
  """Construct a WordParser over word_str for tests.

  Args:
    word_str: source text to lex.
    oil_at: enable the parse_at option (Oil's @ splicing).
    arena: optional arena; a '<test_lib>' arena is created when omitted.

  Returns:
    The word parser reached through the OSH command parser.
  """
  arena = arena or MakeArena('<test_lib>')

  opt_array = [False] * option_i.ARRAY_SIZE
  # Set the flag BEFORE constructing the view.  The original code mutated
  # opt_array after optview.Parse() was built, which only worked because the
  # view keeps a live reference to the list.
  opt_array[option_i.parse_at] = oil_at
  parse_opts = optview.Parse(opt_array)

  loader = pyutil.GetResourceLoader()
  oil_grammar = meta.LoadOilGrammar(loader)
  parse_ctx = parse_lib.ParseContext(arena, parse_opts, {}, oil_grammar)

  line_reader, _ = InitLexer(word_str, arena)
  c_parser = parse_ctx.MakeOshParser(line_reader)
  # Hack: reach inside the command parser for its word parser.
  return c_parser.w_parser
def InitWordEvaluator():
  """Construct a CompletionWordEvaluator with minimal test dependencies.

  Returns:
    A word_eval.CompletionWordEvaluator wired to fresh Mem, exec options,
    splitter, and error formatter.
  """
  arena = MakeArena('<InitWordEvaluator>')
  mem = state.Mem('', [], arena, [])
  state.InitMem(mem, {})

  opt_array = [False] * option_i.ARRAY_SIZE
  errexit = state._ErrExit()
  exec_opts = optview.Exec(opt_array, errexit)
  mem.exec_opts = exec_opts  # circular dep

  # NOTE: the original also built an unused optview.Parse(opt_array) and an
  # unused cmd_exec.Deps() with empty trap_nodes; both were dead locals and
  # have been removed.

  splitter = split.SplitContext(mem)
  errfmt = ui.ErrorFormatter(arena)
  return word_eval.CompletionWordEvaluator(mem, exec_opts, splitter, errfmt)
def TeaMain(argv):
  # type: (List[str]) -> int
  """Parse a Tea module from argv[1] (or stdin) and report parse errors.

  Fix: the original type comment was (str, List[str]) -> int, which lists
  two parameter types for this one-parameter function.

  Returns:
    0 on a clean parse, 2 if the file can't be opened or doesn't parse.
  """
  arena = alloc.Arena()
  try:
    script_name = argv[1]
    arena.PushSource(source.MainFile(script_name))
  except IndexError:
    arena.PushSource(source.Stdin())
    f = sys.stdin
  else:
    try:
      f = open(script_name)
    except IOError as e:
      stderr_line("tea: Couldn't open %r: %s", script_name,
                  posix.strerror(e.errno))
      return 2

  aliases = {}  # Dummy value; not respecting aliases!

  loader = pyutil.GetResourceLoader()
  oil_grammar = pyutil.LoadOilGrammar(loader)

  # Not used in tea, but OK...
  opt0_array = state.InitOpts()
  parse_opts = optview.Parse(opt0_array)
  # parse `` and a[x+1]=bar differently
  parse_ctx = parse_lib.ParseContext(arena, parse_opts, aliases, oil_grammar)

  line_reader = reader.FileLineReader(f, arena)

  try:
    parse_ctx.ParseTeaModule(line_reader)
    status = 0
  except error.Parse as e:
    ui.PrettyPrintError(e, arena)
    status = 2

  return status
# Re-export for convenience in tests.
ExternalThunk = process.ExternalThunk


def Banner(msg):
  """Print a visual separator followed by msg, for readable test output."""
  print('-' * 60)
  print(msg)


# TODO: Put these all in a function.
# Module-level fixtures shared by the tests below.  NOTE: construction order
# matters — each value feeds the next (_MEM needs _ARENA, _WAITER needs
# _JOB_STATE and _EXEC_OPTS, etc.).
_ARENA = test_lib.MakeArena('process_test.py')
_MEM = state.Mem('', [], _ARENA, [])
state.InitMem(_MEM, {}, '0.1')
_OPT_ARRAY = [False] * option_i.ARRAY_SIZE
_PARSE_OPTS = optview.Parse(_OPT_ARRAY)
_ERREXIT = state._ErrExit()
_EXEC_OPTS = state.MutableOpts(_MEM, _OPT_ARRAY, _ERREXIT, None)
_JOB_STATE = process.JobState()
_WAITER = process.Waiter(_JOB_STATE, _EXEC_OPTS)
_ERRFMT = ui.ErrorFormatter(_ARENA)
_FD_STATE = process.FdState(_ERRFMT, _JOB_STATE)
_EXT_PROG = process.ExternalProgram('', _FD_STATE, _ERRFMT,
                                    util.NullDebugFile())


def _CommandNode(code_str, arena):
  """Parse code_str with the test command parser; return one logical line."""
  c_parser = test_lib.InitCommandParser(code_str, arena=arena)
  return c_parser.ParseLogicalLine()
def main(argv):
  # type: (List[str]) -> int
  """Parse a shell program and optionally pretty-print its AST.

  Invocations:
    (no args)        parse stdin
    FILE             parse FILE
    -c CODE          parse the string CODE
    -n FILE          parse FILE, skip pretty-printing (for benchmarks)

  Returns 0 on success, 2 on parse error.
  """
  arena = alloc.Arena()

  opt_array = [False] * option_i.ARRAY_SIZE
  parse_opts = optview.Parse(opt_array)
  # Dummy value; not respecting aliases!
  aliases = {}  # type: Dict[str, str]
  # parse `` and a[x+1]=bar differently

  oil_grammar = None  # type: Grammar
  if mylib.PYTHON:
    # Grammar loading only happens in the Python build.
    loader = pyutil.GetResourceLoader()
    oil_grammar = pyutil.LoadOilGrammar(loader)

  parse_ctx = parse_lib.ParseContext(arena, parse_opts, aliases, oil_grammar)

  pretty_print = True

  if len(argv) == 1:
    line_reader = reader.FileLineReader(mylib.Stdin(), arena)
    src = source.Stdin('')  # type: source_t

  elif len(argv) == 2:
    path = argv[1]
    f = mylib.open(path)
    line_reader = reader.FileLineReader(f, arena)
    src = source.MainFile(path)

  elif len(argv) == 3:
    if argv[1] == '-c':
      # This path is easier to run through GDB
      line_reader = reader.StringLineReader(argv[2], arena)
      src = source.CFlag()

    elif argv[1] == '-n':  # For benchmarking, allow osh_parse -n file.txt
      path = argv[2]
      f = mylib.open(path)
      line_reader = reader.FileLineReader(f, arena)
      src = source.MainFile(path)
      # This is like --ast-format none, which benchmarks/osh-helper.sh passes.
      pretty_print = False

    else:
      raise AssertionError()

  else:
    raise AssertionError()

  arena.PushSource(src)

  c_parser = parse_ctx.MakeOshParser(line_reader)

  try:
    #node = main_loop.ParseWholeFile(c_parser)
    node = ParseWholeFile(c_parser)
  except error.Parse as e:
    ui.PrettyPrintError(e, arena)
    return 2
  assert node is not None

  # C++ doesn't have the abbreviations yet (though there are some differences
  # like omitting spids)
  #tree = node.AbbreviatedTree()
  if pretty_print:
    tree = node.PrettyTree()

    ast_f = fmt.DetectConsoleOutput(mylib.Stdout())
    fmt.PrintTree(tree, ast_f)
    ast_f.write('\n')

  return 0
def OshCommandMain(argv):
  """Run an 'oshc' tool.

  'osh' is short for "osh compiler" or "osh command".

  TODO:
  - oshc --help

  oshc deps
    --path: the $PATH to use to find executables.  What about libraries?

    NOTE: we're leaving out su -c, find, xargs, etc.?  Those should generally
    run functions using the $0 pattern.
    --chained-command sudo
  """
  try:
    action = argv[0]
  except IndexError:
    raise error.Usage('Missing required subcommand.')

  if action not in SUBCOMMANDS:
    raise error.Usage('Invalid subcommand %r.' % action)

  # These two subcommands are stubs; they return before any parsing happens.
  if action == 'parse-glob':
    # Pretty-print the AST produced by osh/glob_.py
    print('TODO:parse-glob')
    return 0

  if action == 'parse-printf':
    # Pretty-print the AST produced by osh/builtin_printf.py
    print('TODO:parse-printf')
    return 0

  arena = alloc.Arena()
  try:
    script_name = argv[1]
    arena.PushSource(source.MainFile(script_name))
  except IndexError:
    arena.PushSource(source.Stdin())
    f = sys.stdin
  else:
    try:
      f = open(script_name)
    except IOError as e:
      stderr_line("oshc: Couldn't open %r: %s", script_name,
                  posix.strerror(e.errno))
      return 2

  aliases = {}  # Dummy value; not respecting aliases!

  loader = pyutil.GetResourceLoader()
  oil_grammar = pyutil.LoadOilGrammar(loader)

  opt0_array = state.InitOpts()
  no_stack = None  # type: List[bool]  # for mycpp
  opt_stacks = [no_stack] * option_i.ARRAY_SIZE  # type: List[List[bool]]
  parse_opts = optview.Parse(opt0_array, opt_stacks)
  # parse `` and a[x+1]=bar differently
  parse_ctx = parse_lib.ParseContext(arena, parse_opts, aliases, oil_grammar)
  # One-pass parse keeps info the translation tools below need.
  parse_ctx.Init_OnePassParse(True)
  line_reader = reader.FileLineReader(f, arena)
  c_parser = parse_ctx.MakeOshParser(line_reader)

  try:
    node = main_loop.ParseWholeFile(c_parser)
  except error.Parse as e:
    ui.PrettyPrintError(e, arena)
    return 2
  assert node is not None

  # NOTE(review): when reading from stdin, f is sys.stdin and gets closed
  # here too — presumably intentional since parsing is done; confirm.
  f.close()

  # Columns for list-*
  # path line name
  # where name is the binary path, variable name, or library path.

  # bin-deps and lib-deps can be used to make an app bundle.
  # Maybe I should list them together?  'deps' can show 4 columns?
  #
  # path, line, type, name
  #
  # --pretty can show the LST location.

  # stderr: show how we're following imports?

  if action == 'translate':
    osh2oil.PrintAsOil(arena, node)

  elif action == 'arena':  # for debugging
    osh2oil.PrintArena(arena)

  elif action == 'spans':  # for debugging
    osh2oil.PrintSpans(arena)

  elif action == 'format':
    # TODO: autoformat code
    raise NotImplementedError(action)

  elif action == 'deps':
    deps.Deps(node)

  elif action == 'undefined-vars':  # could be environment variables
    raise NotImplementedError()

  else:
    raise AssertionError  # Checked above

  return 0
def main(argv):
  """Grammar build/debug tool: marshal a pgen grammar or parse a test string.

  Actions:
    marshal GRAMMAR OUT_DIR      compile grammar, dump .marshal/_nt.py/_nt.h
    parse GRAMMAR START CODE     parse CODE with the grammar; print the tree
    stdlib-test                  demo of Python's builtin parser module
  """
  action = argv[1]
  argv = argv[2:]

  # Used at grammar BUILD time.
  OPS = {
      '.': Id.Expr_Dot,
      '->': Id.Expr_RArrow,
      '::': Id.Expr_DColon,
      '@': Id.Expr_At,
      '...': Id.Expr_Ellipsis,
      '$': Id.Expr_Dollar,  # Only for legacy eggex /d+$/
  }

  # Note: We have two lists of ops because Id.Op_Semi is used, not
  # Id.Arith_Semi.
  for _, token_str, id_ in lexer_def.EXPR_OPS:
    assert token_str not in OPS, token_str
    OPS[token_str] = id_

  # Tokens that look like / or ${ or @{
  triples = (lexer_def.ID_SPEC.LexerPairs(Kind.Arith) +
             lexer_def.OIL_LEFT_SUBS +
             lexer_def.OIL_LEFT_UNQUOTED +
             lexer_def.EXPR_WORDS)
  more_ops = {}
  for _, token_str, id_ in triples:
    if token_str in more_ops:
      # Duplicate token: dump the table so the collision is easy to spot.
      import pprint
      raise AssertionError(
          '%r %s' % (token_str, pprint.pformat(more_ops, indent=2)))
    more_ops[token_str] = id_

  # Tokens that look like 'for'
  keyword_ops = {}
  for _, token_str, id_ in lexer_def.EXPR_WORDS:  # for, in, etc.
    assert token_str not in keyword_ops, token_str
    keyword_ops[token_str] = id_

  if 0:
    # Debug dump of the three token tables.
    from pprint import pprint
    pprint(OPS)
    print('---')
    pprint(more_ops)
    print('---')
    pprint(keyword_ops)
    print('---')

  tok_def = OilTokenDef(OPS, more_ops, keyword_ops)

  if action == 'marshal':  # generate the grammar and parse it
    grammar_path = argv[0]
    out_dir = argv[1]

    basename, _ = os.path.splitext(os.path.basename(grammar_path))

    # HACK for find:
    if basename == 'find':
      from tools.find import tokenizer as find_tokenizer
      tok_def = find_tokenizer.TokenDef()

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    marshal_path = os.path.join(out_dir, basename + '.marshal')
    with open(marshal_path, 'wb') as out_f:
      gr.dump(out_f)

    nonterm_py = os.path.join(out_dir, basename + '_nt.py')
    with open(nonterm_py, 'w') as out_f:
      gr.dump_nonterminals_py(out_f)

    nonterm_h = os.path.join(out_dir, basename + '_nt.h')
    with open(nonterm_h, 'w') as out_f:
      gr.dump_nonterminals_cpp(out_f)

    log('%s -> (oil_lang/grammar_gen) -> %s/%s{.marshal,_nt.py,_nt.h}',
        grammar_path, out_dir, basename)

    #gr.report()

  elif action == 'parse':  # generate the grammar and parse it
    # Remove build dependency
    from frontend import parse_lib
    from oil_lang import expr_parse

    grammar_path = argv[0]
    start_symbol = argv[1]
    code_str = argv[2]

    # For choosing lexer and semantic actions
    grammar_name, _ = os.path.splitext(os.path.basename(grammar_path))

    with open(grammar_path) as f:
      gr = pgen.MakeGrammar(f, tok_def=tok_def)

    arena = alloc.Arena()
    lex_ = MakeOilLexer(code_str, arena)

    is_expr = grammar_name in ('calc', 'grammar')

    # NOTE(review): optview.Parse() is called with no arguments here, while
    # other call sites pass an options array — confirm this arity is valid.
    parse_opts = optview.Parse()
    parse_ctx = parse_lib.ParseContext(arena, parse_opts, {}, gr)
    p = expr_parse.ExprParser(parse_ctx, gr)
    try:
      pnode, _ = p.Parse(lex_, gr.symbol2number[start_symbol])
    except parse.ParseError as e:
      log('Parse Error: %s', e)
      return 1

    names = parse_lib.MakeGrammarNames(gr)
    p_printer = expr_parse.ParseTreePrinter(names)  # print raw nodes
    p_printer.Print(pnode)

    if is_expr:
      from oil_lang import expr_to_ast
      tr = expr_to_ast.Transformer(gr)
      if start_symbol == 'eval_input':
        ast_node = tr.Expr(pnode)
      else:
        ast_node = tr.VarDecl(pnode)
      ast_node.PrettyPrint()
      print()

  elif action == 'stdlib-test':
    # This shows how deep Python's parse tree is.  It doesn't use semantic
    # actions to prune on the fly!
    import parser  # builtin module
    t = parser.expr('1+2')
    print(t)
    t2 = parser.st2tuple(t)
    print(t2)

  else:
    raise RuntimeError('Invalid action %r' % action)
def Banner(msg):
  """Print a visual separator followed by msg, for readable test output."""
  print('-' * 60)
  print(msg)


# TODO: Put these all in a function.
# Module-level fixtures shared by the tests below; construction order matters
# since each value feeds the next.
_ARENA = test_lib.MakeArena('process_test.py')
_MEM = state.Mem('', [], _ARENA, [])
state.InitMem(_MEM, {}, '0.1')
_OPT0_ARRAY = [False] * option_i.ARRAY_SIZE
# NOTE(review): other call sites build opt_stacks as [no_stack] * SIZE with
# element type List[bool]; here it's a flat [False] * SIZE — confirm which
# shape optview.Parse / state.MutableOpts expect in this version.
_OPT_STACKS = [False] * option_i.ARRAY_SIZE
_PARSE_OPTS = optview.Parse(_OPT0_ARRAY, _OPT_STACKS)
_EXEC_OPTS = state.MutableOpts(_MEM, _OPT0_ARRAY, _OPT_STACKS, None)
_JOB_STATE = process.JobState()
_WAITER = process.Waiter(_JOB_STATE, _EXEC_OPTS)
_ERRFMT = ui.ErrorFormatter(_ARENA)
_FD_STATE = process.FdState(_ERRFMT, _JOB_STATE)
_EXT_PROG = process.ExternalProgram('', _FD_STATE, _ERRFMT,
                                    util.NullDebugFile())


def _CommandNode(code_str, arena):
  """Parse code_str with the test command parser; return one logical line."""
  c_parser = test_lib.InitCommandParser(code_str, arena=arena)
  return c_parser.ParseLogicalLine()


def _ExtProc(argv):
def Main(arg_r):
  # type: (args.Reader) -> int
  """
  Usage:
    tea myprog.tea                   # Run the script
    tea -c 'func main() { echo "hi" }'  # Run this snippet.  Not common since
                                        # there's no top level statements!
                                        # Use bin/oil for that.
    tea -n -c 'var x = 1'            # Parse it

    # Compile to C++.  Produce myprog.cc and myprog.h.
    tea --cpp-out myprog myprog.tea lib.tea
  """
  try:
    attrs = flag_spec.ParseOil('tea_main', arg_r)
  except error.Usage as e:
    stderr_line('tea usage error: %s', e.msg)
    return 2
  arg = arg_types.tea_main(attrs.attrs)

  arena = alloc.Arena()

  # Pick the input: -c string, a named script, or stdin.
  if arg.c is not None:
    arena.PushSource(source.CFlag())
    line_reader = reader.StringLineReader(arg.c, arena)  # type: reader._Reader
  else:
    script_name = arg_r.Peek()
    if script_name is None:
      arena.PushSource(source.Stdin())
      f = mylib.Stdin()  # type: mylib.LineReader
    else:
      arena.PushSource(source.MainFile(script_name))
      try:
        # Hack for type checking.  core/shell.py uses fd_state.Open().
        f = cast('mylib.LineReader', open(script_name))
      except IOError as e:
        stderr_line("tea: Couldn't open %r: %s", script_name,
                    posix.strerror(e.errno))
        return 2
    line_reader = reader.FileLineReader(f, arena)

  # Dummy value; not respecting aliases!
  aliases = {}  # type: Dict[str, str]

  loader = pyutil.GetResourceLoader()
  oil_grammar = pyutil.LoadOilGrammar(loader)

  # Not used in tea, but OK...
  opt0_array = state.InitOpts()
  no_stack = None  # type: List[bool]  # for mycpp
  opt_stacks = [no_stack] * option_i.ARRAY_SIZE  # type: List[List[bool]]
  parse_opts = optview.Parse(opt0_array, opt_stacks)
  # parse `` and a[x+1]=bar differently
  parse_ctx = parse_lib.ParseContext(arena, parse_opts, aliases, oil_grammar)

  if arg.n:
    try:
      parse_ctx.ParseTeaModule(line_reader)
      status = 0
    except error.Parse as e:
      ui.PrettyPrintError(e, arena)
      status = 2
  else:
    # NOTE(review): if e_usage() ever returned instead of raising, 'status'
    # would be unbound at the return below — presumably it raises; confirm.
    e_usage("Tea doesn't run anything yet. Pass -n to parse.")

  return status