def terminal(self, token):
    """Wrap a lexer token in a homogeneous AST node carrying type and attr."""
    node = AST(token.type)
    node.attr = token.attr
    return node
def parse_call(stream, callee):
    """Parse a parenthesised argument list and attach it to *callee* as a call."""
    node = AST('call', [callee])
    stream.advance('lparen')
    stream.ignore('newline')
    while not stream.is_category('rparen'):
        node.append(parse_expression(stream))
        # Arguments may be separated by commas and/or newlines.
        stream.ignore('symbol', string=',')
        stream.ignore('newline')
    stream.advance('rparen')
    return node
def parse_expr(feed):
    """Parse juxtaposition calls and right-associative '='/':=' operators."""
    head = parse_expr10(feed)
    if match_term(feed):
        # Adjacent terms form a call: f x y -> call(f, x, y)
        head = AST('call', [head])
        while match_term(feed):
            head.append(parse_expr10(feed))
    if not feed.match('symbol', ('=', ':=')):
        return head
    token = feed.advance('symbol', ('=', ':='))
    # Right-recursive parse makes assignment right-associative.
    return AST('op', [head, parse_expr(feed)], token.string)
def parse_argv(stream):
    """Parse a parenthesised, comma-separated identifier list into an 'argv' node."""
    result = AST('argv', [])
    stream.advance('lparen')
    stream.ignore('newline')
    while not stream.is_category('rparen'):
        result.append(parse_identifier(stream))
        stream.ignore('symbol', string=',')
        stream.ignore('newline')
    stream.advance('rparen')
    return result
def parse_term(feed):
    """Parse a single term: a 'def' header, an atom, or a parenthesised expression."""
    if feed.match('keyword', 'def'):
        keyword = feed.advance()
        params = AST('argv', [])
        while feed.match('identifier'):
            name = feed.advance()
            params.append(AST('identifier', (), name.string, name.source))
        return AST('def', [params], source=keyword.source)
    token = feed.advance(term_types)
    if token.type in ('identifier', 'number', 'string'):
        return AST(token.type, (), token.string, token.source)
    if token.type != 'lparen':
        raise Exception("FAIL")
    inner = parse_expr(feed)
    feed.advance('rparen')
    return inner
def p_single_input(self, args):
    '''
        single_input ::= END
        single_input ::= sleep_commands END
        single_input ::= chained_commands END
    '''
    # While sleeping, parsed commands are swallowed and nothing is emitted.
    if len(args) > 0 and not self.sleeping:
        return args[0]
    return AST('')
def p_chained_commands(self, args):
    '''
        chained_commands ::= single_command
        chained_commands ::= single_command chained_commands
    '''
    # Fold the right-recursive parse into one flat 'chain' node.
    if len(args) != 1:
        args[1].children.insert(0, args[0])
        return args[1]
    return AST('chain', None, [args[0]])
def p_single_input_discard_junk(self, args):
    '''
        single_input_discard_junk ::= END
        single_input_discard_junk ::= junk_tokens sleep_commands END
        single_input_discard_junk ::= junk_tokens chained_commands END
    '''
    # Leading junk tokens are dropped; while sleeping nothing is emitted.
    if len(args) > 1 and not self.sleeping:
        return args[1]
    return AST('')
def set_ctx_and_ops(py_node: ast.AST, merged_types: List[type]) -> None:
    """Restore a node's context or operator(s) from a list of extracted types.

    :param py_node: Python ast tree node
    :param merged_types: type of operators or context extracted from XML tag
    """
    if not merged_types:
        return
    fields = py_node._fields
    if 'ctx' in fields:
        py_node.ctx = merged_types[0]()
    elif 'op' in fields:
        py_node.op = merged_types[0]()
    elif 'ops' in fields:
        # Comparison chains carry one operator instance per comparator.
        py_node.ops = [op_cls() for op_cls in merged_types]
    else:
        log_and_raise_error(f'Failed to restore context or operators', logger, RuntimeError)
def convert_tree_to_ssa(tree: ast.AST, defn_env: dict):
    """Convert a function AST into SSA form and wire up its return values.

    :param tree: the function's AST (mutated in place and revisited).
    :param defn_env: definition environment used to filter decorators.
    :return: (transformed tree, list of SSA visitor arguments)
    """
    tree.decorator_list = ast_utils.filter_decorator(ssa, tree.decorator_list,
                                                     defn_env)
    ssa_visitor = SSAVisitor()
    tree = ssa_visitor.visit(tree)
    tree = TransformReturn().visit(tree)
    num_return_values = len(ssa_visitor.return_values)
    # Walk return sites from last to first, muxing earlier conditional
    # returns over later ones with phi() calls.
    for i in reversed(range(num_return_values)):
        conds = ssa_visitor.return_values[i]
        name = f"__magma_ssa_return_value_{i}"
        if i == num_return_values - 1 or not conds:
            # Unconditional (or final) return: assign straight through.
            if isinstance(tree.returns, ast.Tuple):
                tree.body.append(
                    ast.Assign([
                        ast.Tuple([
                            ast.Name(f"O{j}", ast.Store())
                            for j in range(len(tree.returns.elts))
                        ], ast.Store())
                    ], ast.Name(name, ast.Load())))
            else:
                # BUG FIX: the target was built as `ast.Name("O", ast.Load)`
                # (un-instantiated Load class in an assignment-target
                # position); targets must be Store-context instances.
                tree.body.append(
                    ast.Assign([ast.Name("O", ast.Store())],
                               ast.Name(name, ast.Load())))
        else:
            # Conjoin all conditions guarding this return.
            # BUG FIX: the loop previously rebound its own loop variable,
            # discarding the accumulated condition instead of updating `cond`.
            cond = conds[-1]
            for c in conds[:-1]:
                # NOTE(review): ast.And() used as a BinOp operator is suspect
                # (And is a boolop) -- confirm downstream phi handling.
                cond = ast.BinOp(cond, ast.And(), c)
            if isinstance(tree.returns, ast.Tuple):
                for j in range(len(tree.returns.elts)):
                    tree.body.append(
                        ast.Assign(
                            [ast.Name(f"O{j}", ast.Store())],
                            ast.Call(ast.Name("phi", ast.Load()), [
                                ast.List([
                                    ast.Name(f"O{j}", ast.Load()),
                                    ast.Subscript(ast.Name(name, ast.Load()),
                                                  ast.Index(ast.Num(j)),
                                                  ast.Load())
                                ], ast.Load()),
                                cond
                            ], [])))
            else:
                tree.body.append(
                    ast.Assign([ast.Name("O", ast.Store())],
                               ast.Call(ast.Name("phi", ast.Load()), [
                                   ast.List([
                                       ast.Name("O", ast.Load()),
                                       ast.Name(name, ast.Load())
                                   ], ast.Load()),
                                   cond
                               ], [])))
    return tree, ssa_visitor.args
def _make_green(self, node: ast.AST):
    """Turn a node green. If it's a function, add it to the green
    functions list.
    """
    # Already-green nodes are skipped so changes_made is not set spuriously.
    if is_green(node):
        return
    node.color = True
    self.changes_made = True
    # Green functions are additionally tracked by name.
    if isinstance(node, ast.FunctionDef):
        self.green_functions.add(node.name)
def p_sleep_commands(self, args):
    '''
        sleep_commands ::= go to sleep
        sleep_commands ::= start listening
    '''
    # Toggle the recognizer's sleep state based on which phrase matched.
    if args[-1].type == 'sleep':
        self.sleeping = True
        # BUG FIX: Python 2 print statements are syntax errors in Python 3;
        # converted to print() calls (messages unchanged).
        print('Going to sleep.')
    else:
        self.sleeping = False
        print('Waking from sleep')
    return AST('')
def _node_with_elts(node: ast.AST, new_elts: typ.List[ast.expr]) -> ast.expr: if isinstance(node, ast.Call): node.args = new_elts return node elif isinstance(node, ast.List): return ast.List(elts=new_elts) elif isinstance(node, ast.Set): return ast.Set(elts=new_elts) elif isinstance(node, ast.Tuple): return ast.Tuple(elts=new_elts) else: raise TypeError(f"Unexpected node type {type(node)}")
def test_DeAST_has_source():
    """Is a DeAST's source attribute filled when visit() is called?"""
    from ast import AST
    from deast import DeAST
    # A fresh DeAST starts with an empty source attribute...
    deaster = DeAST()
    assert hasattr(deaster, 'source')
    assert deaster.source is None
    # ...and visiting even a bare AST node populates it.
    deaster.visit(AST())
    assert deaster.source is not None
def p_modifiers(self, args):
    '''
        modifiers ::= control single_command
        modifiers ::= alt single_command
        modifiers ::= alternative single_command
    '''
    value = {'control': 'ctrl', 'alt': 'alt', 'alternative': 'alt'}
    # A nested mod_plus_key accumulates this modifier at the front of its meta.
    if args[1].type != 'mod_plus_key':
        return AST('mod_plus_key', [value[args[0].type]], [args[1]])
    args[1].meta.insert(0, value[args[0].type])
    return args[1]
def debugger(self):
    """Prepare and launch a debugging run: reset the symbol-table widget,
    reparse the editor contents, seed the global environment with the
    $ra/$sp registers, validate labels, then hand off to self.debug()."""
    # Reset the table and render its header row.
    self.tableWidget.setRowCount(0)
    self.tableWidget.setRowCount(100)
    self.tableWidget.setItem(0, 0, QTableWidgetItem("No."))
    self.tableWidget.setItem(0, 1, QTableWidgetItem("Simbolo"))
    self.tableWidget.setItem(0, 2, QTableWidgetItem("Valor"))
    # Only start a new run once the previous worker thread has finished.
    if (self.hilo_terminado):
        # Deep ASTs can blow the default recursion limit during execution.
        sys.setrecursionlimit(2147483644)
        self.consola.clear()
        ReporteErrores.func(None, True)
        g.func(0, None)
        g.textoEntrada = self.editor.text()
        instrucciones = g.parse(self.editor.text())
        self.instrucciones = instrucciones
        ts_global = TS.Entorno(None)
        ast = AST.AST(instrucciones)
        # Pre-declare the $ra and $sp registers in the global environment.
        declaracion1 = Declaracion.Declaracion('$ra', 0, 0, 0, "", "GLOBAL")
        declaracion2 = Declaracion.Declaracion('$sp', 0, 0, 0, "", "GLOBAL")
        declaracion1.ejecutar(ts_global, ast, self, True)
        declaracion2.ejecutar(ts_global, ast, self, True)
        # bandera flips to True once the leading 'main' label has been seen.
        bandera = False
        if (instrucciones != None):
            for ins in instrucciones:
                try:
                    if (bandera == False and ins.id != "main"):
                        # The first label must be 'main': report and stop.
                        error = Error.Error(
                            "SEMANTICO",
                            "Error semantico, La primera etiqueta debe ser la etiqueta main:",
                            ins.linea, ins.columna)
                        ReporteErrores.func(error)
                        break
                    else:
                        bandera = True
                        if (ast.existeEtiqueta(ins)):
                            # Duplicate label: report but keep scanning.
                            error = Error.Error(
                                "SEMANTICO",
                                "Error semantico, Ya existe la etiqueta " + ins.id,
                                ins.linea, ins.columna)
                            ReporteErrores.func(error)
                        else:
                            ast.agregarEtiqueta(ins)
                except:
                    # NOTE(review): silently skips items lacking .id/.linea --
                    # presumably non-label instructions; confirm.
                    pass
        self.ts_global = ts_global
        self.ast = ast
        self.listado_gramatical = g.func(1, None).copy()
        self.debug()
def create_new_ast(ast): new_ast = AST('body') #[] tokens_java = [ Type.OPEN_CURLY_BRACE, Type.SEMICOLON, Type.CLOSE_CURLY_BRACE, Type.TAB ] tokens_java_val = ['System', '.', 'out'] for i in range(len(ast.params)): if type(ast.params[i]) != Node: if ast.params[i].type not in tokens_java: if ast.params[i].value == 'println': node.params.append( (Token)(ast.params[i].begin, ast.params[i].end, '\nprint', Type.IDENTIFIER)) elif ast.params[i].value not in tokens_java_val: if ast.params[i].type not in tokens_java: node.params.append(ast.params[i]) else: node = Node(ast.params[i].type, ast.params[i].name) if type(ast.params[i].params[0]) is list: index = [] for j in range(len(ast.params[i].params[0])): index.append(ast.params[i].params[0][j]) node.params.append(index) for j in range(1, len(ast.params[i].params)): if ast.params[i].params[j].value == 'println': #node.params.append(ast[i].params[j]) node.params.append( (Token)(ast.params[i].params[j].begin, ast.params[i].params[j].end, 'print', Type.IDENTIFIER)) elif ast.params[i].params[j].value not in tokens_java_val: if ast.params[i].params[j].type not in tokens_java: node.params.append(ast.params[i].params[j]) else: for j in range(len(ast.params[i].params)): if ast.params[i].params[j].type not in tokens_java: node.params.append(ast.params[i].params[j]) new_ast.params.append(node) return new_ast
def p_character(self, args):
    '''
        character ::= act
        character ::= colon
        character ::= single quote
        character ::= double quote
        character ::= equal
        character ::= space
        character ::= tab
        character ::= bang
        character ::= hash
        character ::= dollar
        character ::= percent
        character ::= carrot
        character ::= ampersand
        character ::= star
        character ::= late
        character ::= rate
        character ::= minus
        character ::= underscore
        character ::= plus
        character ::= backslash
        character ::= dot
        character ::= slash
        character ::= question
    '''
    # Map each spoken word onto the key name it should emit.
    value = {
        'act': 'Escape',
        'colon': 'colon',
        'single': 'apostrophe',
        'double': 'quotedbl',
        'equal': 'equal',
        'space': 'space',
        'tab': 'Tab',
        'bang': 'exclam',
        'hash': 'numbersign',
        'dollar': 'dollar',
        'percent': 'percent',
        'carrot': 'caret',
        'ampersand': 'ampersand',
        'star': 'asterisk',
        'late': 'parenleft',
        'rate': 'parenright',
        'minus': 'minus',
        'underscore': 'underscore',
        'plus': 'plus',
        'backslash': 'backslash',
        'dot': 'period',
        'slash': 'slash',
        'question': 'question'
    }
    return AST('raw_char', [value[args[0].type]])
def _node_with_binop(node: ast.AST, binop: ast.BinOp) -> ast.expr: if isinstance(node, ast.Call): node.args = [ast.Starred(value=binop, ctx=ast.Load())] return node elif isinstance(node, ast.List): # NOTE (mb 2018-06-29): Operands of the binop are always lists return binop elif isinstance(node, ast.Set): return ast.Call(func=ast.Name(id="set", ctx=ast.Load()), args=[binop], keywords=[]) elif isinstance(node, ast.Tuple): return ast.Call(func=ast.Name(id="tuple", ctx=ast.Load()), args=[binop], keywords=[]) else: raise TypeError(f"Unexpected node type {type(node)}")
def visit_scope(self, node: AST, fn_scope: bool = False, locals_: List[str] = ()):
    """Push a fresh Scope for *node*, visit its children, then restore it."""
    enclosing = self.current_scope
    # Non-function scopes chain to the nearest enclosing function scope.
    parent = enclosing if enclosing.fn_scope else enclosing.parent
    self.current_scope = Scope(
        item=node,
        locals_=dict.fromkeys(locals_),
        fn_scope=fn_scope,
        parent=parent)
    node._pyo_scope = self.current_scope
    self.generic_visit(node)
    self.current_scope = enclosing
def main():
    # Force UTF-8 text on the standard streams regardless of locale.
    for name in ('stdout', 'stderr', 'stdin'):
        stream = getattr(sys, name)
        setattr(sys, name, io.TextIOWrapper(stream.buffer, encoding='utf-8'))
    try:
        opts = create_opts()
        source = sys.stdin.read()
        tokens = Tokenizer().parse(source)
        tree = AST()
        tree.parse(tokens)
        context = tree.traverse(opts=opts)
        print(context.buffer)
    except Tokenizer.ParseError as e:
        print(e)
        sys.exit(1)
    except AST.SyntaxError as e:
        print(e)
        sys.exit(2)
    sys.exit(0)
def p_modifiers(self, args):
    '''
        modifiers ::= control single_command
        modifiers ::= alt single_command
        modifiers ::= alternative single_command
        modifiers ::= when single_command
    '''
    # Map the spoken modifier word onto its key name.
    value = {
        'control': 'ctrl',
        'alt': 'alt',
        'alternative': 'alt',
        'when': 'Super_L'
    }
    modifier = value[args[0].type]
    return AST('mod_plus_key', [modifier, args[1].meta[0]])
def test_ast_opts(self):
    """opts passed to traverse() are reachable via opts.get() in templates."""
    a = AST()
    t = Tokenizer()
    opts = {}
    opts['get-me'] = 'I am superman'
    # Reference-expression context.
    a.parse(t.parse('{{ opts.get("get-me") }}'))
    c = a.traverse(opts=opts)
    self.assertEqual(c.buffer, 'I am superman')
    # Code-block (if) context.
    a.parse(t.parse('{@ if opts.get("get-me"): @}I am superman{@ end @}'))
    c = a.traverse(opts=opts)
    self.assertEqual(c.buffer, 'I am superman')
def match(self, x):
    """
    Accepts token_type (int) or the token char.
    Example:
        token_type: 7 (I think)
        token_char: '('
    """
    # Normalise a character argument to its token type.
    if type(x) is str:
        x = self.input.getTokenType(x)
    if self.LA(1) != x:
        raise Exception(
            f"Expecting {self.input.getTokenName(x)}; found {self.LT(1)} on line # {self.LT(1)._line_number}"
        )
    # Wrap the current token in an AST node, then consume it.
    ast_node = AST(self.LT(1))
    self.consume()
    return ast_node
def translate_facts(node: ast.AST, names: dict, module: ModuleType) -> ast.AST:
    """Translate facts referred within a node with __Fact_<Class Name>.

    Name and Attribute nodes are handed to translate_fact() for renaming;
    all other nodes are traversed recursively through their fields.
    """
    if isinstance(node, ast.Name):
        return translate_fact(node, node.id, names, module)
    if isinstance(node, ast.Attribute):
        node.value = translate_facts(node.value, names, module)
        return translate_fact(node, node_names(node), names, module)
    for field, value in ast.iter_fields(node):
        if isinstance(value, ast.AST):
            setattr(node, field, translate_facts(value, names, module))
        elif isinstance(value, list):
            # BUG FIX: keep non-AST list items (e.g. the str entries of
            # ast.Global.names) instead of silently dropping them, which
            # corrupted the tree.
            value[:] = [
                translate_facts(v, names, module) if isinstance(v, ast.AST) else v
                for v in value
            ]
    return node
def parse_term(feed):
    """Parse one term: a 'def' header, a literal/identifier atom, or (expr)."""
    if feed.match('keyword', 'def'):
        token = feed.advance()
        argv = AST('argv', [])
        # Collect the parameter identifiers following 'def'.
        while feed.match('identifier'):
            ident = feed.advance()
            argv.append(AST('identifier', (), ident.string, ident.source))
        return AST('def', [argv], source=token.source)
    token = feed.advance(term_types)
    atom_types = ('identifier', 'number', 'string')
    if token.type in atom_types:
        return AST(token.type, (), token.string, token.source)
    if token.type == 'lparen':
        inner = parse_expr(feed)
        feed.advance('rparen')
        return inner
    raise Exception("FAIL")
def evalnode(node: ast.AST, gvars: Dict[str, Any]) -> Any:
    """
    Tries to evaluate an AST node given only global variables.

    :param node: The AST node/subtree to evaluate.
    :param gvars: A dictionary mapping names to variables.
    :return: The result of evaluation, or raises ``SyntaxError`` on any
             failure to evaluate.
    """
    if not isinstance(node, ast.AST):
        return node
    try:
        # Ensure context is load so eval works (e.g., when using value as lhs)
        if not isinstance(getattr(node, 'ctx', False), ast.Load):
            node = copy.deepcopy(node)
            node.ctx = ast.Load()
        return eval(compile(ast.Expression(node), '<string>', mode='eval'),
                    gvars)
    except Exception:
        # BUG FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; anything *else* can happen here, so any
        # ordinary Exception is reported as a SyntaxError.
        raise SyntaxError
def p_letter(self, args):
    '''
        letter ::= arch
        letter ::= bravo
        letter ::= boy
        letter ::= charlie
        letter ::= can
        letter ::= delta
        letter ::= eco
        letter ::= echo
        letter ::= fox
        letter ::= golf
        letter ::= gold
        letter ::= hotel
        letter ::= india
        letter ::= julia
        letter ::= kilo
        letter ::= line
        letter ::= mike
        letter ::= nor
        letter ::= november
        letter ::= oscar
        letter ::= papa
        letter ::= queen
        letter ::= roll
        letter ::= role
        letter ::= row
        letter ::= sierra
        letter ::= tango
        letter ::= uniform
        letter ::= uni
        letter ::= unique
        letter ::= victor
        letter ::= whiskey
        letter ::= whisky
        letter ::= why
        letter ::= xray
        letter ::= expert
        letter ::= yankee
        letter ::= zulu
    '''
    # Each code word contributes its first letter; 'expert' is the special
    # case standing in for 'x' (overriding its own initial 'e').
    if (args[0].type == 'expert'):
        args[0].type = 'x'
    return AST('char', [args[0].type[0]])
def call_First(self, node: ast.AST, args: List[ast.AST]) -> Any: 'We are in a sequence. Take the first element of the sequence and use that for future things.' # Unpack the source here assert len(args) == 1 source = args[0] # Make sure we are in a loop. seq = self.as_sequence(source) # The First terminal works by protecting the code with a if (first_time) {} block. # We need to declare the first_time variable outside the block where the thing we are # looping over here is defined. This is a little tricky, so we delegate to another method. loop_scope = seq.iterator_value().scope() outside_block_scope = loop_scope[-1] # Define the variable to track this outside that block. is_first = crep.cpp_variable(unique_name('is_first'), outside_block_scope, cpp_type=ctyp.terminal('bool'), initial_value=crep.cpp_value('true', self._gc.current_scope(), ctyp.terminal('bool'))) outside_block_scope.declare_variable(is_first) # Now, as long as is_first is true, we can execute things inside this statement. # The trick is putting the if statement in the right place. We need to locate it just one level # below where we defined the scope above. s = statement.iftest(is_first) s.add_statement(statement.set_var(is_first, crep.cpp_value('false', top_level_scope(), cpp_type=ctyp.terminal('bool')))) sv = seq.sequence_value() if isinstance(sv, crep.cpp_sequence): self._gc.set_scope(sv.iterator_value().scope()[-1]) else: self._gc.set_scope(sv.scope()) self._gc.add_statement(s) # If we just found the first sequence in a sequence, return that. # Otherwise return a new version of the value. first_value = sv if isinstance(sv, crep.cpp_sequence) else sv.copy_with_new_scope(self._gc.current_scope()) node.rep = first_value # type: ignore self._result = first_value
def _build_ast_helper(rule_table, grammar, grammar_symbol, tokens,
                      stack_symbol, token_index):
    """Recursively build an AST for *stack_symbol* starting at *token_index*.

    Returns (next_token_index, ast_or_token_or_None). Maybe-wrapped symbols
    yield (token_index, None) on a failed match instead of raising.
    """
    # Unwrap Maybe(...) symbols; they make a failed match non-fatal.
    if stack_symbol in grammar_symbol.Maybe.values:
        is_maybe = True
        actual_symbol = stack_symbol.inner
    else:
        is_maybe = False
        actual_symbol = stack_symbol
    if actual_symbol in grammar_symbol.Base.values:
        # Terminal symbol: consume one token if it matches.
        if tokens[token_index].token_type == actual_symbol.inner:
            # NOTE(review): this bounds check runs *after* tokens[token_index]
            # was already indexed, so it can never be False here -- the
            # "Expected more tokens" branch looks unreachable; confirm intent.
            if token_index < len(tokens):
                return (token_index + 1, tokens[token_index])
            elif is_maybe:
                return (token_index, None)
            else:
                raise Exception("Invalid syntax. Expected more tokens.")
        elif is_maybe:
            return (token_index, None)
        else:
            raise Exception("Invalid syntax. Expected " + actual_symbol.name +
                            " but got " + tokens[token_index].token_type.name)
    else:
        # Non-terminal: choose a production, then build each RHS symbol.
        rule = _find_rule(rule_table, grammar, actual_symbol, tokens,
                         token_index, is_maybe)
        if rule == None:
            return (token_index, None)
        else:
            (prev_symbol, new_symbols) = rule.value
            def reduce_stack(i_and_ast_list, symbol):
                # Thread the token index through while collecting child ASTs.
                (i, ast_list) = i_and_ast_list
                (i_new, ast_new) = _build_ast_helper(rule_table, grammar,
                                                     grammar_symbol, tokens,
                                                     symbol, i)
                return (i_new, ast_list + [ast_new])
            (i_new, ast_list) = reduce(reduce_stack, new_symbols,
                                       (token_index, []))
            return (i_new, AST(rule, ast_list))
def p_letter(self, args): ''' letter ::= arch letter ::= alpha letter ::= bravo letter ::= charlie letter ::= delta letter ::= eco letter ::= echo letter ::= fox letter ::= golf letter ::= hotel letter ::= india letter ::= julia letter ::= kilo letter ::= lima letter ::= mike letter ::= november letter ::= oscar letter ::= papa letter ::= queen letter ::= romeo letter ::= sierra letter ::= tango letter ::= uniform letter ::= victor letter ::= whiskey letter ::= whisky letter ::= xray letter ::= expert letter ::= yankee letter ::= zulu ''' # note: arch is more easy to recognize than alpha # return the first letter of arg, unless the arg is "expert" # in which case override letter "x" over "e" if (args[0].type == 'expert'): args[0].type = 'x' return AST('char', [args[0].type[0]])
def test_ast_assign(self):
    """Assignments in {@ @} blocks update the context symbol table (syms)."""
    t = Tokenizer()
    a = AST()
    a.parse(t.parse('''{@ a = "s" @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['a'], 's')
    a.parse(t.parse('''{@ a = "s" @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['a'], 's')
    # Later assignments override earlier ones, within one block...
    a.parse(t.parse('''{@ v = "v" v = "" @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], '')
    a.parse(t.parse('''{@ v = "v" v = "v2" @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('''{@ v = "v" v = "" v = "v2" @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    # ...and across separate blocks.
    a.parse(t.parse('''{@ v = "v" @} {@ v = "" @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], '')
    a.parse(t.parse('''{@ v = "v" @} {@ v = "" @} {@ v = "v2" @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('''{@ v = "" @} {@ v = "v" @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v')
    # Arithmetic, chained assignment, precedence, and parentheses.
    a.parse(t.parse('{@ v = 1 + 2 @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 3)
    a.parse(t.parse('{@ v = v = 1 @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 1)
    a.parse(t.parse('{@ v = 1 + 2 * 3 @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 7)
    a.parse(t.parse('{@ v = (1 + 2) * 3 @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 9)
    # Assignment from the opts mapping.
    a.parse(t.parse('{@ v = opts.get("a") @}'))
    c = a.traverse(opts={ 'a': 'b' })
    self.assertEqual(c.syms['v'], "b")
def parse_stmt(feed):
    """Parse one statement: import / if / elif / else / while / return,
    or an expression statement (possibly carrying an indented block)."""
    if feed.ignore('keyword', 'import'):
        token = feed.advance('identifier')
        return AST('import', (), token.string, token.source)
    elif feed.ignore('keyword', 'if'):
        stmt = AST('if', [parse_expr(feed)])
        stmt.extend(parse_block(feed))
        return stmt
    elif feed.ignore('keyword', 'elif'):
        stmt = AST('elif', [parse_expr(feed)])
        stmt.extend(parse_block(feed))
        return stmt
    elif feed.ignore('keyword', 'else'):
        stmt = AST('else', [])
        stmt.extend(parse_block(feed))
        return stmt
    elif feed.ignore('keyword', 'while'):
        stmt = AST('while', [parse_expr(feed)])
        stmt.extend(parse_block(feed))
        return stmt
    elif feed.ignore('keyword', 'return'):
        # 'return' may appear bare or carry an expression.
        stmt = AST('return', [])
        if match_term(feed):
            stmt.append(parse_expr(feed))
        return stmt
    else:
        expr = parse_expr(feed)
        if feed.match('indent'):
            # An indented block following an expression attaches to the
            # first 'def' found inside it, or turns the expression itself
            # into a call node.
            blocks = expr.find(('def',))
            if len(blocks) > 0:
                blocks[0].extend(parse_block(feed))
            elif expr.type != 'call':
                expr = AST('call', [expr])
                expr.extend(parse_block(feed))
            else:
                expr.extend(parse_block(feed))
        return expr
def p_electricity(self, args):
    '''
        electricity ::= light _action
    '''
    # Encode the numeric action as a single ASCII digit character.
    digit = chr(ord('0') + args[1])
    return AST('elec', [digit])
def p_english(self, args):
    '''
        english ::= word ANY
    '''
    # Emit the raw text captured by the ANY terminal as a sequence.
    captured = args[1].extra
    return AST('sequence', [captured])
def get_formated_ast(self):
    """Build an AST from the parse result and return its formatted rendering."""
    builder = AST(self.result)
    tree = builder.generate(self.result)
    return builder.get_formated_ast(tree)
def parse_block(stream, indent):
    """Parse statements introduced by newlines at *indent* level into a
    'block' node; recognises pass/def/return/if/elif/else and expressions."""
    block = AST('block', [])
    while stream.can_advance('newline', number=indent):
        stream.advance('newline', number=indent)
        if stream.ignore('keyword', string='pass'):
            pass  # 'pass' contributes nothing to the block
        elif stream.ignore('keyword', string='def'):
            stmt = AST('def', [])
            # The function name is optional (anonymous defs allowed).
            if stream.can_advance('word'):
                stmt.string = stream.advance('word').string
            stmt.append(parse_argv(stream))
            stmt.extend(parse_sub_block(stream, indent))
            block.append(stmt)
        elif stream.ignore('keyword', string='return'):
            stmt = AST('return', [])
            stmt.append(parse_expression(stream))
            block.append(stmt)
        elif stream.ignore('keyword', string='if'):
            stmt = AST('if', [])
            stmt.append(parse_expression(stream))
            stmt.append(parse_sub_block(stream, indent))
            block.append(stmt)
        elif stream.ignore('keyword', string='elif'):
            stmt = AST('elif', [])
            stmt.append(parse_expression(stream))
            stmt.append(parse_sub_block(stream, indent))
            block.append(stmt)
        elif stream.ignore('keyword', string='else'):
            stmt = AST('else', [])
            stmt.append(parse_sub_block(stream, indent))
            block.append(stmt)
        else:
            expr = parse_expression(stream)
            if has_sub_block(stream, indent):
                # A trailing sub-block turns the expression into a call.
                expr = AST('call', [expr])
                expr.extend(parse_sub_block(stream, indent))
            block.append(expr)
    return block
def visit(self, node: AST) -> AST: node.marked = True # type: ignore return super().generic_visit(node)
def test_ast_comparison(self):
    """Comparison operators evaluate to 1/0 in last_expr_val."""
    t = Tokenizer()
    a = AST()
    a.parse(t.parse('{@ 0 == 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ 0 != 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    a.parse(t.parse('{@ 1 > 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ 1 < 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    a.parse(t.parse('{@ 1 >= 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ 1 <= 0 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    # Variable operands behave like literals.
    a.parse(t.parse('''{@ v = 0 v == 0 @}'''))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('''{@ lhs = 0 rhs = 0 lhs == rhs @}'''))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    # Mixed-type comparisons are unequal.
    a.parse(t.parse('''{@ lhs = "a" rhs = 0 lhs == rhs @}'''))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    a.parse(t.parse('''{@ lhs = 0 rhs = "a" lhs == rhs @}'''))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    # String comparisons.
    a.parse(t.parse('{@ "a" == "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    a.parse(t.parse('{@ "a" != "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ "a" < "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ "a" > "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    a.parse(t.parse('{@ "a" <= "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ "a" >= "b" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
    # Translation of the (Japanese) note below: this chained expression is
    # True in Python, False in C, and a parse error in PHP/Ruby. Since ==
    # yields a bool (or int), False seems like the right result, though at
    # a glance True also looks plausible. Cap treats it as False for
    # implementation simplicity.
    """
    この式はPythonではTrueになる CではFalseだ PHP,Rubyではパースエラーになる == 演算子の結果が bool(または int)であることを考えればこの式の結果は False になるべきだという印象を受ける しかし、ぱっと見た感じでは True が正しいようにも見える Cap ではこれは実装上の簡易さから False として扱う
    """
    a.parse(t.parse('{@ "a" == "a" == "a" @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 0)
def test_ast_if(self):
    """if / elif / else / end blocks: branching, nesting, and buffer output."""
    t = Tokenizer()
    a = AST()
    a.parse(t.parse('{@ if 1: @}abc{@ end @}'))
    c = a.traverse()
    self.assertEqual(c.buffer, 'abc')
    # Malformed if-statements raise at parse time.
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ if 1 @}{@ end @}'))
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ if @}{@ end @}'))
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ if 1: @}{@ @}'))
    # Branch selection inside a single code block.
    a.parse(t.parse('{@ if 1: v = "v" end @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v')
    a.parse(t.parse('{@ if 0: v = "v" else: v = "v2" end @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('{@ if 0: v = "v" elif 1: v = "v2" end @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('{@ if 0: v = "v" elif 0: v = "v2" else: v = "v3" end @}'))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v3')
    a.parse(t.parse('''{@ if 1: v = "s" end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 's')
    a.parse(t.parse('''{@ if 1: v = "a" elif 2: v = "b" else: v = "c" end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 1: if 2: v = "a" end end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 0: else: if 2: v = "abc" end end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'abc')
    # Branches split across separate {@ @} blocks.
    a.parse(t.parse('''{@ if 1: @}{@ end @}'''))
    c = a.traverse()
    a.parse(t.parse('''{@ if 0: @}{@ elif 1: @}{@ v = "a" @}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 0: @}{@ elif 0: @}{@ else: @}{@ v = "a" @}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ v = "a" @}{@ if 1: @}{@ if 2: @}{{ v }}{@ end @}{@ end @}bbb'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    self.assertEqual(c.buffer, "abbb")
    a.parse(t.parse('''{@ v = "a" @}{@ if 1: @}{{ v }}{{ v }}{@ end @}'''))
    a.parse(t.parse('''{@ v = "cat" if 1: @}{{ v }}{@ end if 1: end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'cat')
    self.assertEqual(c.buffer, 'cat')
    a.parse(t.parse('''{@ v = "a" @}
{@ if 1: @}
{{ v }}
{@ end @}
bbb'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    self.assertEqual(c.buffer, "a\nbbb")
    a.parse(t.parse('''{@ v = "a" if 1: v = "b" @}{{ v }}{@ end @}c'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'b')
    self.assertEqual(c.buffer, 'bc')
    a.parse(t.parse('''{@ if 0: @}{@ else: @}{@ v = "a" @}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 1: @}abc{@ end @}'''))
    c = a.traverse()
    a.parse(t.parse('''{@ v = "a" @}{@ if 1: @}{{ v }}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    self.assertEqual(c.buffer, 'a')
    a.parse(t.parse('''{@ v = "a" @}{@ if 0: @}{@ else: @}{{ v }}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    self.assertEqual(c.buffer, 'a')
    a.parse(t.parse(''' {@ if 0: @} {@ else: @} {@ v = "a" @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse(''' {@ if 0: @} {@ elif 1: @} {@ v = "a" @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    # Nested if blocks.
    a.parse(t.parse('''{@ if 1: @}{@ if 2: @}{@ v = "a" @}{@ end @}{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse(''' {@ if 1: @} {@ if 2: @} {@ v = "a" @} {@ end @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse(''' {@ if 1: @} {@ if 0: @} {@ elif 1: @} {@ v = "a" @} {@ end @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse(''' {@ if 1: @} {@ if 0: @} {@ elif 0: @} {@ else: @} {@ v = "a" @} {@ end @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse(''' {@ if 1: @} {@ if 0: @} {@ elif 1: @} {@ v = "a" @} {@ end @} {@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 1: if 2: v = "a" end end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 1: if 2: v1 = "a" end else: if 0: elif 4: v2 = "b" end end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v1'], 'a')
    a.parse(t.parse(''' {@ if 1: @}{@ if 2: @}{@ v = "a" @}{@ end @}{@ end @} '''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    # Mixed single-block / multi-block nesting.
    a.parse(t.parse('''{@ if 1: if 2: @}{@ v = "a" @}{@ end end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    # Text interleaved with nested blocks is emitted in order.
    a.parse(t.parse('''{@ if 1: @}aaa{@ if 2: @}bbb{@ v = "ccc" @}{{ v }}{@ end @}ddd{@ end @}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'ccc')
    self.assertEqual(c.buffer, 'aaabbbcccddd')
    a.parse(t.parse('''aaa{@ if 1: @}bbb{@ if 2: @}ccc{@ v = "ddd" @}{{ v }}{@ end @}eee{@ end @}fff'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'ddd')
    self.assertEqual(c.buffer, 'aaabbbcccdddeeefff')
    c = a.traverse()
    a.parse(t.parse('''{@ if 1: v = "a" elif 0: v = "b" else: v = "c" end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'a')
    a.parse(t.parse('''{@ if 0: v = "a" elif 1: v = "b" else: v = "c" end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'b')
    a.parse(t.parse('''{@ if 0: v = "a" elif 0: v = "b" else: v = "c" end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'c')
    # Only variables on executed paths appear in syms.
    a.parse(t.parse('''{@ if 0: v1 = "v1" elif 1: v2 = "v2" if 0: v3 = "v3" else: v4 = "v4" end end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual('v1' not in c.syms.keys(), True)
    self.assertEqual(c.syms['v2'], 'v2')
    self.assertEqual('v3' not in c.syms.keys(), True)
    self.assertEqual(c.syms['v4'], 'v4')
    a.parse(t.parse('''{@ if 1: if 2: if 3: v = "v" end end end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v')
    a.parse(t.parse('''{@ if 1: if 2: if 3: v = "v" end v = "v2" end end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('''{@ if 1: if 2: if 3: v = "v" end end v = "v2" end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('''{@ if 1: v = "v" if 2: if 3: v = "v2" end end end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
    a.parse(t.parse('''{@ if 1: v = "v" if 2: v = "v2" if 3: end end end @}{{ a }}'''))
    c = a.traverse()
    self.assertEqual(c.syms['v'], 'v2')
def test_ast_import(self):
    """'import alias' / 'import config' expose those modules to templates."""
    t = Tokenizer()
    a = AST()
    # A bare 'import' with no module name is a syntax error.
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ import @}'))
    # An unterminated block is tolerated at parse time.
    a.parse(t.parse('{@ import alias'))
    c = a.traverse()
    self.assertEqual(c.imported_alias, True)
    a.parse(t.parse('{@ import alias @}'))
    c = a.traverse()
    self.assertEqual(c.imported_alias, True)
    # Multiple imports, interleaved with text or back to back.
    a.parse(t.parse('aaa{@ import alias @}bbb{@ import config @}ccc'))
    c = a.traverse()
    self.assertEqual(c.imported_alias, True)
    self.assertEqual(c.imported_config, True)
    a.parse(t.parse('{@ import alias @}{@ import config @}'))
    c = a.traverse()
    self.assertEqual(c.imported_alias, True)
    self.assertEqual(c.imported_config, True)
    # Imported modules are usable in the same block.
    a.parse(t.parse('''{@ import alias alias.set("dtl", "run bin/date-line/date-line.py") @}'''))
    c = a.traverse()
    self.assertEqual(c.alias_map['dtl'], 'run bin/date-line/date-line.py')
    a.parse(t.parse('''{@ import config config.set("editor", "subl") @}'''))
    c = a.traverse()
    self.assertEqual(c.config_map['editor'], 'subl')
def test_ast_expr(self):
    """Arithmetic expressions: operators, precedence, parentheses, assignment."""
    t = Tokenizer()
    a = AST()
    a.parse(t.parse('{@ 1 + 2 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 3)
    a.parse(t.parse('{@ 2 - 1 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 1)
    a.parse(t.parse('{@ 2 * 3 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 6)
    a.parse(t.parse('{@ 4 / 2 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 2)
    # Precedence: * and / bind tighter than + and -.
    a.parse(t.parse('{@ 1 + 2 * 3 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 7)
    a.parse(t.parse('{@ 1 + 2 * 3 / 2 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 4)
    a.parse(t.parse('{@ (1 + 2) * 3 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 9)
    # An unclosed parenthesis is a syntax error.
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ (1 + 2 @}'))
    # Assignments record both last_expr_val and the symbol value.
    a.parse(t.parse('{@ v = 1 + 2 @}'))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 3)
    self.assertEqual(c.syms['v'], 3)
    a.parse(t.parse('''{@ a = 1 + 2 v = a + 3 @}'''))
    c = a.traverse()
    self.assertEqual(c.last_expr_val, 6)
    self.assertEqual(c.syms['v'], 6)
# obj_f.close() # # end = time.time() # # print(f"*Output: '{f_name}'") # # print("Execution time: " + str(end - start) + "ms") # ======================================================= # = CSharp Compiltation = # ======================================================= # msgs_log.print_title("Doing CSharp Compilation") # start = time.time() # csc = os.environ['CSharpComp'] # src = os.path.splitext(f_name)[0] # dst = args.dst if args.dst else f"{f_name}" # os.system(f'{csc}/csc -optimize /nologo -out:\"{dst}.exe\" \"{src}.cs\"') # end = time.time() # print(f"*Output: '{src}.exe'") # print("Execution time: " + str(end - start) + "ms") # ======================================================= # = RunExecution time = # ======================================================= if not parser.error: # start = time.time() # msgs_log.print_title("Runtime") interpreter = Interpreter(None) interpreter.visit(ast) # end = time.time() # print("Execution time: " + str(end - start) + "ms") ast2 = AST(ast, parser.current_symb_tbl) # ast2.print()
def parse_program(feed):
    """Parse newline-separated statements into a 'program' node, skipping 'pass'."""
    program = AST('program', [])
    while feed.ignore('newline'):
        if feed.ignore('keyword', 'pass'):
            continue  # 'pass' lines contribute nothing
        program.append(parse_stmt(feed))
    return program
def p_number_rule(self, args):
    '''
        number_rule ::= number _number
    '''
    # Encode the numeric value as its ASCII digit character.
    digit = chr(ord('0') + args[1])
    return AST('char', [digit])
def test_ast_basic(self):
    """Smoke tests: empty input, plain text, unterminated blocks, bad syntax."""
    t = Tokenizer()
    a = AST()
    # Empty input builds no tree at all.
    a.parse(t.parse(''))
    self.assertEqual(a.root, None)
    a.parse(t.parse('abc'))
    c = a.traverse()
    self.assertEqual(a.root.text_block.text, 'abc')
    # Unterminated blocks are tolerated at parse time.
    a.parse(t.parse('{@'))
    a.parse(t.parse('{@ v = "v"'))
    # A stray colon-expression is a syntax error.
    with self.assertRaises(AST.SyntaxError):
        a.parse(t.parse('{@ 1: @}'))