def get_components(filename, names):
    """Read a Calvin script file and extract the named components.

    Returns a tuple (components, issuetracker): the components found by
    calvin_components(), or an empty list plus an issuetracker holding the
    error if the file could not be read.
    """
    try:
        with open(filename, 'r') as source:
            source_text = source.read()
    except (IOError, OSError):
        # Was a bare 'except:', which also swallowed KeyboardInterrupt and
        # SystemExit; only file-access failures should map to this issue.
        from calvin.utilities.issuetracker import IssueTracker
        it = IssueTracker()
        it.add_error('File not found', {'script': filename})
        return [], it
    return calvin_components(source_text, names)
def parse(self, source_text):
    """Parse source_text; return a tuple (ir, issuetracker)."""
    self.source_text = source_text
    self.issuetracker = IssueTracker()
    try:
        tree = self.parser.parse(source_text)
    except SyntaxError as err:
        position = {'line': err.lineno, 'col': err.offset}
        self.issuetracker.add_error(err.text, position)
        # Hand back an empty node so callers always get an AST.
        tree = ast.Node()
    return tree, self.issuetracker
def parse(self, source_text, logger=None):
    """Parse source_text into an AST; return a tuple (ir, issuetracker)."""
    self.source_text = source_text
    self.issuetracker = IssueTracker()
    root = None
    try:
        root = self.parser.parse(source_text, debug=logger)
    except SyntaxError as err:
        position = {'line': err.lineno, 'col': err.offset}
        self.issuetracker.add_error(err.text, position)
    # A failed parse leaves root as None; substitute an empty node.
    ir = root if root else ast.Node()
    return ir, self.issuetracker
def route_request(self, handle, connection, command, headers, data):
    """Dispatch an incoming control-API request to its route handler.

    While the node is quitting, every request is answered with an internal
    error. Otherwise the handler matching 'command' is invoked with the
    JSON-decoded body; unknown routes get a 404 and any handler/parse
    failure is reported as BAD_REQUEST.
    """
    if self.node.quitting:
        # Answer internal error on all requests while quitting, assume client can handle that
        # TODO: Answer permantely moved (301) instead with header Location: <another-calvin-runtime>???
        self.send_response(handle, connection, None, status=calvinresponse.INTERNAL_ERROR)
        return
    try:
        # Removed two dead locals from the original: an unused
        # 'issuetracker = IssueTracker()' and 'credentials = None'.
        handler, match = self._handler_for_route(command)
        if handler:
            if data:
                data = json.loads(data)
            # Lazy %-args: the message is only formatted if debug is enabled.
            _log.debug("Calvin control handles:%s\n%s\n---------------", command, data)
            handler(handle, connection, match, data, headers)
        else:
            _log.error("No route found for: %s\n%s", command, data)
            self.send_response(handle, connection, None, status=404)
    except Exception as e:
        # Broad catch is deliberate: this is the top-level request boundary.
        _log.info("Failed to parse request", exc_info=e)
        self.send_response(handle, connection, None, status=calvinresponse.BAD_REQUEST)
def process(self, source_file, issuetracker=None):
    """Process source_file; return a tuple (result, issuetracker)."""
    self.source_file = _expand_path(source_file)
    self.path = _expand_path(os.path.dirname(source_file))
    # Reuse a caller-supplied tracker when given, otherwise start fresh.
    self.issuetracker = issuetracker if issuetracker else IssueTracker()
    self.line_number = 0
    return self._process(), self.issuetracker
def parse(self, source_text):
    """Return (ir, issuetracker) for the given source text."""
    self.source_text = source_text
    self.issuetracker = IssueTracker()
    try:
        tree = self.parser.parse(source_text)
    except SyntaxError as e:
        where = {"line": e.lineno, "col": e.offset}
        self.issuetracker.add_error(e.text, where)
        # Syntax errors still yield an (empty) AST for the caller.
        tree = ast.Node()
    return tree, self.issuetracker
def _handle_policy_decision(data, appname, verify, access_decision, org_cb, security=None):
    """Continue deployment after a policy decision.

    On a positive decision, produce a deployable (compiling the script if
    needed) and pass it to org_cb; otherwise report UNAUTHORIZED via org_cb.
    """
    if not access_decision:
        _log.error("Access denied")
        # This error reason is detected in calvin control and gives proper REST response
        _exit_with_error(org_cb)
        return
    if 'app_info' in data:
        # Pre-compiled application info supplied; nothing to compile.
        deployable = data['app_info']
        issuetracker = IssueTracker()
    elif 'script' in data:
        deployable, issuetracker = compile_script(data['script'], appname)
    else:
        _log.error("Neither app_info or script supplied")
        # This error reason is detected in calvin control and gives proper REST response
        _exit_with_error(org_cb)
        return
    org_cb(deployable, issuetracker, security=security)
def visualize_component(source_text, name):
    """STUB: component visualization is not implemented yet.

    Always returns a placeholder digraph string and a tracker holding the
    not-implemented error.
    """
    from calvin.utilities.issuetracker import IssueTracker
    tracker = IssueTracker()
    tracker.add_error('Visualizing components not yet implemented.')
    return "digraph structs {ERROR}", tracker
class CalvinParser(object):
    """PLY-based parser for CalvinScript.

    NOTE: the docstring of each p_* method is the yacc grammar rule for that
    production — do not edit those strings without intending to change the
    grammar. Actions build calvin AST nodes via the 'ast' module.
    """
    def __init__(self, lexer=None):
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=False)
        # Since the parse may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=True, optimize=False, outputdir=containing_dir)

    # Token list shared with the lexer rules module (required by PLY).
    tokens = calvin_tokens

    # Operator precedence for the deploy-rule expressions.
    precedence = (
        ('left', 'OR'),
        ('left', 'AND'),
        ('right', 'UNOT'),
    )

    def p_script(self, p):
        """script : opt_constdefs opt_compdefs opt_program"""
        root = ast.Node()
        root.add_children(p[1] + p[2] + p[3])
        p[0] = root

    def p_empty(self, p):
        """empty : """
        pass

    def p_opt_constdefs(self, p):
        """opt_constdefs : constdefs
                         | empty"""
        p[0] = p[1] or []

    def p_constdefs(self, p):
        """constdefs : constdefs constdef
                     | constdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_constdef(self, p):
        """constdef : DEFINE identifier EQ argument"""
        constdef = ast.Constant(ident=p[2], arg=p[4], debug_info=self.debug_info(p, 1))
        p[0] = constdef

    def p_opt_compdefs(self, p):
        """opt_compdefs : compdefs
                        | empty"""
        p[0] = p[1] or []

    def p_compdefs(self, p):
        """compdefs : compdefs compdef
                    | compdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_compdef(self, p):
        """compdef : COMPONENT qualified_name LPAREN opt_argnames RPAREN opt_argnames RARROW opt_argnames LBRACE docstring comp_statements RBRACE"""
        p[0] = ast.Component(name=p[2], arg_names=p[4], inports=p[6], outports=p[8], docstring=p[10], program=p[11], debug_info=self.debug_info(p, 1))

    def p_docstring(self, p):
        """docstring : DOCSTRING
                     | empty"""
        # Default docstring nags the component author.
        p[0] = p[1] or "Someone(TM) should write some documentation for this component."

    def p_comp_statements(self, p):
        """comp_statements : comp_statements comp_statement
                           | comp_statement"""
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[2]]

    def p_comp_statement(self, p):
        """comp_statement : assignment
                          | port_property
                          | internal_port_property
                          | link"""
        p[0] = p[1]

    def p_opt_program(self, p):
        """opt_program : program
                       | empty"""
        # A non-empty program is wrapped in a Block carrying the script namespace.
        p[0] = [] if p[1] is None else [ast.Block(program=p[1], namespace='__scriptname__', debug_info=self.debug_info(p, 1))]

    def p_program(self, p):
        """program : program statement
                   | statement """
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[2]]

    def p_statement(self, p):
        """statement : assignment
                     | port_property
                     | link
                     | define_rule
                     | group
                     | apply"""
        p[0] = p[1]

    def p_assignment(self, p):
        """assignment : IDENTIFIER COLON qualified_name LPAREN named_args RPAREN"""
        p[0] = ast.Assignment(ident=p[1], actor_type=p[3], args=p[5], debug_info=self.debug_info(p, 1))

    def p_opt_direction(self, p):
        """opt_direction : LBRACK IDENTIFIER RBRACK
                         | empty"""
        if p[1] is None:
            p[0] = None
        else:
            # Only 'in'/'out' are valid; invalid values are reported but kept.
            if p[2] not in ['in', 'out']:
                info = {
                    'line': p.lineno(2),
                    'col': self._find_column(p.lexpos(2))
                }
                self.issuetracker.add_error('Invalid direction ({}).'.format(p[2]), info)
            p[0] = p[2]

    def p_port_property(self, p):
        """port_property : qualified_port opt_direction LPAREN named_args RPAREN"""
        _, (actor, port), direction, _, args, _ = p[:]
        p[0] = ast.PortProperty(actor=actor, port=port, direction=direction, args=args, debug_info=self.debug_info(p, 3))

    def p_internal_port_property(self, p):
        """internal_port_property : unqualified_port opt_direction LPAREN named_args RPAREN"""
        # unqualified_port yields (None, port): actor is None for internal ports.
        _, (actor, port), direction, _, args, _ = p[:]
        p[0] = ast.PortProperty(actor=actor, port=port, direction=direction, args=args, debug_info=self.debug_info(p, 3))

    def p_link_error(self, p):
        """link : void GT void"""
        # 'void > void' connects nothing to nothing; flag it.
        info = {
            'line': p.lineno(2),
            'col': self._find_column(p.lexpos(2))
        }
        self.issuetracker.add_error('Pointless construct.', info)

    def p_link(self, p):
        """link : real_outport GT void
                | void GT real_inport_list
                | real_outport GT inport_list
                | implicit_outport GT inport_list
                | internal_outport GT inport_list"""
        p[0] = ast.Link(outport=p[1], inport=p[3], debug_info=self.debug_info(p, 1))

    def p_void(self, p):
        """void : VOIDPORT"""
        p[0] = ast.Void(debug_info=self.debug_info(p, 1))

    def p_inport_list(self, p):
        """inport_list : inport_list COMMA inport
                       | inport"""
        if len(p) == 2:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
        else:
            p[1].add_child(p[3])
            p[0] = p[1]

    def p_real_inport_list(self, p):
        """real_inport_list : inport_list COMMA real_inport
                            | real_inport"""
        if len(p) == 2:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
        else:
            p[1].add_child(p[3])
            p[0] = p[1]

    def p_inport(self, p):
        """inport : real_or_internal_inport
                  | transformed_inport"""
        p[0] = p[1]

    def p_transformed_inport(self, p):
        """transformed_inport : port_transform real_or_internal_inport"""
        # port_transform yields (value, label).
        arg, label = p[1]
        p[0] = ast.TransformedPort(port=p[2], value=arg, label=label, debug_info=self.debug_info(p, 2))

    def p_implicit_outport(self, p):
        """implicit_outport : argument
                            | label argument"""
        arg, label = (p[1], None) if len(p) == 2 else (p[2], p[1])
        p[0] = ast.ImplicitPort(arg=arg, label=label, debug_info=self.debug_info(p, 1))

    def p_real_or_internal_inport(self, p):
        """real_or_internal_inport : real_inport
                                   | internal_inport"""
        p[0] = p[1]

    def p_opt_tag(self, p):
        """opt_tag : AT tag_value
                   | empty"""
        p[0] = p[1] if p[1] is None else p[2]

    def p_tag_value(self, p):
        """tag_value : NUMBER
                     | STRING"""
        # FIXME: Verify that number is positive integer
        p[0] = p[1]

    def p_real_inport(self, p):
        """real_inport : opt_tag qualified_port"""
        _, tag, (actor, port) = p[:]
        p[0] = ast.InPort(actor=actor, port=port, tag=tag, debug_info=self.debug_info(p, 2))

    def p_real_outport(self, p):
        """real_outport : qualified_port"""
        actor, port = p[1]
        p[0] = ast.OutPort(actor=actor, port=port, debug_info=self.debug_info(p, 1))

    def p_internal_inport(self, p):
        """internal_inport : unqualified_port"""
        _, port = p[1]
        p[0] = ast.InternalInPort(port=port, debug_info=self.debug_info(p, 1))

    def p_internal_outport(self, p):
        """internal_outport : unqualified_port"""
        _, port = p[1]
        p[0] = ast.InternalOutPort(port=port, debug_info=self.debug_info(p, 1))

    def p_port_transform(self, p):
        """port_transform : SLASH argument SLASH
                          | SLASH label argument SLASH"""
        # Result is (value, label); label is None in the unlabelled form.
        p[0] = (p[2], None) if len(p) == 4 else (p[3], p[2])

    def p_qualified_port(self, p):
        """qualified_port : IDENTIFIER DOT IDENTIFIER"""
        p[0] = (p[1], p[3])

    def p_unqualified_port(self, p):
        """unqualified_port : DOT IDENTIFIER"""
        p[0] = (None, p[2])

    def p_label(self, p):
        """label : COLON identifier"""
        p[0] = p[2]

    def p_named_args(self, p):
        """named_args : named_args named_arg COMMA
                      | named_args named_arg
                      | empty"""
        p[0] = p[1] + [p[2]] if p[1] is not None else []

    def p_named_arg(self, p):
        """named_arg : identifier EQ argument"""
        p[0] = ast.NamedArg(ident=p[1], arg=p[3], debug_info=self.debug_info(p, 1))

    def p_argument(self, p):
        """argument : value
                    | identifier"""
        p[0] = p[1]

    def p_opt_argnames(self, p):
        """opt_argnames : argnames
                        | empty"""
        p[0] = p[1] if p[1] is not None else []

    def p_argnames(self, p):
        """argnames : argnames COMMA IDENTIFIER
                    | IDENTIFIER"""
        p[0] = [p[1]] if len(p) == 2 else p[1] + [p[3]]

    # def p_opt_identifiers(self, p):
    #     """opt_identifiers : identifiers
    #     | empty"""
    #     p[0] = [p[1]] if p[1] is not None else []

    def p_identifiers(self, p):
        """identifiers : identifiers COMMA identifier
                       | identifier"""
        p[0] = [p[1]] if len(p) == 2 else p[1] + [p[3]]

    def p_identifier(self, p):
        """identifier : IDENTIFIER"""
        p[0] = ast.Id(ident=p[1], debug_info=self.debug_info(p, 1))

    # Concatenation of strings separated only by whitespace
    # since linebreaks are not allowed inside strings
    def p_string(self, p):
        """string : STRING
                  | string STRING"""
        p[0] = p[1] if len(p) == 2 else p[1] + p[2]

    def p_value(self, p):
        """value : dictionary
                 | array
                 | bool
                 | null
                 | NUMBER
                 | string"""
        p[0] = ast.Value(value=p[1], debug_info=self.debug_info(p, 1))

    def p_bool(self, p):
        """bool : TRUE
                | FALSE"""
        p[0] = bool(p.slice[1].type == 'TRUE')

    def p_null(self, p):
        """null : NULL"""
        p[0] = None

    def p_dictionary(self, p):
        """dictionary : LBRACE members RBRACE"""
        p[0] = dict(p[2])

    def p_members(self, p):
        """members : members member COMMA
                   | members member
                   | empty"""
        p[0] = p[1] + [p[2]] if p[1] is not None else []

    def p_member(self, p):
        """member : string COLON value"""
        # Unwrap the ast.Value: dictionaries hold plain python values.
        p[0] = (p[1], p[3].value)

    def p_values(self, p):
        """values : values value COMMA
                  | values value
                  | empty"""
        p[0] = p[1] + [p[2].value] if p[1] is not None else []

    def p_array(self, p):
        """array : LBRACK values RBRACK"""
        p[0] = p[2]

    def p_qualified_name(self, p):
        """qualified_name : qualified_name DOT IDENTIFIER
                          | IDENTIFIER"""
        if len(p) == 4:
            # Concatenate name
            p[0] = p[1] + p[2] + p[3]
        else:
            p[0] = p[1]

    ########################################################################################
    #
    # Deploy rules
    #
    # defrule : RULE identifier COLON rule
    #
    # rule : rule op rule
    #      | LPAREN rule RPAREN
    #      | UNOT rule
    #      | predicate
    #      | identifier
    #
    # op : AND
    #    | OR
    #
    # predicate : identifier LPAREN named_arguments RPAREN
    #
    # apply : APPLY identifier_list COLON rule
    # (or possibly)
    # apply : APPLY rule COLON identifier_list
    #
    ########################################################################################

    def p_define_rule(self, p):
        """define_rule : RULE identifier COLON rule"""
        p[0] = ast.RuleDefinition(name=p[2], rule=p[4], debug_info=self.debug_info(p, 1))

    def p_combine_rule(self, p):
        """rule : rule AND rule
                | rule OR rule"""
        p[0] = ast.SetOp(left=p[1], op=p[2], right=p[3], debug_info=self.debug_info(p, 1))

    def p_negate_rule(self, p):
        """rule : UNOT rule"""
        p[0] = ast.UnarySetOp(rule=p[2], op=p[1], debug_info=self.debug_info(p, 1))

    def p_subrule(self, p):
        """rule : LPAREN rule RPAREN"""
        p[0] = p[2]

    def p_rule(self, p):
        """rule : identifier
                | predicate"""
        p[0] = p[1]

    def p_predicate(self, p):
        """predicate : identifier LPAREN named_args RPAREN"""
        p[0] = ast.RulePredicate(predicate=p[1], args=p[3], debug_info=self.debug_info(p, 1))

    def p_apply(self, p):
        """apply : APPLY identifiers COLON rule"""
        # or possibly
        # """apply : APPLY rule COLON identifiers"""
        p[0] = ast.RuleApply(optional=False, targets=p[2], rule=p[4], debug_info=self.debug_info(p, 1))

    def p_group(self, p):
        """group : GROUP identifier COLON identifiers"""
        p[0] = ast.Group(group=p[2], members=p[4], debug_info=self.debug_info(p, 1))

    # Error rule for syntax errors
    def p_error(self, token):
        if not token:
            # Unexpected EOF
            lines = self.source_text.splitlines()
            info = {
                'line': len(lines),
                'col': len(lines[-1])
            }
            self.issuetracker.add_error('Unexpected end of file.', info)
            return
        info = {
            'line': token.lineno,
            'col': self._find_column(token.lexpos)
        }
        self.issuetracker.add_error('Syntax error.', info)

    def _find_column(self, lexpos):
        last_cr = self.source_text.rfind('\n', 0, lexpos)
        # rfind returns -1 if not found, i.e. on 1st line,
        # which is exactly what we need in that case...
        column = lexpos - last_cr
        return column

    def debug_info(self, p, n):
        # Line/column of the n:th symbol of the production, for diagnostics.
        info = {
            'line': p.lineno(n),
            'col': self._find_column(p.lexpos(n))
        }
        return info

    def parse(self, source_text, logger=None):
        # return ir (AST) and issuetracker
        self.issuetracker = IssueTracker()
        self.source_text = source_text
        root = None
        try:
            root = self.parser.parse(source_text, debug=logger)
        except SyntaxError as e:
            self.issuetracker.add_error(e.text, {'line': e.lineno, 'col': e.offset})
        finally:
            ir = root or ast.Node()
        return ir, self.issuetracker
def _exit_with_error(callback):
    """Report an UNAUTHORIZED (401) error to callback via an issue tracker."""
    tracker = IssueTracker()
    tracker.add_error("UNAUTHORIZED", info={'status': 401})
    callback({}, tracker)
def compile_script_check_security(data, filename, cb, security=None, content=None, verify=True, node=None, signature=None):
    """ Compile a script and return a tuple (deployable, errors, warnings).

    'credentials' are optional security credentials(?)
    'verify' is deprecated and will be removed
    'node' is the runtime performing security check(?)
    'cb' is a CalvinCB callback

    N.B. If callback 'cb' is given, this method calls cb(deployable, errors, warnings) and returns None
    N.B. If callback 'cb' is given, and method runs to completion, cb is called with additional parameter 'security' (?)
    """

    def _exit_with_error(callback):
        """Helper method to generate a proper error"""
        it = IssueTracker()
        it.add_error("UNAUTHORIZED", info={'status': 401})
        callback({}, it)

    def _handle_policy_decision(data, appname, verify, access_decision, org_cb, security=None):
        # Continuation invoked after the security policy check.
        if not access_decision:
            _log.error("Access denied")
            # This error reason is detected in calvin control and gives proper REST response
            _exit_with_error(org_cb)
            return
        if 'app_info' not in data and 'script' in data:
            deployable, issuetracker = compile_script(data['script'], appname)
        elif 'app_info' in data:
            deployable = data['app_info']
            issuetracker = IssueTracker()
        else:
            _log.error("Neither app_info or script supplied")
            # This error reason is detected in calvin control and gives proper REST response
            _exit_with_error(org_cb)
            return
        org_cb(deployable, issuetracker, security=security)

    #
    # Actual code for compile_script
    #
    appname = appname_from_filename(filename)
    # FIXME: if node is None we bypass security even if enabled. Is that the intention?
    if node is not None and security_enabled():
        # FIXME: If cb is None, we will return from this method with None instead of a tuple, failing silently
        if security:
            sec = security
        else:
            sec = Security(node)
        verified, signer = sec.verify_signature_content(content, "application")
        if not verified:
            # Verification not OK if sign or cert not OK.
            _log.error("Failed application verification")
            # This error reason is detected in calvin control and gives proper REST response
            _exit_with_error(cb)
            return
        sec.check_security_policy(
            CalvinCB(_handle_policy_decision, data, appname, verify, security=security, org_cb=cb),
            element_type="application",
            element_value=signer)
        return
    #
    # We get here if node is None, or security is disabled
    #
    # This used to be
    # _handle_policy_decision(data, filename, verify, access_decision=True, security=None, org_cb=cb)
    # but since _handle_policy_decision is called with access_decision=True, security=None only compile_script would be called
    if 'app_info' not in data and 'script' in data:
        deployable, issuetracker = compile_script(data['script'], appname)
    elif 'app_info' in data:
        deployable = data['app_info']
        issuetracker = IssueTracker()
    else:
        _log.error("Neither app_info or script supplied")
        # This error reason is detected in calvin control and gives proper REST response
        # BUG FIX: was '_exit_with_error(org_cb)' — 'org_cb' is undefined in this
        # scope (it is a parameter of the nested helper only) and raised NameError.
        _exit_with_error(cb)
        return
    cb(deployable, issuetracker, security=None)
class CalvinParser(object):
    """PLY-based parser for CalvinScript with deploy-rule support.

    This variant splits the parse result into two trees: the script program
    and the deployment statements (rules/groups/applies). NOTE: each p_*
    method's docstring is the yacc grammar rule for that production — do not
    edit those strings without intending to change the grammar.
    """
    def __init__(self, lexer=None):
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=False)
        # Since the parse may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=False, optimize=True, outputdir=containing_dir)

    # Token list shared with the lexer rules module (required by PLY).
    tokens = calvin_tokens

    def p_script(self, p):
        """script : opt_constdefs opt_compdefs opt_program"""
        # Build two sibling trees under one root: the script (consts,
        # components, program block) and the deploy statements. The script
        # subtree is cloned so the two trees do not share nodes.
        s = ast.Node()
        s.add_children(p[1] + p[2] + p[3][0])
        root = ast.Node()
        root.add_child(s.clone())
        d = ast.Node()
        d.add_children(p[1] + p[3][1])
        root.add_child(d)
        p[0] = root

    def p_opt_constdefs(self, p):
        """opt_constdefs :
                         | constdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_constdefs(self, p):
        """constdefs : constdefs constdef
                     | constdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_constdef(self, p):
        """constdef : DEFINE identifier EQ argument"""
        constdef = ast.Constant(ident=p[2], arg=p[4], debug_info=self.debug_info(p, 1))
        p[0] = constdef

    def p_opt_compdefs(self, p):
        """opt_compdefs :
                        | compdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_compdefs(self, p):
        """compdefs : compdefs compdef
                    | compdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_compdef(self, p):
        """compdef : COMPONENT qualified_name LPAREN identifiers RPAREN identifiers RARROW identifiers LBRACE docstring comp_statements RBRACE"""
        p[0] = ast.Component(name=p[2], arg_names=p[4], inports=p[6], outports=p[8], docstring=p[10], program=p[11], debug_info=self.debug_info(p, 1))

    def p_docstring(self, p):
        """docstring :
                     | DOCSTRING """
        # Default docstring nags the component author.
        if len(p) == 1:
            p[0] = "Someone(TM) should write some documentation for this component."
        else:
            p[0] = p[1]

    def p_comp_statements(self, p):
        """comp_statements : comp_statements comp_statement
                           | comp_statement"""
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[2]]

    def p_comp_statement(self, p):
        """comp_statement : assignment
                          | port_property
                          | internal_port_property
                          | link"""
        p[0] = p[1]

    def p_opt_program(self, p):
        """opt_program :
                       | program"""
        # Result is a pair: [program-statements-as-block, deploy-statements].
        if len(p) == 1:
            p[0] = [[], []]
        else:
            p[0] = [[ast.Block(program=p[1][0], namespace='__scriptname__', debug_info=self.debug_info(p, 1))], p[1][1]]

    def p_program(self, p):
        """program : program statement
                   | statement """
        # Route deploy statements (Group/Rule/RuleApply) into the second list,
        # everything else into the first.
        if len(p) == 2:
            if type(p[1]) in [ast.Group, ast.Rule, ast.RuleApply]:
                p[0] = [[], [p[1]]]
            else:
                p[0] = [[p[1]], []]
        else:
            if type(p[2]) in [ast.Group, ast.Rule, ast.RuleApply]:
                p[0] = [p[1][0], p[1][1] + [p[2]]]
            else:
                p[0] = [p[1][0] + [p[2]], p[1][1]]

    def p_statement(self, p):
        """statement : assignment
                     | port_property
                     | link
                     | rule
                     | group
                     | apply"""
        p[0] = p[1]

    def p_group(self, p):
        """group : GROUP identifier COLON ident_list"""
        p[0] = ast.Group(group=p[2], members=p[4], debug_info=self.debug_info(p, 1))

    def p_ident_list(self, p):
        """ident_list :
                      | ident_list identifier COMMA
                      | ident_list identifier"""
        if len(p) > 2:
            p[1].append(p[2])
        p[0] = p[1] if len(p) > 1 else []

    def p_rule(self, p):
        """rule : RULE identifier COLON expression"""
        p[0] = ast.Rule(rule=p[2], expression=p[4], debug_info=self.debug_info(p, 1))

    def p_expression(self, p):
        """expression : expression predicate
                      | first_predicate"""
        if len(p) > 2:
            p[1].add_child(p[2])
            p[0] = p[1]
        else:
            p[0] = ast.RuleExpression(first_predicate=p[1])

    def p_first_predicate(self, p):
        """first_predicate : identifier
                           | NOT identifier
                           | identifier LPAREN named_args RPAREN
                           | NOT identifier LPAREN named_args RPAREN"""
        # print p[1], p[3], self.debug_info(p, 1)
        if len(p) == 2:
            # identifier
            p[0] = ast.RulePredicate(predicate=p[1], type="rule", debug_info=self.debug_info(p, 1))
        elif len(p) == 3:
            # NOT identifier
            p[0] = ast.RulePredicate(predicate=p[2], type="rule", op=ast.RuleSetOp(op="~"), debug_info=self.debug_info(p, 1))
        elif len(p) == 5:
            # identifier LPAREN named_args RPAREN
            p[0] = ast.RulePredicate(predicate=p[1], type="constraint", args=p[3], debug_info=self.debug_info(p, 1))
        else:
            # NOT identifier LPAREN named_args RPAREN
            p[0] = ast.RulePredicate(predicate=p[2], type="constraint", op=ast.RuleSetOp(op="~"), args=p[4], debug_info=self.debug_info(p, 1))

    def p_predicate(self, p):
        """predicate : setop identifier
                     | setop identifier LPAREN named_args RPAREN"""
        # print p[1], p[3], self.debug_info(p, 1)
        if len(p) == 3:
            # setop identifier
            p[0] = ast.RulePredicate(predicate=p[2], type="rule", op=p[1], debug_info=self.debug_info(p, 1))
        else:
            # setop identifier LPAREN named_args RPAREN
            p[0] = ast.RulePredicate(predicate=p[2], type="constraint", op=p[1], args=p[4], debug_info=self.debug_info(p, 1))

    def p_setop(self, p):
        """setop : AND
                 | OR
                 | AND NOT
                 | OR NOT"""
        # print p[1], self.debug_info(p, 1)
        if len(p) == 2:
            p[0] = ast.RuleSetOp(op=p[1])
        else:
            # e.g. 'AND NOT' combines into a single op string.
            p[0] = ast.RuleSetOp(op=p[1] + p[2])

    def p_apply(self, p):
        """apply : APPLY ident_list COLON expression
                 | APPLY STAR ident_list COLON expression"""
        # The STAR form marks the apply as optional.
        if len(p) == 5:
            # print p[2], p[4], self.debug_info(p, 1)
            p[0] = ast.RuleApply(optional=False, targets=p[2], rule=p[4], debug_info=self.debug_info(p, 1))
        else:
            # print p[2], p[3], p[5], self.debug_info(p, 1)
            p[0] = ast.RuleApply(optional=True, targets=p[3], rule=p[5], debug_info=self.debug_info(p, 1))

    def p_assignment(self, p):
        """assignment : IDENTIFIER COLON qualified_name LPAREN named_args RPAREN"""
        p[0] = ast.Assignment(ident=p[1], actor_type=p[3], args=p[5], debug_info=self.debug_info(p, 1))

    def p_opt_direction(self, p):
        """opt_direction :
                         | LBRACK IDENTIFIER RBRACK"""
        if len(p) == 1:
            p[0] = None
        else:
            # Only 'in'/'out' are valid; invalid values are reported but kept.
            if p[2] not in ['in', 'out']:
                info = {
                    'line': p.lineno(2),
                    'col': self._find_column(p.lexpos(2))
                }
                self.issuetracker.add_error(
                    'Invalid direction ({}).'.format(p[2]), info)
            p[0] = p[2]

    def p_port_property(self, p):
        """port_property : IDENTIFIER DOT IDENTIFIER opt_direction LPAREN named_args RPAREN"""
        p[0] = ast.PortProperty(actor=p[1], port=p[3], direction=p[4], args=p[6], debug_info=self.debug_info(p, 1))

    def p_internal_port_property(self, p):
        """internal_port_property : DOT IDENTIFIER opt_direction LPAREN named_args RPAREN"""
        p[0] = ast.PortProperty(actor=None, port=p[2], direction=p[3], args=p[5], debug_info=self.debug_info(p, 1))

    def p_link(self, p):
        """link : outport GT port
                | outport GT portlist
                | outport GT void
                | implicit_port GT port
                | implicit_port GT portlist
                | internal_outport GT inport
                | internal_outport GT inportlist
                | void GT inport
                | void GT inportlist
        """
        p[0] = ast.Link(outport=p[1], inport=p[3], debug_info=self.debug_info(p, 1))

    def p_link_error(self, p):
        """link : internal_outport GT internal_inport"""
        info = {'line': p.lineno(2), 'col': self._find_column(p.lexpos(2))}
        self.issuetracker.add_error(
            'Component inport connected directly to outport.', info)

    # def p_portmap(self, p):
    #     """portmap : port GT internal_port
    #     | internal_port GT port"""
    #     p[0] = ast.Portmap(p[1], p[3])

    def p_void(self, p):
        """void : VOIDPORT"""
        p[0] = ast.Void(debug_info=self.debug_info(p, 1))

    def p_portlist(self, p):
        """portlist : portlist COMMA port
                    | port COMMA port"""
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_inportlist(self, p):
        """inportlist : inportlist COMMA inport
                      | inport COMMA inport"""
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_port(self, p):
        """port : inport
                | internal_inport
                | transformed_inport"""
        p[0] = p[1]

    def p_transformed_inport(self, p):
        """transformed_inport : SLASH argument SLASH port
                              | SLASH COLON identifier argument SLASH port"""
        if len(p) > 5:
            p[0] = ast.TransformedPort(port=p[6], value=p[4], label=p[3], debug_info=self.debug_info(p, 4))
        else:
            p[0] = ast.TransformedPort(port=p[4], value=p[2], debug_info=self.debug_info(p, 4))

    def p_implicit_port(self, p):
        """implicit_port : argument
                         | COLON identifier argument"""
        if len(p) > 2:
            p[0] = ast.ImplicitPort(arg=p[3], label=p[2], debug_info=self.debug_info(p, 1))
        else:
            p[0] = ast.ImplicitPort(arg=p[1], debug_info=self.debug_info(p, 1))

    def p_inport(self, p):
        """inport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.InPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_outport(self, p):
        """outport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.OutPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_internal_inport(self, p):
        """internal_inport : DOT IDENTIFIER"""
        p[0] = ast.InternalInPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_internal_outport(self, p):
        """internal_outport : DOT IDENTIFIER"""
        p[0] = ast.InternalOutPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_named_args(self, p):
        """named_args :
                      | named_args named_arg COMMA
                      | named_args named_arg"""
        if len(p) > 1:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = []

    def p_named_arg(self, p):
        """named_arg : identifier EQ argument"""
        p[0] = ast.NamedArg(ident=p[1], arg=p[3], debug_info=self.debug_info(p, 1))

    def p_argument(self, p):
        """argument : value
                    | identifier"""
        p[0] = p[1]

    def p_identifier(self, p):
        """identifier : IDENTIFIER"""
        p[0] = ast.Id(ident=p[1], debug_info=self.debug_info(p, 1))

    # Concatenation of strings separated only by whitespace
    # since linebreaks are not allowed inside strings
    def p_string(self, p):
        """string : STRING
                  | string STRING"""
        p[0] = p[1] if len(p) == 2 else p[1] + p[2]

    def p_value(self, p):
        """value : dictionary
                 | array
                 | bool
                 | null
                 | NUMBER
                 | string
                 | portref"""
        p[0] = ast.Value(value=p[1], debug_info=self.debug_info(p, 1))

    def p_portref(self, p):
        """portref : AND IDENTIFIER DOT IDENTIFIER opt_direction
                   | AND DOT IDENTIFIER opt_direction
        """
        # '&actor.port' references a port; '&.port' references an internal port.
        if len(p) == 6:
            _, _, actor, _, port, direction = p[:]
            ref = ast.PortRef(actor=actor, port=port, direction=direction, debug_info=self.debug_info(p, 1))
        else:
            _, _, _, port, direction = p[:]
            ref = ast.InternalPortRef(port=port, direction=direction, debug_info=self.debug_info(p, 1))
        p[0] = ref

    def p_bool(self, p):
        """bool : TRUE
                | FALSE"""
        p[0] = bool(p.slice[1].type == 'TRUE')

    def p_null(self, p):
        """null : NULL"""
        p[0] = None

    def p_dictionary(self, p):
        """dictionary : LBRACE members RBRACE"""
        p[0] = dict(p[2])

    def p_members(self, p):
        """members :
                   | members member COMMA
                   | members member"""
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2])
            p[0] = p[1]

    def p_member(self, p):
        """member : string COLON value"""
        # Unwrap the ast.Value: dictionaries hold plain python values.
        p[0] = (p[1], p[3].value)

    def p_values(self, p):
        """values :
                  | values value COMMA
                  | values value"""
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2].value)
            p[0] = p[1]

    def p_array(self, p):
        """array : LBRACK values RBRACK"""
        p[0] = p[2]

    def p_identifiers(self, p):
        """identifiers :
                       | identifiers IDENTIFIER COMMA
                       | identifiers IDENTIFIER"""
        if len(p) > 2:
            p[1].append(p[2])
        p[0] = p[1] if len(p) > 1 else []

    def p_qualified_name(self, p):
        """qualified_name : qualified_name DOT IDENTIFIER
                          | IDENTIFIER"""
        if len(p) == 4:
            # Concatenate name
            p[0] = p[1] + p[2] + p[3]
        else:
            p[0] = p[1]

    # Error rule for syntax errors
    def p_error(self, token):
        if not token:
            # Unexpected EOF
            lines = self.source_text.splitlines()
            info = {'line': len(lines), 'col': len(lines[-1])}
            self.issuetracker.add_error('Unexpected end of file.', info)
            return
        info = {'line': token.lineno, 'col': self._find_column(token.lexpos)}
        self.issuetracker.add_error('Syntax error.', info)

    def _find_column(self, lexpos):
        last_cr = self.source_text.rfind('\n', 0, lexpos)
        # rfind returns -1 if not found, i.e. on 1st line,
        # which is exactly what we need in that case...
        column = lexpos - last_cr
        return column

    def debug_info(self, p, n):
        # Line/column of the n:th symbol of the production, for diagnostics.
        info = {'line': p.lineno(n), 'col': self._find_column(p.lexpos(n))}
        return info

    def parse(self, source_text, logger=None):
        # return ir (AST) and issuetracker
        self.issuetracker = IssueTracker()
        self.source_text = source_text
        root = None
        try:
            root = self.parser.parse(source_text, debug=logger)
        except SyntaxError as e:
            self.issuetracker.add_error(e.text, {
                'line': e.lineno,
                'col': e.offset
            })
        finally:
            # The root holds two subtrees (see p_script): script ir and deploy ir.
            ir, deploy_ir = root.children if root else (ast.Node(), ast.Node())
        return ir, deploy_ir, self.issuetracker
class CalvinParser(object):
    """LALR parser (PLY yacc) for CalvinScript.

    Each p_<rule> method's docstring is a PLY grammar production and is
    required for parser-table generation -- those docstrings are grammar,
    not documentation, and must not be edited casually.  Semantic actions
    build an AST from the 'ast' node classes.  parse() is the entry point.
    """

    def __init__(self, lexer=None):
        # Reuse a caller-supplied lexer when given, else build one from the
        # shared token rules (optimize=True skips PLY's sanity checks).
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=True)
        # Since the parse may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=False, optimize=True, outputdir=containing_dir)

    # Token list required by PLY; shared with the lexer rules.
    tokens = calvin_tokens

    def p_script(self, p):
        """script : opt_constdefs opt_compdefs opt_program"""
        # Root node: constant definitions, component definitions, program.
        s = ast.Node()
        s.add_children(p[1] + p[2] + p[3])
        p[0] = s

    def p_opt_constdefs(self, p):
        """opt_constdefs : | constdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_constdefs(self, p):
        """constdefs : constdefs constdef | constdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_constdef(self, p):
        """constdef : DEFINE identifier EQ argument"""
        constdef = ast.Constant(ident=p[2], arg=p[4], debug_info=self.debug_info(p, 1))
        p[0] = constdef

    def p_opt_compdefs(self, p):
        """opt_compdefs : | compdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_compdefs(self, p):
        """compdefs : compdefs compdef | compdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_compdef(self, p):
        """compdef : COMPONENT qualified_name LPAREN identifiers RPAREN identifiers RARROW identifiers LBRACE docstring program RBRACE"""
        p[0] = ast.Component(
            name=p[2],
            arg_names=p[4],
            inports=p[6],
            outports=p[8],
            docstring=p[10],
            program=p[11],
            debug_info=self.debug_info(p, 1),
        )

    def p_docstring(self, p):
        """docstring : | DOCSTRING """
        if len(p) == 1:
            # Placeholder text when the component author wrote no docstring.
            p[0] = "Someone(TM) should write some documentation for this component."
        else:
            p[0] = p[1]

    def p_opt_program(self, p):
        """opt_program : | program"""
        if len(p) == 1:
            p[0] = []
        else:
            # Wrap the top-level program in a Block namespaced by script name.
            p[0] = [ast.Block(program=p[1], namespace="__scriptname__", debug_info=self.debug_info(p, 1))]

    def p_program(self, p):
        """program : program statement | statement """
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[2]]

    def p_statement(self, p):
        """statement : assignment | link"""
        p[0] = p[1]

    def p_assignment(self, p):
        """assignment : IDENTIFIER COLON qualified_name LPAREN named_args RPAREN"""
        # Actor instantiation: <ident> : <actor_type>(<named args>)
        p[0] = ast.Assignment(ident=p[1], actor_type=p[3], args=p[5], debug_info=self.debug_info(p, 1))

    def p_link(self, p):
        """link : outport GT port | outport GT portlist | outport GT void | implicit_port GT port | implicit_port GT portlist | internal_outport GT inport | internal_outport GT inportlist | void GT inport | void GT inportlist """
        p[0] = ast.Link(outport=p[1], inport=p[3], debug_info=self.debug_info(p, 1))

    def p_link_error(self, p):
        """link : internal_outport GT internal_inport"""
        # Grammar-level diagnostic: a component's internal outport may not be
        # wired straight to its internal inport.
        info = {"line": p.lineno(2), "col": self._find_column(p.lexpos(2))}
        self.issuetracker.add_error("Component inport connected directly to outport.", info)

    # def p_portmap(self, p):
    #     """portmap : port GT internal_port
    #                | internal_port GT port"""
    #     p[0] = ast.Portmap(p[1], p[3])

    def p_void(self, p):
        """void : VOID"""
        p[0] = ast.Void(debug_info=self.debug_info(p, 1))

    def p_portlist(self, p):
        """portlist : portlist COMMA port | port COMMA port"""
        # Fold consecutive ports into a single PortList node.
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_inportlist(self, p):
        """inportlist : inportlist COMMA inport | inport COMMA inport"""
        # Same folding as p_portlist, restricted to inports.
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_port(self, p):
        """port : inport | internal_inport"""
        p[0] = p[1]

    def p_implicit_port(self, p):
        """implicit_port : argument"""
        # A literal/identifier used directly as a data source on a link.
        p[0] = ast.ImplicitPort(arg=p[1], debug_info=self.debug_info(p, 1))

    def p_inport(self, p):
        """inport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.InPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_outport(self, p):
        """outport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.OutPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_internal_inport(self, p):
        """internal_inport : DOT IDENTIFIER"""
        # Leading dot = port on the enclosing component itself.
        p[0] = ast.InternalInPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_internal_outport(self, p):
        """internal_outport : DOT IDENTIFIER"""
        p[0] = ast.InternalOutPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_named_args(self, p):
        """named_args : | named_args named_arg COMMA | named_args named_arg"""
        if len(p) > 1:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = []

    def p_named_arg(self, p):
        """named_arg : identifier EQ argument"""
        p[0] = ast.NamedArg(ident=p[1], arg=p[3], debug_info=self.debug_info(p, 1))

    def p_argument(self, p):
        """argument : value | identifier"""
        p[0] = p[1]

    def p_identifier(self, p):
        """identifier : IDENTIFIER"""
        p[0] = ast.Id(ident=p[1], debug_info=self.debug_info(p, 1))

    def p_value(self, p):
        """value : dictionary | array | bool | null | NUMBER | STRING"""
        p[0] = ast.Value(value=p[1], debug_info=self.debug_info(p, 1))

    def p_bool(self, p):
        """bool : TRUE | FALSE"""
        # Distinguish by token *type*; the token value is not inspected.
        p[0] = bool(p.slice[1].type == "TRUE")

    def p_null(self, p):
        """null : NULL"""
        p[0] = None

    def p_dictionary(self, p):
        """dictionary : LBRACE members RBRACE"""
        p[0] = dict(p[2])

    def p_members(self, p):
        """members : | members member COMMA | members member"""
        # Possibly-empty member list; a trailing comma is accepted.
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2])
            p[0] = p[1]

    def p_member(self, p):
        """member : STRING COLON value"""
        p[0] = (p[1], p[3].value)

    def p_values(self, p):
        """values : | values value COMMA | values value"""
        # Unwrap ast.Value nodes to plain values for array literals.
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2].value)
            p[0] = p[1]

    def p_array(self, p):
        """array : LBRACK values RBRACK"""
        p[0] = p[2]

    def p_identifiers(self, p):
        """identifiers : | identifiers IDENTIFIER COMMA | identifiers IDENTIFIER"""
        # Possibly-empty identifier list; a trailing comma is accepted.
        if len(p) > 2:
            p[1].append(p[2])
        p[0] = p[1] if len(p) > 1 else []

    def p_qualified_name(self, p):
        """qualified_name : qualified_name DOT IDENTIFIER | IDENTIFIER"""
        if len(p) == 4:
            # Concatenate name
            p[0] = p[1] + p[2] + p[3]
        else:
            p[0] = p[1]

    # Error rule for syntax errors
    def p_error(self, token):
        # Called by PLY on a syntax error; records an issue rather than
        # raising.  token is None when input ended unexpectedly.
        if not token:
            # Unexpected EOF
            lines = self.source_text.splitlines()
            info = {"line": len(lines), "col": len(lines[-1])}
            self.issuetracker.add_error("Unexpected end of file.", info)
            return
        # FIXME: Better recovery
        # FIXME: [PP] This originated as an exception in the lexer,
        # there is more info to extract.
        info = {"line": token.lineno, "col": self._find_column(token.lexpos)}
        self.issuetracker.add_error("Syntax error.", info)
        # print self.parser.statestack
        # print self.parser.symstack
        # Trying to recover from here...

    def _find_column(self, lexpos):
        # Map an absolute lexer position to a column number on its line.
        last_cr = self.source_text.rfind("\n", 0, lexpos)
        # rfind returns -1 if not found, i.e. on 1st line,
        # which is exactly what we need in that case...
        column = lexpos - last_cr
        return column

    def debug_info(self, p, n):
        # Source position (line/col) of grammar symbol n for AST nodes.
        info = {"line": p.lineno(n), "col": self._find_column(p.lexpos(n))}
        return info

    def parse(self, source_text):
        # return ir (AST) and issuetracker
        # Entry point: parse a CalvinScript string; a fresh IssueTracker is
        # created per call, and errors are reported through it.
        self.issuetracker = IssueTracker()
        self.source_text = source_text
        try:
            ir = self.parser.parse(source_text)
        except SyntaxError as e:
            # Lexer errors surface as SyntaxError; report and fall back to
            # an empty AST so callers always receive a Node.
            self.issuetracker.add_error(e.text, {"line": e.lineno, "col": e.offset})
            ir = ast.Node()
        return ir, self.issuetracker
def _exit_with_error(callback):
    """Report an authorization failure through *callback*.

    Builds an IssueTracker containing a single "UNAUTHORIZED" error
    (HTTP status 401) and invokes the callback with an empty result
    dict plus that tracker.
    """
    tracker = IssueTracker()
    tracker.add_error("UNAUTHORIZED", info={'status': 401})
    callback({}, tracker)
def handle_deploy(self, handle, connection, match, data, hdr):
    """ POST /deploy
    Compile and deploy a calvin script to this calvin node
    Apply deployment requirements to actors of an application
    and initiate migration of actors accordingly
    Body:
    {
        "name": <application name>,
        "script": <calvin script>  # alternativly "app_info"
        "app_info": <compiled script as app_info>  # alternativly "script"
        "sec_sign": {<cert hash>: <security signature of script>, ...} # optional and only with "script"
        "sec_credentials": <security credentials of user> # optional
        "deploy_info":
           {"groups": {"<group 1 name>": ["<actor instance 1 name>", ...]},  # TODO not yet implemented
            "requirements": {
                "<actor instance 1 name>": [ {"op": "<matching rule name>",
                                              "kwargs": {<rule param key>: <rule param value>, ...},
                                              "type": "+" or "-" for set intersection or set removal, respectively
                                              }, ...
                                           ],
                ...
                            }
           }
    }
    Note that either a script or app_info must be supplied. Optionally security
    verification of application script can be made. Also optionally user
    credentials can be supplied, some runtimes are configured to require
    credentials. The credentials takes for example the following form:
        {"user": <username>, "password": <password>, "role": <role>, "group": <group>, ...}
    The matching rules are implemented as plug-ins, intended to be extended.
    The type "+" is "and"-ing rules together (actually the intersection of all
    possible nodes returned by the rules.) The type "-" is explicitly removing
    the nodes returned by this rule from the set of possible nodes. Note that
    only negative rules will result in no possible nodes, i.e. there is no
    implied "all but these."
    A special matching rule exist, to first form a union between matching
    rules, i.e. alternative matches. This is useful for e.g. alternative
    namings, ownerships or specifying either of two specific nodes.
        {"op": "union_group",
         "requirements": [list as above of matching rules but without type key]
         "type": "+"
        }
    Other matching rules available is current_node, all_nodes and
    node_attr_match which takes an index param which is attribute formatted,
    e.g.
        {"op": "node_attr_match",
         "kwargs": {"index": ["node_name", {"organization": "org.testexample", "name": "testNode1"}]}
         "type": "+"
        }
    Response status code: OK, CREATED, BAD_REQUEST, UNAUTHORIZED or INTERNAL_ERROR
    Response: {"application_id": <application-id>,
               "actor_map": {<actor name with namespace>: <actor id>, ...}
               "placement": {<actor_id>: <node_id>, ...},
               "requirements_fulfilled": True/False}
    Failure response: {'errors': <compilation errors>,
                       'warnings': <compilation warnings>,
                       'exception': <exception string>}
    """
    try:
        _log.analyze(self.node.id, "+", data)
        if 'app_info' not in data:
            # Script path: compile here, continuation in handle_deploy_cont.
            kwargs = {}
            # NOTE(review): 'credentials' and 'kwargs' are populated but not
            # visibly consumed below (kwargs stays empty) -- verify intent.
            credentials = ""
            # Supply security verification data when available
            content = None
            if "sec_credentials" in data:
                credentials = data['sec_credentials']
                content = {}
                if not "sec_sign" in data:
                    data['sec_sign'] = {}
                # NOTE(review): decode('hex_codec') and iteritems() are
                # Python 2 only -- this module is not Python 3 compatible.
                content = {
                    'file': data["script"],
                    'sign': {h: s.decode('hex_codec') for h, s in data['sec_sign'].iteritems()}
                }
            # Asynchronous: result is delivered to handle_deploy_cont via cb.
            compiler.compile_script_check_security(
                data["script"],
                filename=data["name"],
                security=self.security,
                content=content,
                node=self.node,
                verify=(data["check"] if "check" in data else True),
                cb=CalvinCB(self.handle_deploy_cont, handle=handle, connection=connection, data=data),
                **kwargs)
        else:
            # Supplying app_info is for backward compatibility hence abort if node configured security
            # Main user is csruntime when deploying script at the same time and some tests used
            # via calvin.Tools.deployer (the Deployer below is the new in appmanager)
            # TODO rewrite these users to send the uncompiled script as cscontrol does.
            if security_enabled():
                _log.error("Can't combine compiled script with runtime having security")
                self.send_response(handle, connection, None, status=calvinresponse.UNAUTHORIZED)
                return
            app_info = data['app_info']
            issuetracker = IssueTracker()
            self.handle_deploy_cont(app_info, issuetracker, handle, connection, data)
    except Exception as e:
        # Any failure (bad JSON content, missing keys, compiler error) is
        # reported to the client as INTERNAL_ERROR with the exception text.
        _log.exception("Deployer failed, e={}".format(e))
        self.send_response(handle, connection, json.dumps({'exception': str(e)}), status=calvinresponse.INTERNAL_ERROR)
class CalvinParser(object):
    """PLY (yacc) parser for CalvinScript.

    The docstring of every p_<rule> method is a PLY grammar production and
    drives parse-table generation: those docstrings are grammar, not prose,
    and must be left untouched unless the grammar itself changes.  Semantic
    actions construct AST nodes from the 'ast' module; parse() is the
    public entry point.
    """

    def __init__(self, lexer=None):
        # Accept an injected lexer (for tests/reuse); otherwise create one
        # from the shared token rules.  optimize=True skips PLY validation.
        super(CalvinParser, self).__init__()
        if lexer:
            self.lexer = lexer
        else:
            self.lexer = lex.lex(module=calvin_rules, debug=False, optimize=True)
        # Since the parse may be called from other scripts, we want to have control
        # over where parse tables (and parser.out log) will be put if the tables
        # have to be recreated
        this_file = os.path.realpath(__file__)
        containing_dir = os.path.dirname(this_file)
        self.parser = yacc.yacc(module=self, debug=False, optimize=True, outputdir=containing_dir)

    # PLY requires the token list as a class attribute; shared with lexer.
    tokens = calvin_tokens

    def p_script(self, p):
        """script : opt_constdefs opt_compdefs opt_program"""
        # Root node collecting constants, components and program statements.
        s = ast.Node()
        s.add_children(p[1] + p[2] + p[3])
        p[0] = s

    def p_opt_constdefs(self, p):
        """opt_constdefs : | constdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_constdefs(self, p):
        """constdefs : constdefs constdef | constdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_constdef(self, p):
        """constdef : DEFINE identifier EQ argument"""
        constdef = ast.Constant(ident=p[2], arg=p[4], debug_info=self.debug_info(p, 1))
        p[0] = constdef

    def p_opt_compdefs(self, p):
        """opt_compdefs : | compdefs"""
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = []

    def p_compdefs(self, p):
        """compdefs : compdefs compdef | compdef"""
        if len(p) == 3:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = [p[1]]

    def p_compdef(self, p):
        """compdef : COMPONENT qualified_name LPAREN identifiers RPAREN identifiers RARROW identifiers LBRACE docstring program RBRACE"""
        p[0] = ast.Component(name=p[2], arg_names=p[4], inports=p[6], outports=p[8], docstring=p[10], program=p[11], debug_info=self.debug_info(p, 1))

    def p_docstring(self, p):
        """docstring : | DOCSTRING """
        if len(p) == 1:
            # Placeholder used when the component author wrote no docstring.
            p[0] = "Someone(TM) should write some documentation for this component."
        else:
            p[0] = p[1]

    def p_opt_program(self, p):
        """opt_program : | program"""
        if len(p) == 1:
            p[0] = []
        else:
            # Top-level program lives in a Block namespaced by script name.
            p[0] = [ast.Block(program=p[1], namespace='__scriptname__', debug_info=self.debug_info(p, 1))]

    def p_program(self, p):
        """program : program statement | statement """
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[2]]

    def p_statement(self, p):
        """statement : assignment | link"""
        p[0] = p[1]

    def p_assignment(self, p):
        """assignment : IDENTIFIER COLON qualified_name LPAREN named_args RPAREN"""
        # Actor instantiation: <ident> : <actor_type>(<named args>)
        p[0] = ast.Assignment(ident=p[1], actor_type=p[3], args=p[5], debug_info=self.debug_info(p, 1))

    def p_link(self, p):
        """link : outport GT port | outport GT portlist | outport GT void | implicit_port GT port | implicit_port GT portlist | internal_outport GT inport | internal_outport GT inportlist | void GT inport | void GT inportlist """
        p[0] = ast.Link(outport=p[1], inport=p[3], debug_info=self.debug_info(p, 1))

    def p_link_error(self, p):
        """link : internal_outport GT internal_inport"""
        # Diagnostic rule: internal outport wired straight to internal inport
        # is rejected with a tracked error rather than a parse failure.
        info = {'line': p.lineno(2), 'col': self._find_column(p.lexpos(2))}
        self.issuetracker.add_error('Component inport connected directly to outport.', info)

    # def p_portmap(self, p):
    #     """portmap : port GT internal_port
    #                | internal_port GT port"""
    #     p[0] = ast.Portmap(p[1], p[3])

    def p_void(self, p):
        """void : VOID"""
        p[0] = ast.Void(debug_info=self.debug_info(p, 1))

    def p_portlist(self, p):
        """portlist : portlist COMMA port | port COMMA port"""
        # Fold consecutive ports into one PortList node.
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_inportlist(self, p):
        """inportlist : inportlist COMMA inport | inport COMMA inport"""
        # Same folding as p_portlist, restricted to inports.
        if type(p[1]) is ast.PortList:
            p[1].add_child(p[3])
            p[0] = p[1]
        else:
            p[0] = ast.PortList()
            p[0].add_child(p[1])
            p[0].add_child(p[3])

    def p_port(self, p):
        """port : inport | internal_inport"""
        p[0] = p[1]

    def p_implicit_port(self, p):
        """implicit_port : argument"""
        # A literal/identifier used directly as a data source on a link.
        p[0] = ast.ImplicitPort(arg=p[1], debug_info=self.debug_info(p, 1))

    def p_inport(self, p):
        """inport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.InPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_outport(self, p):
        """outport : IDENTIFIER DOT IDENTIFIER"""
        p[0] = ast.OutPort(actor=p[1], port=p[3], debug_info=self.debug_info(p, 1))

    def p_internal_inport(self, p):
        """internal_inport : DOT IDENTIFIER"""
        # Leading dot = port on the enclosing component itself.
        p[0] = ast.InternalInPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_internal_outport(self, p):
        """internal_outport : DOT IDENTIFIER"""
        p[0] = ast.InternalOutPort(port=p[2], debug_info=self.debug_info(p, 1))

    def p_named_args(self, p):
        """named_args : | named_args named_arg COMMA | named_args named_arg"""
        if len(p) > 1:
            p[0] = p[1] + [p[2]]
        else:
            p[0] = []

    def p_named_arg(self, p):
        """named_arg : identifier EQ argument"""
        p[0] = ast.NamedArg(ident=p[1], arg=p[3], debug_info=self.debug_info(p, 1))

    def p_argument(self, p):
        """argument : value | identifier"""
        p[0] = p[1]

    def p_identifier(self, p):
        """identifier : IDENTIFIER"""
        p[0] = ast.Id(ident=p[1], debug_info=self.debug_info(p, 1))

    def p_value(self, p):
        """value : dictionary | array | bool | null | NUMBER | STRING"""
        p[0] = ast.Value(value=p[1], debug_info=self.debug_info(p, 1))

    def p_bool(self, p):
        """bool : TRUE | FALSE"""
        # Distinguish by token *type*; the token value is not inspected.
        p[0] = bool(p.slice[1].type == 'TRUE')

    def p_null(self, p):
        """null : NULL"""
        p[0] = None

    def p_dictionary(self, p):
        """dictionary : LBRACE members RBRACE"""
        p[0] = dict(p[2])

    def p_members(self, p):
        """members : | members member COMMA | members member"""
        # Possibly-empty member list; a trailing comma is accepted.
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2])
            p[0] = p[1]

    def p_member(self, p):
        """member : STRING COLON value"""
        p[0] = (p[1], p[3].value)

    def p_values(self, p):
        """values : | values value COMMA | values value"""
        # Unwrap ast.Value nodes to plain values for array literals.
        if len(p) == 1:
            p[0] = list()
        else:
            p[1].append(p[2].value)
            p[0] = p[1]

    def p_array(self, p):
        """array : LBRACK values RBRACK"""
        p[0] = p[2]

    def p_identifiers(self, p):
        """identifiers : | identifiers IDENTIFIER COMMA | identifiers IDENTIFIER"""
        # Possibly-empty identifier list; a trailing comma is accepted.
        if len(p) > 2:
            p[1].append(p[2])
        p[0] = p[1] if len(p) > 1 else []

    def p_qualified_name(self, p):
        """qualified_name : qualified_name DOT IDENTIFIER | IDENTIFIER"""
        if len(p) == 4:
            # Concatenate name
            p[0] = p[1] + p[2] + p[3]
        else:
            p[0] = p[1]

    # Error rule for syntax errors
    def p_error(self, token):
        # PLY error hook: record the problem on the issue tracker instead of
        # raising.  token is None when input ended unexpectedly.
        if not token:
            # Unexpected EOF
            lines = self.source_text.splitlines()
            info = {'line': len(lines), 'col': len(lines[-1])}
            self.issuetracker.add_error('Unexpected end of file.', info)
            return
        # FIXME: Better recovery
        # FIXME: [PP] This originated as an exception in the lexer,
        # there is more info to extract.
        info = {'line': token.lineno, 'col': self._find_column(token.lexpos)}
        self.issuetracker.add_error('Syntax error.', info)
        # print self.parser.statestack
        # print self.parser.symstack
        # Trying to recover from here...

    def _find_column(self, lexpos):
        # Map an absolute lexer position to a column number on its line.
        last_cr = self.source_text.rfind('\n', 0, lexpos)
        # rfind returns -1 if not found, i.e. on 1st line,
        # which is exactly what we need in that case...
        column = lexpos - last_cr
        return column

    def debug_info(self, p, n):
        # Source position (line/col) of grammar symbol n for AST nodes.
        info = {'line': p.lineno(n), 'col': self._find_column(p.lexpos(n))}
        return info

    def parse(self, source_text):
        # return ir (AST) and issuetracker
        # Entry point: parse a CalvinScript string.  A fresh IssueTracker is
        # created per call; on lexer SyntaxError an empty AST is returned so
        # callers always receive a Node.
        self.issuetracker = IssueTracker()
        self.source_text = source_text
        try:
            ir = self.parser.parse(source_text)
        except SyntaxError as e:
            self.issuetracker.add_error(e.text, {'line': e.lineno, 'col': e.offset})
            ir = ast.Node()
        return ir, self.issuetracker