def _create_parser(self):
    """Build and return the pyparsing grammar for an IDSL file.

    The grammar recognizes a list of ``import`` statements followed by a
    single ``module { ... }`` block containing struct, dictionary, sequence,
    enum, exception and interface definitions.  C++-style comments are
    ignored.  Returns the top-level ParserElement (``IDSL``).
    """
    # Punctuation tokens, all suppressed from the parse results.
    # NOTE(review): Word(";") matches RUNS of ';' (Word takes a char set,
    # not a literal); Literal/Suppress(";") is the usual idiom — confirm
    # before changing, since all grammars in this file share the pattern.
    semicolon = Suppress(Word(";"))
    quote = Suppress(Word("\""))
    op = Suppress(Word("{"))
    cl = Suppress(Word("}"))
    opp = Suppress(Word("("))
    clp = Suppress(Word(")"))
    lt = Suppress(Word("<"))
    gt = Suppress(Word(">"))
    eq = Suppress(Word("="))
    # Identifiers; typeIdentifier additionally allows ':' for scoped names.
    identifier = Word(alphas + "_", alphanums + "_")
    typeIdentifier = Word(alphas + "_", alphanums + "_:")
    # One struct field: "type name [= default];"
    structIdentifer = Group(
        typeIdentifier.setResultsName('type') + identifier.setResultsName('identifier') + Optional(eq) + Optional(
            CharsNotIn(";").setResultsName('defaultValue')) + semicolon)
    structIdentifers = Group(OneOrMore(structIdentifer))
    ## Imports:  import "path";
    idslImport = Suppress(Word("import")) + quote + CharsNotIn("\";").setResultsName('path') + quote + semicolon
    idslImports = ZeroOrMore(idslImport)
    # NOTE(review): Word("struct") matches any word made of the chars
    # {s,t,r,u,c} (e.g. "structs"); Keyword("struct") would be stricter —
    # TODO confirm intended behavior (same applies to the other keywords).
    structDef = Word("struct").setResultsName('type') + identifier.setResultsName(
        'name') + op + structIdentifers.setResultsName("structIdentifiers") + cl + semicolon
    dictionaryDef = Word("dictionary").setResultsName('type') + lt + CharsNotIn("<>").setResultsName(
        'content') + gt + identifier.setResultsName('name') + semicolon
    sequenceDef = Word("sequence").setResultsName('type') + lt + typeIdentifier.setResultsName(
        'typeSequence') + gt + identifier.setResultsName('name') + semicolon
    enumDef = Word("enum").setResultsName('type') + identifier.setResultsName('name') + op + CharsNotIn(
        "{}").setResultsName('content') + cl + semicolon
    exceptionDef = Word("exception").setResultsName('type') + identifier.setResultsName('name') + op + CharsNotIn(
        "{}").setResultsName('content') + cl + semicolon
    # Method pieces: throws-clause, decorators, return value, parameter list.
    raiseDef = Suppress(Word("throws")) + typeIdentifier + ZeroOrMore(Literal(',') + typeIdentifier)
    decoratorDef = Literal('idempotent') | Literal('out')
    retValDef = typeIdentifier.setResultsName('ret')
    firstParam = Group(Optional(decoratorDef.setResultsName('decorator')) + typeIdentifier.setResultsName(
        'type') + identifier.setResultsName('name'))
    nextParam = Suppress(Word(',')) + firstParam
    params = firstParam + ZeroOrMore(nextParam)
    # A remote method:  [decorator] ret name(params) [throws ...];
    remoteMethodDef = Group(Optional(decoratorDef.setResultsName('decorator')) + retValDef.setResultsName(
        'ret') + typeIdentifier.setResultsName('name') + opp + Optional(params).setResultsName(
        'params') + clp + Optional(raiseDef.setResultsName('raise')) + semicolon)
    interfaceDef = Word('interface').setResultsName('type') + typeIdentifier.setResultsName('name') + op + Group(
        ZeroOrMore(remoteMethodDef)).setResultsName('methods') + cl + semicolon
    # A module body is any mix of the definitions above.
    moduleContent = Group(structDef | enumDef | exceptionDef | dictionaryDef | sequenceDef | interfaceDef)
    module = Suppress(Word("module")) + identifier.setResultsName("name") + op + ZeroOrMore(
        moduleContent).setResultsName("contents") + cl + semicolon
    IDSL = idslImports.setResultsName("imports") + module.setResultsName("module")
    IDSL.ignore(cppStyleComment)
    return IDSL
def define_document_grammar(self):
    """Define the pyparsing grammar for one passenger record.

    Backus-Naur Form (BNF)::

        chars    ::= a-zA-Z
        numbers  ::= 0-9
        word     ::= chars|numbers+
        number   ::= numbers+
        quote    ::= '"'
        comma    ::= ','
        name     ::= quote word+ quote
        class    ::= number+
        age      ::= number+
        sex      ::= word
        survived ::= number
        entry    ::= name class age sex survived

    The assembled grammar is stored in ``self.final_expression``.
    """
    # Fix: the BNF text above used to be a SECOND adjacent string literal,
    # i.e. a dead no-op expression statement rather than part of the
    # docstring; it is now merged into the docstring proper.
    quote = Literal('"')
    comma = Literal(',')
    # Quoted name; commas inside the quotes are parsed but discarded.
    name = Suppress(quote) + OneOrMore(Word(alphas) | Suppress(comma)) + Suppress(quote)
    ship_class = Word(alphanums)
    age = Word(nums)
    sex = Word(alphanums)
    survived = Word(nums)
    entry = name.setResultsName('name') + ship_class.setResultsName('ship_class') \
        + age.setResultsName('age') + sex.setResultsName('sex') \
        + survived.setResultsName('survived')
    # store final expression
    self.final_expression = entry
def simple_query():  # IGNORE:too-many-locals
    '''
    Grammar for simple queries

    <simple-query> ::= 'SELECT' ['DISTINCT'] ['TOP('<integer>')']
                       <select-term> (',' <select-term>)*
                       'FROM' <table-term> (',' <table-term>)*
                       [<where-condition>]
                       [['ACCORDING' 'TO'] 'PREFERENCES' <theory-grammar>]
                       ['GROUP BY' <identifier> (',' <identifier>)* ]

    Returns the assembled pyparsing grammar; parse results are converted
    to a ParsedSimpleQuery by the attached parse action.
    '''
    # Project-local keyword/terminal grammars (imported lazily to avoid
    # import cycles between grammar modules — TODO confirm).
    from grammar.keywords import AND_KEYWORD, \
        ACCORDING_KEYWORD, TO_KEYWORD, DISTINCT_KEYWORD, \
        PREFERENCES_KEYWORD, SELECT_KEYWORD, FROM_KEYWORD, WHERE_KEYWORD, \
        GROUP_KEYWORD, BY_KEYWORD, OR_KEYWORD
    from grammar.basic import attribute_term
    from grammar.symbols import COMMA
    from grammar.theory import TheoryGrammar
    from grammar.parsed import ParsedSimpleQuery
    # SELECT [DISTINCT] [TOP(n)] term (',' term)*
    select_clause = \
        Suppress(SELECT_KEYWORD) + \
        Optional(DISTINCT_KEYWORD).setResultsName('distinct') + \
        Optional(top_term()).setResultsName('top') + \
        delimitedList(select_term(), COMMA).setResultsName('selected')
    # GROUP BY id (',' id)*
    group_by_clause = Suppress(GROUP_KEYWORD) + Suppress(BY_KEYWORD) + \
        delimitedList(attribute_term(), COMMA)
    # [ACCORDING TO] PREFERENCES <theory>
    preference_clause = Optional(Suppress(ACCORDING_KEYWORD + TO_KEYWORD)) + \
        Suppress(PREFERENCES_KEYWORD) + TheoryGrammar.grammar()
    # WHERE: one leading condition, then an all-OR or all-AND tail
    # (mixing OR and AND in one query is not expressible here).
    where_single = Group(where_term())
    where_or = Suppress(OR_KEYWORD) + \
        delimitedList(Group(where_term()), OR_KEYWORD)
    where_and = Suppress(AND_KEYWORD) + \
        delimitedList(Group(where_term()), AND_KEYWORD)
    where_t = (where_or.setResultsName('where_or') |
               where_and.setResultsName('where_and'))
    # GROUP BY and PREFERENCES are mutually exclusive alternatives here.
    simple_q = select_clause.setResultsName('select_clause') + \
        Suppress(FROM_KEYWORD) + \
        delimitedList(table_term(), COMMA).setResultsName('from_clause') + \
        Optional(Suppress(WHERE_KEYWORD) +
                 where_single.setResultsName('where_clause') +
                 Optional(where_t)) + \
        Optional(group_by_clause.setResultsName('group_clause') |
                 preference_clause.setResultsName('preference_clause'))
    simple_q.setParseAction(ParsedSimpleQuery)
    return simple_q
def get_fragment_grammar():
    """Build and return the pyparsing grammar for a [mapping] fragment.

    The grammar's parse action constructs a Mapping from the parsed
    archive name and condition/entry groups.  '#' comments are ignored.
    """
    # Match header [mapping]
    header = Suppress("[") + Suppress("mapping") + Suppress("]")
    # There are three possible patterns for mapping entries:
    #       obj:symbol (scheme)
    #       obj (scheme)
    #       * (scheme)
    obj = Fragment.ENTITY.setResultsName("object")
    symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
    scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName(
        "scheme") + Suppress(")")
    pattern1 = Group(obj + symbol + scheme)
    pattern2 = Group(obj + scheme)
    pattern3 = Group(
        Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
    # Most-specific pattern first so obj:symbol is not consumed as obj.
    mapping_entry = pattern1 | pattern2 | pattern3
    # To simplify parsing, classify groups of condition-mapping entry into two types: normal and default
    # A normal grouping is one with a non-default condition. The default grouping is one which contains the
    # default condition
    mapping_entries = Group(
        ZeroOrMore(mapping_entry)).setResultsName("mappings")
    # Keep the raw condition text so it can be re-evaluated later.
    normal_condition = Suppress(":") + originalTextFor(
        SDKConfig.get_expression_grammar())
    default_condition = Optional(
        Suppress(":") + Literal(Mapping.DEFAULT_CONDITION))
    normal_group = Group(
        normal_condition.setResultsName("condition") + mapping_entries)
    default_group = Group(default_condition +
                          mapping_entries).setResultsName("default_group")
    normal_groups = Group(
        ZeroOrMore(normal_group)).setResultsName("normal_groups")
    # Any mapping fragment definition can have zero or more normal group and only one default group as a last entry.
    archive = Suppress("archive") + Suppress(
        ":") + Fragment.ENTITY.setResultsName("archive")
    entries = Suppress("entries") + Suppress(":") + (
        normal_groups + default_group).setResultsName("entries")
    mapping = Group(header + archive + entries)
    mapping.setParseAction(lambda t: Mapping(t[0].archive, t[0].entries))
    mapping.ignore("#" + restOfLine)
    return mapping
def fromString(inputText, verbose=False):
    """Parse CDSL component text and return the processed component tree.

    Strips /* */ comment blocks, parses the import list and the
    ``component { ... }`` block, then hands the parse tree to
    CDSLParsing.component().  Python 2 syntax (print statement).
    """
    if verbose: print 'Verbose:', verbose
    # Remove /* ... */ blocks before parsing proper.
    text = nestedExpr("/*", "*/").suppress().transformString(inputText)
    # Suppressed punctuation tokens.
    semicolon = Suppress(Word(";"))
    quote = Suppress(Word("\""))
    op = Suppress(Word("{"))
    cl = Suppress(Word("}"))
    opp = Suppress(Word("("))
    clp = Suppress(Word(")"))
    identifier = Word( alphas+"_", alphanums+"_" )
    # Identifier optionally tagged with a middleware type: name(ice|ros)
    commIdentifier = Group(identifier.setResultsName('identifier') + Optional(opp + (CaselessLiteral("ice")|CaselessLiteral("ros")).setResultsName("type") + clp))
    # Imports:  import "path";
    idslImport = Suppress(CaselessLiteral("import")) + quote + CharsNotIn("\";").setResultsName('path') + quote + semicolon
    idslImports = ZeroOrMore(idslImport)
    # Communications: implements / requires / subscribesTo / publishes lists
    implementsList = Group(CaselessLiteral('implements') + identifier + ZeroOrMore(Suppress(Word(',')) + identifier) + semicolon)
    requiresList = Group(CaselessLiteral('requires') + identifier + ZeroOrMore(Suppress(Word(',')) + identifier) + semicolon)
    subscribesList = Group(CaselessLiteral('subscribesTo') + commIdentifier + ZeroOrMore(Suppress(Word(',')) + commIdentifier) + semicolon)
    publishesList = Group(CaselessLiteral('publishes') + identifier + ZeroOrMore(Suppress(Word(',')) + identifier) + semicolon)
    communicationList = implementsList | requiresList | subscribesList | publishesList
    communications = Group( Suppress(CaselessLiteral("communications")) + op + ZeroOrMore(communicationList) + cl + semicolon)
    # Language: cpp or python
    language = Suppress(CaselessLiteral("language")) + (CaselessLiteral("cpp")|CaselessLiteral("python")) + semicolon
    # GUI (optional):  gui Qt(widgetname);
    gui = Group(Optional(Suppress(CaselessLiteral("gui")) + CaselessLiteral("Qt") + opp + identifier + clp + semicolon ))
    # additional options (optional comma-separated identifier list)
    options = Group(Optional(Suppress(CaselessLiteral("options")) + identifier + ZeroOrMore(Suppress(Word(',')) + identifier) + semicolon))
    # '&' = Each: the four sections may appear in any order.
    componentContents = communications.setResultsName('communications') & language.setResultsName('language') & gui.setResultsName('gui') & options.setResultsName('options')
    component = Suppress(CaselessLiteral("component")) + identifier.setResultsName("name") + op + componentContents.setResultsName("properties") + cl + semicolon
    CDSL = idslImports.setResultsName("imports") + component.setResultsName("component")
    CDSL.ignore( cppStyleComment )
    tree = CDSL.parseString(text)
    return CDSLParsing.component(tree)
def _parse_query(self, query):
    """Parse *query* ("FROM x SELECT y WHERE ...") and store the
    pyparsing results in ``self.parsed_query``.

    Raises a pyparsing ParseException when the whole string does not
    match (parseAll=True).
    """
    cmp_op = oneOf("= > >= < <= ~ ! !~")
    bool_join = oneOf('AND OR')
    # FROM <word>
    from_clause = (Suppress(Literal('FROM')) + Word(printables)).setResultsName('FROM')
    # SELECT <word>
    select_clause = (Suppress(Literal('SELECT')) + Word(printables)).setResultsName('SELECT')
    # A double-quoted value; "'" escapes a quote, backslash escapes too.
    value = QuotedString('"', escQuote="'", escChar='\\').setResultsName('VAL')
    # <field> <op> <value>
    condition = (Word(printables) + cmp_op + value).setResultsName('COND')
    open_paren = Literal('(')
    close_paren = Literal(')')
    # WHERE with optional parens around a chain of joined conditions.
    where_clause = (Suppress(Literal('WHERE'))
                    + ZeroOrMore(open_paren)
                    + OneOrMore(Group(condition) + ZeroOrMore(bool_join))
                    + ZeroOrMore(close_paren)).setResultsName('WHERE')
    grammar = from_clause + select_clause + where_clause
    self.parsed_query = grammar.parseString(query, parseAll=True)
def parse_issue(issue):
    """Given an issue, return the parsed title data that needs saving.

    Expects ``issue['title']`` like "Sprint 3 some name <20170101 (5)".
    Returns the pyparsing ParseResults (named fields: sprint, name,
    deadline, points), or None when the title does not match.
    """
    from pyparsing import Word, Suppress, Literal, OneOrMore, Optional, nums, printables
    sprint = (Literal('Sprint') | Literal('sprint')) + Word(nums)
    points = Suppress(Literal('(')) + Word(nums) + Suppress(Literal(')'))
    deadline = Suppress(Literal('<')) + Word(nums)
    name = OneOrMore(Word(printables))
    title = Optional(sprint).setResultsName("sprint") + \
        Optional(name).setResultsName("name") + \
        Optional(deadline).setResultsName("deadline") + \
        points.setResultsName("points")
    try:
        # Fix: was `_, r = title.parseString(...)` which only worked for
        # exactly two tokens, printed r, and always returned None.
        return title.parseString(issue['title'])
    except Exception:
        # Fix: was a bare `except:` which also swallowed SystemExit and
        # KeyboardInterrupt; an unparseable title yields None instead.
        return None
def fromString(inputText, verbose=False):
    """Parse IDSL module text and return the processed module tree.

    Strips /* */ comment blocks, parses imports and the
    ``module { ... }`` body, then hands the parse tree to
    IDSLParsing.module().  Python 2 syntax (print statement).
    """
    if verbose: print 'Verbose:', verbose
    # Remove /* ... */ blocks before parsing proper.
    text = nestedExpr("/*", "*/").suppress().transformString(inputText)
    # Suppressed punctuation tokens.
    semicolon = Suppress(Word(";"))
    quote = Suppress(Word("\""))
    op = Suppress(Word("{"))
    cl = Suppress(Word("}"))
    opp = Suppress(Word("("))
    clp = Suppress(Word(")"))
    lt = Suppress(Word("<"))
    gt = Suppress(Word(">"))
    identifier = Word(alphas+"_",alphanums+"_")
    # typeIdentifier also allows ':' for scoped names.
    typeIdentifier = Word(alphas+"_",alphanums+"_:")
    ## Imports:  import "path";
    idslImport = Suppress(Word("import")) + quote + CharsNotIn("\";").setResultsName('path') + quote + semicolon
    idslImports = ZeroOrMore(idslImport)
    # Type definitions; bodies are captured as raw character runs.
    dictionaryDef = Word("dictionary") + lt + CharsNotIn("<>;") + gt + identifier.setResultsName('name') + semicolon
    sequenceDef = Word("sequence") + lt + CharsNotIn("<>;") + gt + identifier.setResultsName('name') + semicolon
    enumDef = Word("enum") + identifier.setResultsName('name') + op + CharsNotIn("{}") + cl + semicolon
    structDef = Word("struct") + identifier.setResultsName('name') + op + CharsNotIn("{}") + cl + semicolon
    exceptionDef = Word("exception") + identifier.setResultsName('name') + op + CharsNotIn("{}") + cl + semicolon
    # Method pieces: throws-clause, decorators, return value, params.
    raiseDef = Suppress(Word("throws")) + typeIdentifier + ZeroOrMore( Literal(',') + typeIdentifier )
    decoratorDef = Literal('idempotent') | Literal('out')
    retValDef = typeIdentifier.setResultsName('ret')
    firstParam = Group( Optional(decoratorDef.setResultsName('decorator')) + typeIdentifier.setResultsName('type') + identifier.setResultsName('name'))
    nextParam = Suppress(Word(',')) + firstParam
    params = firstParam + ZeroOrMore(nextParam)
    # A remote method: [decorator] ret name(params) [throws ...];
    remoteMethodDef = Group(Optional(decoratorDef) + retValDef + typeIdentifier.setResultsName('name') + opp + Optional(
        params).setResultsName('params') + clp + Optional(raiseDef) + semicolon )
    interfaceDef = Word("interface") + typeIdentifier.setResultsName('name') + op + Group(ZeroOrMore(remoteMethodDef)) + cl + semicolon
    # A module body is any mix of the definitions above.
    moduleContent = Group(structDef | enumDef | exceptionDef | dictionaryDef | sequenceDef | interfaceDef)
    module = Suppress(Word("module")) + identifier.setResultsName("name") + op + ZeroOrMore(moduleContent).setResultsName("contents") + cl + semicolon
    IDSL = idslImports.setResultsName("imports") + module.setResultsName("module")
    IDSL.ignore( cppStyleComment )
    tree = IDSL.parseString(text)
    return IDSLParsing.module(tree)
def detect_token(jade):
    """Parse one line of Jade markup (doctype, tag, or comment) and return
    the joined parse tokens as a string.

    Relies on module-level parse actions: parse_doctype, parse_selectors,
    parse_element, parse_tag, parse_buffered_comment.
    """
    # Doctype: "!!!" or "doctype" with an optional (caseless) variant name.
    doctype = LineStart() + oneOf('!!! doctype') + Optional(oneOf('5 html xml' \
        + ' default transitional strict frameset 1.1 basic mobile', True))
    doctype.setParseAction(parse_doctype)
    # CSS-style selectors: #id and .class
    element_id = Suppress('#') + Word(alphanums + '_' + '-')
    element_class = Suppress('.') + Word(alphanums + '_' + '-')
    # Either id-then-classes, or classes-then-optional-id.
    selectors = (element_id.setResultsName('element_id') \
        + ZeroOrMore(element_class).setResultsName('element_class')) \
        | (OneOrMore(element_class).setResultsName('element_class') \
        + Optional(element_id).setResultsName('element_id'))
    selectors.setParseAction(parse_selectors)
    # A bare selector implies a div; otherwise tag name plus selectors.
    element = selectors.setResultsName('selectors') \
        | (Word(alphas).setResultsName('element_name') \
        + Optional(selectors).setResultsName('selectors'))
    element.setParseAction(parse_element)
    # Parenthesized attribute list, nested parens handled by nestedExpr.
    attribute = CharsNotIn('('+')')
    attributes = nestedExpr(content=attribute)
    tag = element.setResultsName('element') \
        + Optional(attributes).setResultsName('attributes')
    tag.setParseAction(parse_tag)
    # TODO: block-comment and conditional-comment
    unbuffered_comment = Suppress(Suppress('//-') + restOfLine)
    buffered_comment = Suppress('//') + restOfLine
    buffered_comment.setParseAction(parse_buffered_comment)
    # Order matters here, as buffered will pick up
    # unbuffered comments if set first
    comment = unbuffered_comment | buffered_comment
    source = doctype | tag | comment
    parsed = source.parseString(jade)
    return ' '.join(parsed)
'''
def get_fragment_grammar():
    """Build and return the pyparsing grammar for a [mapping] fragment.

    The parse action constructs a Mapping from the parsed archive name
    and condition/entry groups.  '#' comments are ignored.
    """
    # Match header [mapping]
    header = Suppress("[") + Suppress("mapping") + Suppress("]")
    # There are three possible patterns for mapping entries:
    #       obj:symbol (scheme)
    #       obj (scheme)
    #       * (scheme)
    obj = Fragment.ENTITY.setResultsName("object")
    symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
    scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")")
    pattern1 = Group(obj + symbol + scheme)
    pattern2 = Group(obj + scheme)
    pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
    # Most-specific pattern first so obj:symbol is not consumed as obj.
    mapping_entry = pattern1 | pattern2 | pattern3
    # To simplify parsing, classify groups of condition-mapping entry into two types: normal and default
    # A normal grouping is one with a non-default condition. The default grouping is one which contains the
    # default condition
    mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName("mappings")
    # Keep the raw condition text so it can be re-evaluated later.
    normal_condition = Suppress(":") + originalTextFor(SDKConfig.get_expression_grammar())
    default_condition = Optional(Suppress(":") + Literal(Mapping.DEFAULT_CONDITION))
    normal_group = Group(normal_condition.setResultsName("condition") + mapping_entries)
    default_group = Group(default_condition + mapping_entries).setResultsName("default_group")
    normal_groups = Group(ZeroOrMore(normal_group)).setResultsName("normal_groups")
    # Any mapping fragment definition can have zero or more normal group and only one default group as a last entry.
    archive = Suppress("archive") + Suppress(":") + Fragment.ENTITY.setResultsName("archive")
    entries = Suppress("entries") + Suppress(":") + (normal_groups + default_group).setResultsName("entries")
    mapping = Group(header + archive + entries)
    mapping.setParseAction(lambda t: Mapping(t[0].archive, t[0].entries))
    mapping.ignore("#" + restOfLine)
    return mapping
type = Suppress(Word("type")) + identifier file = Suppress(Word("file")) + identifier img = Group( Suppress(Word("img")) + identifier + ZeroOrMore(Suppress(",") + identifier)) name = Suppress(Word("name")) + identifier typeblock = Suppress(Word("blocktype")) + identifier var = identifier.setResultsName("type") + identifier.setResultsName( "varName") + Word(nums).setResultsName("defaultValue") variables = Suppress(Word("variables")) + op + Group(var) + ZeroOrMore( Group(var)) + cl block = Group( CaselessLiteral("block") + op + Group( type.setResultsName("type") + name.setResultsName("name") + file.setResultsName("file") + Optional(variables.setResultsName("variables")) + img.setResultsName("img") + cl)) parser = block + ZeroOrMore(block) config = """ block{ type operador name + file None img blocks/block4, blocks/block3 blocktype simple }
def get_fragment_grammar(sdkconfig, fragment_file):
    """Build the grammar for a deprecated old-style [mapping] fragment.

    Unlike the new-style variant, the parse action evaluates each
    condition against *sdkconfig* immediately and returns a Mapping
    marked ``deprecated = True``; a deprecation warning naming
    *fragment_file* is printed on every successful parse.
    """
    # Match header [mapping]
    header = Suppress("[") + Suppress("mapping") + Suppress("]")
    # There are three possible patterns for mapping entries:
    #       obj:symbol (scheme)
    #       obj (scheme)
    #       * (scheme)
    obj = Fragment.ENTITY.setResultsName("object")
    symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol")
    scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName(
        "scheme") + Suppress(")")
    pattern1 = Group(obj + symbol + scheme)
    pattern2 = Group(obj + scheme)
    pattern3 = Group(
        Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme)
    # Most-specific pattern first so obj:symbol is not consumed as obj.
    mapping_entry = pattern1 | pattern2 | pattern3
    # To simplify parsing, classify groups of condition-mapping entry into two types: normal and default
    # A normal grouping is one with a non-default condition. The default grouping is one which contains the
    # default condition
    mapping_entries = Group(
        ZeroOrMore(mapping_entry)).setResultsName("mappings")
    # Keep the raw condition text so it can be evaluated below.
    normal_condition = Suppress(":") + originalTextFor(
        SDKConfig.get_expression_grammar())
    default_condition = Optional(
        Suppress(":") + Literal(DeprecatedMapping.DEFAULT_CONDITION))
    normal_group = Group(
        normal_condition.setResultsName("condition") + mapping_entries)
    default_group = Group(default_condition +
                          mapping_entries).setResultsName("default_group")
    normal_groups = Group(
        ZeroOrMore(normal_group)).setResultsName("normal_groups")
    # Any mapping fragment definition can have zero or more normal group and only one default group as a last entry.
    archive = Suppress("archive") + Suppress(
        ":") + Fragment.ENTITY.setResultsName("archive")
    entries = Suppress("entries") + Suppress(":") + (
        normal_groups + default_group).setResultsName("entries")
    mapping = Group(header + archive + entries)
    mapping.ignore("#" + restOfLine)

    def parsed_deprecated_mapping(pstr, loc, toks):
        # Parse action: convert the old-style parse tree into a Mapping,
        # evaluating conditions against the captured sdkconfig.
        fragment = Mapping()
        fragment.archive = toks[0].archive
        # Sanitize the archive name into an identifier-safe fragment name.
        fragment.name = re.sub(r"[^0-9a-zA-Z]+", "_", fragment.archive)
        fragment.deprecated = True
        fragment.entries = set()
        condition_true = False
        # First matching (true) normal condition wins.
        for entries in toks[0].entries[0]:
            condition = next(iter(entries.condition.asList())).strip()
            condition_val = sdkconfig.evaluate_expression(condition)
            if condition_val:
                for entry in entries[1]:
                    fragment.entries.add(
                        (entry.object,
                         None if entry.symbol == '' else entry.symbol,
                         entry.scheme))
                condition_true = True
                break
        # No normal condition matched: fall back to the default group.
        # The index juggling handles both shapes the default group can take.
        if not fragment.entries and not condition_true:
            try:
                entries = toks[0].entries[1][1]
            except IndexError:
                entries = toks[0].entries[1][0]
            for entry in entries:
                fragment.entries.add(
                    (entry.object,
                     None if entry.symbol == '' else entry.symbol,
                     entry.scheme))
        # Still empty: fall back to mapping everything with "default".
        if not fragment.entries:
            fragment.entries.add(("*", None, "default"))
        dep_warning = str(
            ParseFatalException(
                pstr, loc,
                "Warning: Deprecated old-style mapping fragment parsed in file %s."
                % fragment_file))
        print(dep_warning)
        return fragment

    mapping.setParseAction(parsed_deprecated_mapping)
    return mapping
# Grammar fragments for a basis-set/MO file (Molden-like format —
# TODO confirm).  Relies on terminals defined elsewhere in this file:
# dollar, parenthesis, slash, brackets, openBra, closeBra, natural,
# pound, star, floatNumber.
header = dollar + restOfLine
# Key line: 1-2 letter atom label followed by the basis-set name.
parseKey = (Word(alphas, max=2)).setResultsName("atomLabel") + \
    (restOfLine).setResultsName("basisName")
# Contraction format, e.g. numbers separated by '/'.
basisFormat = delimitedList(Word(nums), '/')
contraction = Suppress(parenthesis + slash + brackets) + openBra + basisFormat + closeBra
basisHeader = natural + restOfLine
parseContr = pound + Suppress(Word(alphas, max=2)) + contraction
# Coefficients: header line suppressed, then one or more floats.
parseCoeff = Suppress(basisHeader) + OneOrMore(floatNumber)
parseBasisData = OneOrMore(Group(parseCoeff.setResultsName("contractions")))
# One basis block: *key, format, *coefficients.
parseBasis = (star + parseKey + parseContr.setResultsName("format") + star +
              parseBasisData.setResultsName("coeffs"))
topParseB = Suppress(header) + OneOrMore(Group(parseBasis))

# ==================> MOs <==================
# Skip everything up to and including the [MO] marker line.
headerMO = Suppress(SkipTo(Literal("[MO]")) + restOfLine)
sym = Literal("Sym") + restOfLine
spin = Literal("Spin") + restOfLine
occ = Literal("Occ") + restOfLine
def subjects():
    """Grammar for a bracketed, comma-separated list of subject terms,
    exposed under the results name 'subjects'."""
    # Renamed the local (it used to shadow this function's own name).
    bracketed_list = Suppress('[') + delimitedList(subject(), ',') + Suppress(']')
    return bracketed_list.setResultsName('subjects')
class BgpPolicyParser: """Parser class""" def __init__(self, network): self.network = network self.g_business_relationship = nx.DiGraph() self.user_defined_sets = {} self.user_library_calls = [] self.user_defined_functions = {} # Grammars #TODO: tidy this up attribute_unnamed = Word(alphanums+'_'+".") attribute = attribute_unnamed.setResultsName("attribute") self.attribute = attribute lt = Literal("<").setResultsName("<") le = Literal("<=").setResultsName("<=") eq = Literal("=").setResultsName("=") ne = Literal("!=").setResultsName("!=") ge = Literal(">=").setResultsName(">=") gt = Literal(">").setResultsName(">") wildcard = Literal("*").setResultsName("wildcard") self.wildcard = wildcard self.prefix_lists = {} self.tags_to_allocate = set() self.allocated_tags = {} self._opn = { '<': operator.lt, '<=': operator.le, '=': operator.eq, '!=': operator.ne, '>=': operator.ge, '>': operator.gt, '&': set.intersection, '|': set.union, } # map alphanum chars to alphanum equivalents for use in tags self._opn_to_tag = { '<': "lt", '<=': "le", '=': "eq", '!=': "ne", '>=': "ge", '>': "gt", '&': "and", '|': "or", } # Both are of comparison to access in same manner when evaluating comparison = (lt | le | eq | ne | ge | gt).setResultsName("comparison") stringComparison = (eq | ne).setResultsName("comparison") # #quoted string is already present float_string = Word(nums).setResultsName("value").setParseAction(lambda t: float(t[0])) integer_string = Word(nums).setResultsName("value").setParseAction(lambda t: int(t[0])) #TODO: use numString, and make integer if fiull stop #TODO: allow parentheses? - should be ok as pass to the python parser ipField = Word(nums, max=3) ipAddress = Combine( ipField + "." + ipField + "." + ipField + "." 
+ ipField ).setResultsName("ipAddress") boolean_and = Literal("&").setResultsName("&") boolean_or = Literal("|").setResultsName("|") boolean = (boolean_and | boolean_or).setResultsName("boolean") self._boolean = boolean # need to use in checking #TODO fix this matching 2a.ab when that should match a string numericQuery = Group(attribute + comparison + float_string).setResultsName( "numericQuery") stringValues = (attribute_unnamed | quotedString.setParseAction(removeQuotes) ).setResultsName("value") stringQuery = Group(attribute + stringComparison + stringValues).setResultsName( "stringQuery") wildcardQuery = wildcard.setResultsName("wildcardQuery") singleQuery = numericQuery | stringQuery | wildcardQuery singleQuery.setFailAction(parse_fail_action) self.nodeQuery = singleQuery + ZeroOrMore(boolean + singleQuery) self.u_egress = Literal("egress->").setResultsName("u_egress") self.v_ingress = Literal("->ingress").setResultsName("v_ingress") self.u_ingress = Literal("ingress<-").setResultsName("u_ingress") self.v_egress = Literal("<-egress").setResultsName("v_egress") edgeType = ( self.u_egress | self.u_ingress | self.v_egress | self.v_ingress).setResultsName("edgeType").setFailAction(parse_fail_action) self.edgeQuery = ("(" + self.nodeQuery.setResultsName("query_a") + ")" + edgeType + "(" + self.nodeQuery.setResultsName("query_b") + ")").setFailAction(parse_fail_action) #start of BGP queries originQuery = (Literal("Origin").setResultsName("attribute") + #this is a workaround for the match, comparison, value 3-tuple in processing Literal("(").setResultsName("comparison") + Group(self.nodeQuery).setResultsName("value") + Suppress(")")).setResultsName("originQuery") transitQuery = (Literal("Transit").setResultsName("attribute") + #this is a workaround for the match, comparison, value 3-tuple in processing Literal("(").setResultsName("comparison") + Group(self.nodeQuery).setResultsName("value") + Suppress(")")).setResultsName("transitQuery") prefixList = 
Literal("prefix_list") matchPl = (prefixList.setResultsName("attribute") + comparison + attribute.setResultsName("value")) matchTag = (Literal("tag").setResultsName("attribute") + comparison + attribute.setResultsName("value")) #tags contain -> tag = aaa inTags = ( Literal("tags").setResultsName("attribute").setParseAction(lambda x: "tag") + Literal("contain").setResultsName("comparison").setParseAction(lambda x: "=") + attribute_unnamed.setResultsName("value") ) bgpMatchQuery = Group(matchPl | matchTag | inTags | originQuery | transitQuery ).setResultsName("bgpMatchQuery").setFailAction(parse_fail_action) self.bgpMatchQuery = bgpMatchQuery setLP = (Literal("setLP").setResultsName("attribute") + integer_string.setResultsName("value")).setResultsName("setLP") setMED = (Literal("setMED").setResultsName("attribute") + integer_string.setResultsName("value")).setResultsName("setMED") addTag = (Literal("addTag").setResultsName("attribute") + attribute.setResultsName("value")).setResultsName("addTag") removeTag = (Literal("removeTag").setResultsName("attribute") + attribute.setResultsName("value")).setResultsName("removeTag") #TODO: need to set blank value reject = Literal("reject") #TODO: remove once move quagga output inside module self.reject = reject rejectAction = (reject.setResultsName("attribute") + Literal("route").setResultsName("value")).setResultsName("reject") setNextHop = (Literal("setNextHop").setResultsName("attribute") + ipAddress.setResultsName("value")).setResultsName("setNextHop") setOriginAttribute = (Literal("setOriginAttribute").setResultsName("attribute") + (oneOf("IGP BGP None").setResultsName("value"))).setResultsName("setOriginAttribute") bgpAction = Group(addTag | setLP | setMED | removeTag | setNextHop | setOriginAttribute | rejectAction).setResultsName("bgpAction") # The Clauses ifClause = Group(Suppress("if") + bgpMatchQuery + ZeroOrMore(Suppress(boolean_and) + bgpMatchQuery)).setResultsName("if_clause") actionClause = bgpAction + 
ZeroOrMore(Suppress(boolean_and) + bgpAction) thenClause = Group(Suppress("then") + actionClause).setResultsName("then_clause") ifThenClause = Group(Suppress("(") + ifClause + thenClause + Suppress(")")).setResultsName("ifThenClause") elseActionClause = Group(Suppress("(") + actionClause + Suppress(")")).setResultsName("else_clause") # Support actions without a condition (ie no "if") unconditionalAction = Group(Suppress("(") + Group(actionClause).setResultsName("unconditionalActionClause") + Suppress(")")).setResultsName("bgpSessionQuery") # Query may contain itself (nested) bgpSessionQuery = Forward() bgpSessionQuery << ( ifThenClause + Optional( Suppress("else") + (elseActionClause | bgpSessionQuery)) ).setResultsName("bgpSessionQuery") bgpSessionQuery = bgpSessionQuery | unconditionalAction self.bgpSessionQuery = bgpSessionQuery self.bgpApplicationQuery = self.edgeQuery + Suppress(":") + self.bgpSessionQuery # Library stuff set_values = Suppress("{") + delimitedList( attribute, delim=',').setResultsName("set_values") + Suppress("}") #Set to empty set, rather than empty list as empty list is processed differently somewhere in parser empty_set = Literal("{}").setResultsName("set_values").setParseAction(lambda x: set()) self.set_definition = attribute.setResultsName("set_name") + Suppress("=") + (empty_set | set_values) library_params = attribute | Group(set_values) | empty_set library_function = attribute.setResultsName("def_name") + Suppress("(") + delimitedList( library_params, delim=',').setResultsName("def_params") + Suppress(")") library_function.setFailAction(parse_fail_action) self.library_def = Suppress("define") + library_function self.library_call = Suppress("apply") + library_function self.library_def.setFailAction(parse_fail_action) self.library_edge_query = (self.attribute.setResultsName("query_a") + edgeType + self.attribute.setResultsName("query_b")) self.library_edge_query.setFailAction(parse_fail_action) library_edge_definition = 
self.library_edge_query + Suppress(":") + self.bgpSessionQuery library_global_definition = "global tags = {" + delimitedList( attribute, delim=',').setResultsName("tags") + "}" self.library_entry = library_global_definition.setResultsName("global_tags") | library_edge_definition.setResultsName("library_edge") self.library_entry.setFailAction(parse_fail_action) self.bgpPolicyLine = ( self.bgpApplicationQuery.setResultsName("bgpApplicationQuery") | self.library_call.setResultsName("library_call") | self.set_definition.setResultsName("set_definition") ) #TODO: allow shorthand of (1) -> (2) for (asn=1) -> (asn=2) def clear_policies(self): for src, dst in self.network.g_session.edges(): self.network.g_session[src][dst]['ingress'] = [] self.network.g_session[src][dst]['egress'] = [] def apply_bgp_policy(self, qstring): """Applies policy to network >>> inet = ank.internet.Internet("2routers") >>> inet.compile() >>> node_a = inet.network.find("a.AS1") >>> node_b = inet.network.find("b.AS2") >>> pol_parser = ank.BgpPolicyParser(inet.network) >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (setLP 200)") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [] then [setLP 200] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (setMED 200)") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [] then [setMED 200] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (*): (setMED 200)") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [] then [setMED 200] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if tag = test then setLP 100)") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [tag = test] then [setLP 100] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if tags contain test then setLP 100)") >>> 
inet.network.g_session[node_a][node_b]['ingress'] [[if [tag = test] then [setLP 100] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if prefix_list = pl_asn_eq_2 then addTag cl_asn_eq_2))") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [prefix_list = pl_asn_eq_2] then [addTag cl_asn_eq_2] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (addTag ABC & setLP 90))") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [] then [addTag ABC, setLP 90] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if Origin(asn=2) then addTag a100 ))") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [tag = origin_cl_asn_eq_2] then [addTag a100] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if Transit(asn=2) then addTag a100 ))") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [tag = transit_cl_asn_eq_2] then [addTag a100] reject: False]] >>> pol_parser.clear_policies() >>> pol_parser.apply_bgp_policy("(asn=1) ->ingress (asn=2): (if Transit(asn=2) then addTag a100 ))") >>> inet.network.g_session[node_a][node_b]['ingress'] [[if [tag = transit_cl_asn_eq_2] then [addTag a100] reject: False]] >>> pol_parser = ank.BgpPolicyParser(ank.network.Network(ank.load_example("multias"))) #TODO: move these tests out Testing internals: >>> attributestring = "2a.as1" >>> result = pol_parser.attribute.parseString(attributestring) Node and edge queries:: >>> nodestring = "node = '2ab.ab'" >>> result = pol_parser.nodeQuery.parseString(nodestring) >>> result = pol_parser.edgeQuery.parseString("(" + nodestring + ") egress-> (node = b)") >>> result = pol_parser.edgeQuery.parseString("(node = a.b) egress-> (node = b)") Full policy queries:: >>> pol_parser.apply_bgp_policy("(node = '2a.AS2') egress-> (*): (if prefix_list = 
pl_asn_eq_2 then addTag cl_asn_eq_2)") >>> pol_parser.apply_bgp_policy("(Network = AS1 ) ->ingress (Network = AS2): (if tag = deprefme then setLP 90) ") >>> pol_parser.apply_bgp_policy("(Network = AS1 ) ->ingress (Network = AS2): (addTag ABC & setLP 90) ") >>> pol_parser.apply_bgp_policy("(asn = 1) egress-> (asn = 1): (if Origin(asn=2) then addTag a100 )") >>> pol_parser.apply_bgp_policy("(asn = 1) egress-> (asn = 1): (if Transit(asn=2) then addTag a100 )") >>> pol_parser.apply_bgp_policy("(node = a_b ) ->ingress (Network = AS2): (addTag ABC & setLP 90) ") >>> pol_parser.apply_bgp_policy("(node = a_b ) ->ingress (Network = AS2): (if Transit(asn=2) then addTag a100 ) ") """ LOG.debug("Applying BGP policy %s" % qstring) result = self.bgpPolicyLine.parseString(qstring) if 'set_definition' in result: LOG.debug("Storing set definition %s" % result.set_name) self.user_defined_sets[result.set_name] = set(a for a in result.set_values) return if 'library_call' in result: def_params = [] for param in result.def_params: if isinstance(param, basestring): def_params.append(param) else: # is a sequence, extract value def_params.append([p for p in param]) self.user_library_calls.append( (result.def_name, def_params)) return LOG.debug("Query string is %s " % qstring) set_a = self.node_select_query(result.query_a) LOG.debug("Set a is %s " % set_a) set_b = self.node_select_query(result.query_b) LOG.debug("Set b is %s " % set_b) select_type = result.edgeType per_session_policy = self.process_if_then_else(result.bgpSessionQuery) # use nbunch feature of networkx to limit edges to look at node_set = set_a | set_b edges = self.network.g_session.edges(node_set) #LOG.debug("Edges are %s " % edges) # 1 ->, 2 <-, 3 <-> def select_fn_u_to_v( (u, v), src_set, dst_set): """ u -> v""" return (u in src_set and v in dst_set) def select_fn_u_from_v( (u, v), src_set, dst_set): """ u <- v""" return (u in dst_set and v in src_set)
Optional(Group('@' + Word(alphanums + '_-')).setResultsName('moleculeCompartment'))) species_definition = Group(Optional(Group('@' + Word(alphanums + '_')).setResultsName('speciesCompartment') + Suppress(':')) + delimitedList(molecule_instance, delim='.').setResultsName('speciesPattern')) reaction_definition = Group(Group(delimitedList(species_definition, delim='+')).setResultsName('reactants') + (uni_arrow | bi_arrow) + Group(delimitedList(species_definition, delim='+')).setResultsName('products') + Group(lbracket + (numarg | (identifier + Suppress(Optional('()')))) + Optional(comma + (numarg | (identifier + Suppress(Optional('()'))))) + rbracket).setResultsName('rate')) # generic hash section grammar hashed_section = (hashsymbol + Group(OneOrMore(name) + section_enclosure2_)) # hash system_constants # system_constants = Group() hashed_system_constants = Group(hashsymbol + Suppress(system_constants_) + lbrace + OneOrMore(statement) + rbrace) # hash molecule_entry diffusion_entry_ = Group((diffusion_constant_2d_.setResultsName('2D') | diffusion_constant_3d_.setResultsName('3D')) + Suppress(equal) + (function_entry_.setResultsName('function') | (identifier | numarg).setResultsName('variable'))) molecule_entry = Group(molecule_definition + Optional(Group(lbrace + Optional(diffusion_entry_.setResultsName('diffusionFunction')) + (ZeroOrMore(statement)).setResultsName('moleculeParameters') + rbrace))) hashed_molecule_section = Group(hashsymbol + Suppress(define_molecules_) + lbrace + OneOrMore(molecule_entry) + rbrace) # hash function entry function_name = Group(identifier + '()') math_function_entry = Group(function_name.setResultsName('functionName') + Suppress(equal) + Group(restOfLine).setResultsName('functionBody')) hashed_function_section = Group(hashsymbol + Suppress(define_functions_) + lbrace + ZeroOrMore(math_function_entry) + rbrace) # hash reaction entry hashed_reaction_section = Group(hashsymbol + Suppress(define_reactions_) + lbrace + 
OneOrMore(reaction_definition) + rbrace) # hash observable entry count_definition = Group(count_ + lbracket + species_definition.setResultsName('speciesPattern') + Suppress(',') + identifier + rbracket) observable_entry = Group(lbrace + Group(delimitedList(count_definition, delim='+')).setResultsName('patterns') + rbrace + Suppress('=>') + quotedString.setResultsName('outputfile'))
def bytes_requested():
    """Build a pyparsing grammar for a '(N bytes)' annotation.

    Matches an opening parenthesis, a run of digits, and the literal
    'bytes)', suppressing everything except the digit token. The digit
    token is exposed under the results name 'bytes_requested'.
    """
    open_paren = Suppress('(')
    byte_count = Word(nums)
    closing = Suppress('bytes)')
    grammar = open_paren + byte_count + closing
    return grammar.setResultsName('bytes_requested')
Optional(Group('@' + Word(alphanums + '_-')).setResultsName('moleculeCompartment'))) species_definition = Group(Optional(Group('@' + Word(alphanums + '_')).setResultsName('speciesCompartment') + Suppress('::')) + delimitedList(molecule_instance, delim='.').setResultsName('speciesPattern')) reaction_definition = Group(Group(delimitedList(species_definition, delim='+')).setResultsName('reactants') + (uni_arrow | bi_arrow) + Group(delimitedList(species_definition, delim='+')).setResultsName('products') + Group(lbracket + (numarg | (identifier + Suppress(Optional('()')))) + Optional(comma + (numarg| (identifier + Suppress(Optional('()'))))) + rbracket).setResultsName('rate')) # generic hash section grammar hashed_section = (hashsymbol + Group(OneOrMore(name) + section_enclosure2_)) #hash system_constants #system_constants = Group() hashed_system_constants = Group(hashsymbol + Suppress(system_constants_) + lbrace + OneOrMore(statement) + rbrace) # hash molecule_entry diffusion_entry_ = Group((diffusion_constant_2d_.setResultsName('2D') | diffusion_constant_3d_.setResultsName('3D')) + Suppress(equal) + (function_entry_.setResultsName('function') | (identifier | numarg).setResultsName('variable'))) molecule_entry = Group(molecule_definition + Optional(Group(lbrace + Optional(diffusion_entry_.setResultsName('diffusionFunction')) + (ZeroOrMore(statement)).setResultsName('moleculeParameters') + rbrace))) hashed_molecule_section = Group(hashsymbol + Suppress(define_molecules_) + lbrace + OneOrMore(molecule_entry) + rbrace) #hash function entry function_name = Group(identifier + '()') math_function_entry = Group(function_name.setResultsName('functionName') + Suppress(equal) + Group(restOfLine).setResultsName('functionBody')) hashed_function_section = Group(hashsymbol + Suppress(define_functions_) + lbrace + ZeroOrMore(math_function_entry) +rbrace) # hash reaction entry hashed_reaction_section = Group(hashsymbol + Suppress(define_reactions_) + lbrace + 
OneOrMore(reaction_definition) + rbrace) # hash observable entry count_definition = Group(count_ + lbracket + species_definition.setResultsName('speciesPattern') + Suppress(',') + identifier + rbracket) observable_entry = Group(lbrace + Group(delimitedList(count_definition, delim='+')).setResultsName('patterns') + rbrace + Suppress('=>') + quotedString.setResultsName('outputfile'))
(numarg | (identifier + Suppress(Optional('()'))))) + rbracket).setResultsName('rate')) # generic hash section grammar hashed_section = (hashsymbol + Group(OneOrMore(name) + section_enclosure2_)) #hash system_constants #system_constants = Group() hashed_system_constants = Group(hashsymbol + Suppress(system_constants_) + lbrace + OneOrMore(statement) + rbrace) # hash molecule_entry diffusion_entry_ = Group((diffusion_constant_2d_.setResultsName('2D') | diffusion_constant_3d_.setResultsName('3D')) + Suppress(equal) + (function_entry_.setResultsName('function') | (identifier | numarg).setResultsName('variable'))) molecule_entry = Group(molecule_definition + Optional( Group(lbrace + Optional(diffusion_entry_.setResultsName('diffusionFunction')) + (ZeroOrMore(statement)).setResultsName('moleculeParameters') + rbrace))) hashed_molecule_section = Group(hashsymbol + Suppress(define_molecules_) + lbrace + OneOrMore(molecule_entry) + rbrace) #hash function entry function_name = Group(identifier + '()') math_function_entry = Group( function_name.setResultsName('functionName') + Suppress(equal) + Group(restOfLine).setResultsName('functionBody')) hashed_function_section = Group(hashsymbol + Suppress(define_functions_) +