def parse_constraint_group(constraint_group):
    """Parse a group of constraint strings into per-constraint valgrind operations.

    Builds a pyparsing grammar for nested call expressions of the form
    ``name(arg, ...)`` where each arg is itself a call, a bare name, or an
    integer, then parses every constraint in the group.

    :param constraint_group: iterable of constraint strings to parse.
    :return: tuple ``(valgrind_operations_group, size_by_var, offset_by_var,
        realsize_by_var, shift_by_var)``; the first element is one list of
        operations per constraint, the rest are module globals populated as
        side effects of parsing (presumably by parse_function /
        resize_operands — TODO confirm against those helpers).
    """
    # Module-level accumulators are (re)initialised once per group.
    global valgrind_operations, size_by_var, offset_by_var, realsize_by_var, shift_by_var
    init_global_vars()
    lparen = Literal("(")
    rparen = Literal(")")
    # A function name starts alphanumeric and may contain ':' and '_'.
    func = Word(alphanums, alphanums + ":_")
    integer = Word(nums)
    expression = Forward()
    # An argument may itself be a call expression (recursive grammar).
    arg = expression | func | integer
    args = arg + ZeroOrMore("," + arg)
    expression << func + lparen + args + rparen
    # parse_function fires for every matched call; it is expected to append
    # to the global valgrind_operations list — TODO confirm.
    expression.setParseAction(parse_function)
    valgrind_operations_group = []
    for constraint in constraint_group:
        # Reset the accumulator so each constraint collects its own ops.
        valgrind_operations = []
        expression.parseString(constraint)
        resize_operands()
        valgrind_operations_group.append(valgrind_operations)
    return (valgrind_operations_group, size_by_var, offset_by_var,
            realsize_by_var, shift_by_var)
def check_boolean_exprs(exprs=None, operand=(), send_error=True):
    '''
    Check whether a boolean expression is properly formed.

    :param exprs: The string to evaluate.
    :param operand: The name of the operands.
    :param send_error: Whether to throw an error if expression is malformed.
    :return: A boolean.

    :Example:

    >>> from pygtftk.utils import check_boolean_exprs
    >>> assert check_boolean_exprs('s > 1 and (s < 2 or y < 2.5)', operand=['s', 'y'])
    '''
    lparen = Literal("(")
    rparen = Literal(")")
    and_operator = CaselessLiteral("and")
    or_operator = CaselessLiteral("or")
    comparison_operator = oneOf(['==', '!=', '>', '>=', '<', '<='])

    # Signed int/float literal with optional exponent, e.g. -1, +2.5, 3e-2.
    point = Literal('.')
    exponent = CaselessLiteral('E')
    plusorminus = Literal('+') | Literal('-')
    number = Word(nums)
    integer = Combine(Optional(plusorminus) + number)
    float_nb = Combine(integer +
                       Optional(point + Optional(number)) +
                       Optional(exponent + integer))
    value = float_nb
    identifier = oneOf(operand, caseless=False)

    # A comparison may have the identifier on either side of the operator.
    group_1 = identifier + comparison_operator + value
    group_2 = value + comparison_operator + identifier
    comparison = group_1 | group_2

    boolean_expr = operatorPrecedence(comparison,
                                      [(and_operator, 2, opAssoc.LEFT),
                                       (or_operator, 2, opAssoc.LEFT)])
    boolean_expr_par = lparen + boolean_expr + rparen

    expression = Forward()
    # BUG FIX: '<<' binds tighter than '|', so the original
    # `expression << boolean_expr | boolean_expr_par` attached only
    # boolean_expr to the Forward and silently discarded the parenthesised
    # alternative. '<<=' is a statement, so the whole alternation is kept.
    expression <<= boolean_expr | boolean_expr_par

    try:
        expression.parseString(exprs, parseAll=True)
        return True
    except ParseException as err:
        if send_error:
            message(err.msg, force=True)
            message('Operand should be one of: ' + ", ".join(operand))
            message("Boolean expression not supported.", type="ERROR")
        return False
def parse(text):
    """Parse a propositional formula over predicate atoms into nested lists.

    Atoms look like ``[~]name(arg, ...)`` and are collapsed into a single
    token by actCombineWord; they combine with ``&``, ``|`` and
    right-associative ``=>``. Also bumps the module-level ``index`` counter
    on every call — presumably for numbering parsed formulas; TODO confirm
    at call sites.

    :param text: the formula string.
    :return: the parse tree as nested lists, or ``[]`` on a syntax error
        (the error location is printed to stdout).
    """
    global index
    index += 1
    left_parenthesis, right_parenthesis = "(", ")"
    implies = Literal("=>")
    or_ = Literal("|")
    and_ = Literal("&")
    not_ = Literal("~")
    # An (optionally negated) predicate with a parenthesised argument list;
    # actCombineWord merges the matched pieces into one token.
    symbol = (Optional(not_) + Word(alphas) + left_parenthesis +
              delimitedList(Word(alphas)) +
              right_parenthesis).setParseAction(actCombineWord)
    term = Forward()
    # A term is a symbol applied to a list of terms, or a bare symbol.
    term << (Group(symbol + Group(left_parenthesis + delimitedList(term) +
                                  right_parenthesis)) | symbol)
    formula = Forward()
    operand = term
    formula << operatorPrecedence(operand,
                                  [(and_, 2, opAssoc.LEFT),
                                   (or_, 2, opAssoc.LEFT),
                                   (implies, 2, opAssoc.RIGHT)])
    try:
        result = formula.parseString(text, parseAll=True)
        assert len(result) == 1
        return result[0].asList()
    except (ParseException, ParseSyntaxException) as err:
        print("Syntax error:\n{0.line}\n{1}^".format(err, " " * (err.column - 1)))
        return []
def parseQuery(queryString):
    """Parse a toy-SQL query string into pyparsing tokens.

    Dispatches between SELECT / DELETE / INSERT / CREATE / TRUNCATE / DROP
    statements and the exit token.

    :param queryString: raw query text.
    :return: ParseResults for the first matching statement form, or ``[]``
        when nothing matches (a message is printed).
    """
    try:
        selectStmt = Forward().setResultsName("selectStmt")
        selectStmt << (selectToken
                       + ('*' | func_table_column | table_columns).setResultsName("columns")
                       + fromToken + tables.setResultsName("tables")
                       + Optional(whereToken + whereExpression.setResultsName("conds")))

        deleteStmt = Forward().setResultsName("deleteStmt")
        deleteStmt << (deleteToken + table.setResultsName("table")
                       + whereToken + whereExpression.setResultsName("conds"))

        insertStmt = Forward().setResultsName("insertStmt")
        insertStmt << (insertToken + table.setResultsName("table") + valuesToken
                       + "(" + intNums.setResultsName("intValues") + ")")

        createStmt = Forward().setResultsName("createStmt")
        createStmt << (createToken + table.setResultsName("table") + "("
                       + Group(delimitedList(column + intToken)).setResultsName("fields")
                       + ")")

        truncateStmt = Forward().setResultsName("truncateStmt")
        truncateStmt << (truncateToken + table.setResultsName("table"))

        dropStmt = Forward().setResultsName("dropStmt")
        dropStmt << (dropToken + table.setResultsName("table"))

        parser = (selectStmt | insertStmt | deleteStmt | createStmt
                  | truncateStmt | dropStmt | exitToken)
        tokens = parser.parseString(queryString)
        return tokens
    except Exception:
        # BUG FIX: `print "Error in format."` is Python 2 statement syntax
        # and a SyntaxError on Python 3 (the rest of the function already
        # uses Python 3 idioms such as `except ... as e`).
        print("Error in format.")
        return []
def parse_template_expression(input):
    '''
    Read a C++ template expression such as
    std::shared_ptr<std::vector<std::map<std::string, int>>> and return it
    as nested tuples.

    >>> parse_template_expression("std::map<std::string, int>")
    ('std::map', ('std::string', 'int'))

    https://gist.github.com/fHachenberg/6cb2a44a904cd62934ef0825660bdc67
    '''
    type_name = Word(alphanums + "_:")
    lt = Literal("<")
    gt = Literal(">")
    tmpl = Forward()
    # A type name optionally followed by a <...> argument list (recursive).
    tmpl << type_name + Optional(
        Group(Suppress(lt) + delimitedList(tmpl) + Suppress(gt)))
    parsed = tmpl.parseString(input, parseAll=True)

    def to_tuple(node):
        # Collapse nested ParseResults into plain tuples; leaves are strings.
        if isinstance(node, ParseResults):
            return tuple(to_tuple(child) for child in node)
        return node

    return to_tuple(parsed)
def transform(txt):
    """Best-effort conversion of a JSON-ish string.

    Trims `txt` to the outermost ``[...]`` or ``{...}`` span (only when the
    earlier opening bracket appears after position 0), returns the trimmed
    string unchanged if it is valid JSON, and otherwise falls back to a
    permissive pyparsing grammar (unquoted identifiers, trailing commas)
    and returns the parsed Python object instead of the string.
    """
    idx1 = txt.find('[')
    idx2 = txt.find('{')
    # NOTE(review): when only one bracket kind is present, find() returns -1
    # for the other and neither branch fires — presumably intentional; confirm.
    if idx1 < idx2 and idx1 > 0:
        txt = txt[idx1:txt.rfind(']') + 1]
    elif idx2 < idx1 and idx2 > 0:
        txt = txt[idx2:txt.rfind('}') + 1]
    try:
        json.loads(txt)
    except ValueError:  # was a bare `except:`; json.loads raises ValueError
        # parse dict-like syntax
        LBRACK, RBRACK, LBRACE, RBRACE, COLON, COMMA = map(Suppress, "[]{}:,")
        integer = Regex(r"[+-]?\d+").setParseAction(lambda t: int(t[0]))
        real = Regex(r"[+-]?\d+\.\d*").setParseAction(lambda t: float(t[0]))
        string_ = Word(alphas, alphanums + "_") | quotedString.setParseAction(removeQuotes)
        bool_ = oneOf("true false").setParseAction(lambda t: t[0] == "true")
        item = Forward()
        key = string_
        dict_ = LBRACE - Optional(dictOf(key + COLON, item + Optional(COMMA))) + RBRACE
        list_ = LBRACK - Optional(delimitedList(item)) + RBRACK
        item << (real | integer | string_ | bool_ | Group(list_ | dict_))
        result = item.parseString(txt, parseAll=True)[0]
        # BUG FIX: `print result` is Python 2 statement syntax.
        print(result)
        txt = result
    return txt
def parse(lines: str, allowed_colors: str = '') -> Union[Rule, Color]:
    """Parse a rule DSL string into a Rule tree (or a single Color).

    Syntax: a rule is either a single color character or
    ``if <bool_expr>: <rule> else: <rule>``, where boolean expressions
    compare 9-cell selectors and integers with arithmetic operators.

    :param lines: the rule source (comments are stripped first).
    :param allowed_colors: characters allowed as colors; defaults to all
        ASCII letters.
    """
    lines = strip_comments(lines)
    if allowed_colors == '':
        allowed_colors = string.ascii_lowercase + string.ascii_uppercase
    color = Word(allowed_colors, exact=1)
    integer = Word(string.digits).setParseAction(lambda t: int(t[0]))
    # A selector is exactly nine single-character cells (color or '-').
    selector = Word(allowed_colors + '-', exact=1) * 9
    selector.setParseAction(lambda s: Selector(''.join(s)))
    num_operation = infixNotation((selector | integer), [
        ('*', 2, opAssoc.LEFT, parse_selector_operator),
        ('/', 2, opAssoc.LEFT, parse_selector_operator),
        ('+', 2, opAssoc.LEFT, parse_selector_operator),
        ('-', 2, opAssoc.LEFT, parse_selector_operator),
        ('%', 2, opAssoc.LEFT, parse_selector_operator),
    ])
    operator = oneOf('>= <= != > < ==')
    comparison_token = num_operation | selector | integer
    comparison = (comparison_token + operator + comparison_token).\
        setParseAction(lambda t: Comparison(*t))
    bool_expr = infixNotation(comparison, [
        ('and', 2, opAssoc.LEFT, parse_bool_expr),
        ('or', 2, opAssoc.LEFT, parse_bool_expr),
    ])
    rule = Forward()
    # BUG FIX: Word('if') / Word('else:') match ANY run of those characters
    # (e.g. 'fi', 'ff', ':else'); Keyword/Literal match the exact text.
    condition = (Keyword('if') + bool_expr + Literal(':') + rule +
                 Literal('else:') + rule)
    rule << (condition | color).setParseAction(parse_rule)
    return rule.parseString(lines, parseAll=True)[0]
def select_parsing(query):
    """Parse a SELECT statement.

    Recognises ``SELECT */columns FROM tables [WHERE ...]`` and ignores
    Oracle-style SQL comments. Returns the pyparsing ParseResults.
    """
    columns_part = ("*" | columnNameList)("columnsToShow")
    where_part = Optional(Group(WHERE + whereExpression), "")("where")
    grammar = Forward()
    grammar <<= (SELECT + columns_part
                 + FROM + tableNameList("tables")
                 + where_part)
    grammar.ignore(oracleSqlComment)
    return grammar.parseString(query)
def get_enclosed(self, raw):
    """Parse `raw` into words and bracket-enclosed groups.

    Words may contain ASCII punctuation except the four bracket pairs
    {} [] () <>, which delimit recursively nested groups.
    """
    # Word ::= Ascii minus the bracket tokens
    punct = "!#$%&'*+,-./:;=?@\\^_`|~"
    token = Word(alphanums + punct)
    # Tokens ::= {}[]()<> — all suppressed from the output
    oc, cc, ob, cb, op, cp, oa, ca = (Suppress(ch) for ch in "{}[]()<>")
    data = Forward()
    # One group form per bracket flavour, each recursing into `data`.
    in_curly = OneOrMore(oc + data + cc)
    in_brack = OneOrMore(ob + data + cb)
    in_paren = OneOrMore(op + data + cp)
    in_angle = OneOrMore(oa + data + ca)
    groups = (Optional(in_curly) & Optional(in_brack)
              & Optional(in_paren) & Optional(in_angle))
    data << ((OneOrMore(token) & groups) ^ groups)
    return data.parseString(raw)
class RigorousClassicalPropositionalLogicParser:
    """pyparsing grammar for fully-parenthesised classical propositional logic.

    Formulas are booleans/symbols, unary ``!``/``~`` negation, and binary
    ``|`` ``&`` ``->`` formulas that MUST be wrapped in parentheses — hence
    "rigorous": no operator-precedence rules are needed.
    """

    def __init__(self):
        # Parentheses are structural only and dropped from the parse tree.
        self.left_parenthesis = Suppress("(")
        self.right_parenthesis = Suppress(")")
        self.implies = Literal("->")
        self.or_ = Literal("|")
        self.and_ = Literal("&")
        self.not_ = Literal("!") | Literal("~")
        self.boolean = Keyword("false") | Keyword("true")
        self.symbol = Word(alphas, alphanums)
        self.formula = Forward()
        self.operand = self.boolean | self.symbol
        self.binaryConnective = self.or_ | self.and_ | self.implies
        self.unaryFormula = Group(self.not_ + self.formula)
        # Binary formulas are only recognised inside explicit parentheses.
        self.binaryFormula = Group(self.left_parenthesis + self.formula +
                                   self.binaryConnective + self.formula +
                                   self.right_parenthesis)
        self.formula << (self.unaryFormula | self.binaryFormula | self.operand)

    ## Should return a ParserResult object
    def parse(self, text):
        """Parse `text`; return the ParseResults, or "" on a syntax error."""
        try:
            result = self.formula.parseString(text, parseAll=True)
            assert len(result) == 1
            return result
        except (ParseException, ParseSyntaxException) as err:
            # print("Syntax error:\n{0.line}\n{1}^".format(err, " " * (err.column - 1)))
            return ""
def get_enclosed(self, raw):
    """Tokenise `raw` into plain words and nested bracket groups.

    Any of the four bracket pairs {} [] () <> opens a recursive group;
    all other ASCII punctuation is treated as part of a word.
    """
    punctuation = "!#$%&'*+,-./:;=?@\\^_`|~"
    plain_word = Word(alphanums + punctuation)
    # Suppressed open/close pairs, in the order {}, [], (), <>.
    delims = [Suppress(c) for c in "{}[]()<>"]
    inner = Forward()
    wrapped = [OneOrMore(delims[i] + inner + delims[i + 1])
               for i in range(0, 8, 2)]
    any_groups = (Optional(wrapped[0]) & Optional(wrapped[1]) &
                  Optional(wrapped[2]) & Optional(wrapped[3]))
    inner << ((OneOrMore(plain_word) & any_groups) ^ any_groups)
    return inner.parseString(raw)
def pyparse_flo(text):
    """Parse a first-order-logic formula into nested lists.

    Supports quantifiers ``forall x: ...`` / ``exists x: ...``, terms of
    the form ``f(a, b, ...)``, booleans, and operators ``=`` ``~`` ``&``
    ``|`` ``->`` (precedence as listed, ``=`` binding tightest).

    Returns the parse tree as nested lists; on a syntax error the location
    is printed and None is returned implicitly.
    """
    left_parenthesis, right_parenthesis, colon = map(Suppress, '():')
    boolean = Keyword('false') | Keyword('true')
    forall = Keyword('forall')
    exists = Keyword('exists')
    and_ = Literal('&')
    or_ = Literal('|')
    not_ = Literal('~')
    equals = Literal('=')
    implies = Literal('->')
    symbol = Word(alphas, alphanums)
    term = Forward()
    # A term is a function application or a bare symbol (recursive).
    term << (Group(symbol + Group(left_parenthesis +
                                  delimitedList(term) +
                                  right_parenthesis)) | symbol)
    formula = Forward()
    forall_expression = forall + symbol + colon + formula
    exists_expression = exists + symbol + colon + formula
    operand = forall_expression | exists_expression | boolean | term
    formula << operatorPrecedence(operand,
                                  [(equals, 2, opAssoc.RIGHT),
                                   (not_, 1, opAssoc.RIGHT),
                                   (and_, 2, opAssoc.LEFT),
                                   (or_, 2, opAssoc.LEFT),
                                   (implies, 2, opAssoc.RIGHT)])
    try:
        result = formula.parseString(text, parseAll=True)
        assert len(result) == 1
        return result[0].asList()
    except (ParseException, ParseSyntaxException) as parse_err:
        print('Syntax Error:\n{}\n{}^'.format(parse_err.line,
                                              ' ' * (parse_err.column - 1)))
def parse_sexp(data):
    '''Parse sexp/S-expression format and return a python list.

    Supports raw ``len:bytes`` strings, bare tokens, ``|base64|`` blobs
    (with optional length prefix), ``#hex#`` integers and quoted strings,
    plus ``[display-hint]`` prefixes and nested ``( ... )`` lists.
    Returns the first parsed list minus its leading element, or None after
    printing details when a ParseFatalException is raised (e.g. by the
    length checks in OtrPrivateKeys.verifyLen).
    '''
    # define punctuation literals
    LPAR, RPAR, LBRK, RBRK, LBRC, RBRC, VBAR = map(Suppress, "()[]{}|")
    decimal = Word("123456789", nums).setParseAction(lambda t: int(t[0]))
    # renamed from `bytes` to avoid shadowing the builtin
    byte_run = Word(printables)
    raw = Group(decimal.setResultsName("len") + Suppress(":")
                + byte_run).setParseAction(OtrPrivateKeys.verifyLen)
    token = Word(alphanums + "-./_:*+=")
    base64_ = Group(Optional(decimal, default=None).setResultsName("len")
                    + VBAR
                    + OneOrMore(Word(alphanums + "+/=")).setParseAction(
                        lambda t: b64decode("".join(t)))
                    + VBAR).setParseAction(OtrPrivateKeys.verifyLen)
    hexadecimal = ("#" + OneOrMore(Word(hexnums)) + "#")\
        .setParseAction(lambda t: int("".join(t[1:-1]), 16))
    qString = Group(Optional(decimal, default=None).setResultsName("len")
                    + dblQuotedString.setParseAction(removeQuotes)
                    ).setParseAction(OtrPrivateKeys.verifyLen)
    simpleString = raw | token | base64_ | hexadecimal | qString
    display = LBRK + simpleString + RBRK
    string_ = Optional(display) + simpleString
    sexp = Forward()
    sexpList = Group(LPAR + ZeroOrMore(sexp) + RPAR)
    sexp << (string_ | sexpList)
    try:
        sexpr = sexp.parseString(data)
        return sexpr.asList()[0][1:]
    # BUG FIX: `except ParseFatalException, pfe:` is Python 2 syntax and a
    # SyntaxError on Python 3; use `as`.
    except ParseFatalException as pfe:
        print("Error:", pfe.msg)
        print(pfe.loc)
        print(pfe.markInputline())
def __parseNestedList(value, dtype=float):
    """Parse a bracketed, possibly nested list literal such as
    "[1, [2.5, 3e-4]]" into nested Python lists of `dtype` values.
    """
    from pyparsing import Word, Group, Forward, OneOrMore, Optional, alphanums, Suppress
    # A scalar: digits/letters plus '.', 'e' and '-' (converted via dtype).
    scalar = Word(alphanums + ".e-").setParseAction(lambda s, l, t: dtype(t[0]))
    nested = Forward()
    entry = scalar | nested
    nested << Group(Suppress('[')
                    + OneOrMore(entry + Optional(Suppress(',')))
                    + Suppress(']'))
    return nested.parseString(value, parseAll=True).asList()[0]
def parseArctl(spec):
    """Parse the spec and return its AST.

    The ARCTL (action-restricted CTL) grammar is built lazily on first call
    and cached in the module-level ``__arctl`` Forward; later calls reuse it.
    """
    global __arctl
    if __arctl is None:
        true = Literal("True")
        true.setParseAction(lambda tokens: TrueExp())
        false = Literal("False")
        false.setParseAction(lambda tokens: FalseExp())
        # Atomic propositions are single-quoted strings.
        atom = "'" + SkipTo("'") + "'"
        atom.setParseAction(lambda tokens: Atom(tokens[1]))
        # Action formulas: boolean combinations of atoms.
        action = _logicals_(atom)
        __arctl = Forward()
        proposition = true | false | atom
        notproposition = "~" + proposition
        notproposition.setParseAction(lambda tokens: Not(tokens[1]))
        formula = (proposition | notproposition |
                   Suppress("(") + __arctl + Suppress(")"))
        temporal = Forward()
        # Path quantifiers carry an action restriction: E<action>, A<action>.
        e = Literal("E") + "<" + action + ">"
        a = Literal("A") + "<" + action + ">"
        # In X/F/G forms, tokens[2] is the action, tokens[5] the subformula.
        eax = e + "X" + temporal
        eax.setParseAction(lambda tokens: EaX(tokens[2], tokens[5]))
        aax = a + "X" + temporal
        aax.setParseAction(lambda tokens: AaX(tokens[2], tokens[5]))
        eaf = e + "F" + temporal
        eaf.setParseAction(lambda tokens: EaF(tokens[2], tokens[5]))
        aaf = a + "F" + temporal
        aaf.setParseAction(lambda tokens: AaF(tokens[2], tokens[5]))
        eag = e + "G" + temporal
        eag.setParseAction(lambda tokens: EaG(tokens[2], tokens[5]))
        aag = a + "G" + temporal
        aag.setParseAction(lambda tokens: AaG(tokens[2], tokens[5]))
        # Until / weak-until: tokens[2]=action, tokens[5] and [7]=subformulas.
        eau = e + "[" + __arctl + "U" + __arctl + "]"
        eau.setParseAction(lambda tokens: EaU(tokens[2], tokens[5], tokens[7]))
        aau = a + "[" + __arctl + "U" + __arctl + "]"
        aau.setParseAction(lambda tokens: AaU(tokens[2], tokens[5], tokens[7]))
        eaw = e + "[" + __arctl + "W" + __arctl + "]"
        eaw.setParseAction(lambda tokens: EaW(tokens[2], tokens[5], tokens[7]))
        aaw = a + "[" + __arctl + "W" + __arctl + "]"
        aaw.setParseAction(lambda tokens: AaW(tokens[2], tokens[5], tokens[7]))
        temporal <<= (formula | eax | aax | eaf | aaf | eag | aag |
                      eau | aau | eaw | aaw)
        # Full formulas are boolean combinations of temporal formulas.
        logical = _logicals_(temporal)
        __arctl <<= logical
    return __arctl.parseString(spec, parseAll=True)
def parse(string):
    '''
    returns either [atomic], [monoop, [f]] or [binop, [f1], [f2]]
    this method is static (no need for a CTL instance)
    '''
    lparen = Literal('(').suppress()
    rparen = Literal(')').suppress()
    wildcard = Literal('_')
    # An atom: a name with optional '.digits' suffix, 'true', 'false' or '_'.
    atom = Combine(Word(alphas) + Optional(Word('.0123456789')) ^ 'true' ^ 'false' ^ wildcard)
    term = Forward()
    # A term may carry a parenthesised, comma-separated argument list.
    term << (atom + Optional(lparen + Group(term) +
                             ZeroOrMore(Literal(',').suppress() + Group(term)) +
                             rparen))
    # Path quantifiers may be prefixed with '<-' (reversed-time variants —
    # presumably; TODO confirm against the CTL semantics used by callers).
    A = Optional('<-') + 'A'
    E = Optional('<-') + 'E'
    G, Gi, F, X, U = map(Literal, ('G', 'Gi', 'F', 'X', 'U'))
    UnOp = wildcard ^ '!' ^ Combine(A + (G ^ F ^ X)) ^ Combine(E + (G ^ Gi ^ F ^ X))
    BinOp = wildcard ^ Literal('or') ^ Literal('and') ^ Combine(A + U) ^ Combine(E + U)
    formula = Forward()
    # Prefix notation: operators precede their operand formulas.
    formula << (Group(term)
                ^ (lparen + formula + rparen)
                ^ Group(UnOp + formula)
                ^ Group(BinOp + formula + formula))
    # 0 because we expect only one formula in the string
    return formula.parseString(string).asList()[0]
def pyparse_blk(text):
    """Parse a `.blk` block-layout description into a Block tree.

    Syntax: ``[color: title [children...]]`` where runs of ``/`` insert new
    lines between sibling blocks and ``[]`` is an empty block.

    :return: the root Block with children populated from `text`.
    :raises ValueError: on a syntax error.
    """
    def create_add_block(tokens):
        # Default to 'white' when no explicit color was given.
        return Block.Block(tokens.title, tokens.color if tokens.color else 'white')

    left_bracket, right_bracket, equal_sign = map(Suppress, '[]=')
    # Either an HTML hex color like '#a0b1c2' or a color name.
    color = (Word('#', hexnums, exact=7) | Word(alphanums, alphas))('color')
    empty_block = (
        left_bracket + right_bracket)('empty_block').setParseAction(lambda: EmptyBlock)
    # A run of '/' characters; its length is the number of new lines.
    new_lines = Word('/')('new_lines').setParseAction(
        lambda tokens: len(tokens.new_lines))
    title = CharsNotIn('[]/\n')('title').setParseAction(
        lambda tokens: tokens.title.strip())
    block_data = Optional(color + Suppress(':')) + Optional(title)
    block_data.addParseAction(create_add_block)
    blocks = Forward()
    block = left_bracket + block_data + blocks + right_bracket
    blocks << Group(
        ZeroOrMore(Optional(new_lines) + OneOrMore(empty_block | block)))
    stack = [Block.create_root_block()]
    try:
        result = blocks.parseString(text, parseAll=True)
        assert len(result) == 1
        blocks_list = result.asList()[0]
        populate_children(blocks_list, stack)
    except (ParseSyntaxException, ParseException) as parse_err:
        # NOTE(review): '{{0}}' renders literally as '{0}' in the message —
        # appears intentional (the sibling parser's doctests expect it).
        raise ValueError('Error {{0}}: {0}'.format(parse_err.lineno))
    return stack[0]
def parseQuery(queryString):
    """Parse a toy-SQL query string into pyparsing tokens.

    Tries SELECT, INSERT, DELETE, CREATE, TRUNCATE and DROP statement forms
    (plus the exit token) in order and returns the first match.

    :param queryString: raw query text.
    :return: ParseResults on success, ``[]`` on failure (message printed).
    """
    try:
        selectStmt = Forward().setResultsName("selectStmt")
        selectStmt << (selectToken
                       + ('*' | func_table_column | table_columns).setResultsName("columns")
                       + fromToken + tables.setResultsName("tables")
                       + Optional(whereToken + whereExpression.setResultsName("conds")))

        deleteStmt = Forward().setResultsName("deleteStmt")
        deleteStmt << (deleteToken + table.setResultsName("table")
                       + whereToken + whereExpression.setResultsName("conds"))

        insertStmt = Forward().setResultsName("insertStmt")
        insertStmt << (insertToken + table.setResultsName("table") + valuesToken
                       + "(" + intNums.setResultsName("intValues") + ")")

        createStmt = Forward().setResultsName("createStmt")
        createStmt << (createToken + table.setResultsName("table") + "("
                       + Group(delimitedList(column + intToken)).setResultsName("fields")
                       + ")")

        truncateStmt = Forward().setResultsName("truncateStmt")
        truncateStmt << (truncateToken + table.setResultsName("table"))

        dropStmt = Forward().setResultsName("dropStmt")
        dropStmt << (dropToken + table.setResultsName("table"))

        parser = (selectStmt | insertStmt | deleteStmt | createStmt
                  | truncateStmt | dropStmt | exitToken)
        tokens = parser.parseString(queryString)
        return tokens
    except Exception:
        # BUG FIX: `print "Error in format."` is Python 2 statement syntax
        # and a SyntaxError on Python 3 (the function otherwise uses
        # Python 3 idioms such as `except Exception as e`).
        print("Error in format.")
        return []
def __parser(expression):
    """
    adopted from Paul McGuire example.
    http://pyparsing.wikispaces.com/file/view/fourFn.py

    Parses an arithmetic expression (numbers, identifiers, X, PI, E,
    + - * / ^, function-style calls, parentheses) and returns it as a
    postfix (RPN) token list built up in `expr_stack` by parse actions.
    """
    expr_stack = []

    def push_first(strg, loc, toks):
        # Emit the leading token of the match onto the RPN stack.
        expr_stack.append(toks[0])

    def push_u_minus(strg, loc, toks):
        # Record a unary minus distinctly from binary subtraction.
        if toks and toks[0] == '-':
            expr_stack.append('unary -')

    point = Literal('.')
    _e = CaselessLiteral('E')
    # Signed float with optional fraction and exponent, e.g. -1.5e+3.
    fnumber = Combine(
        Word('+-' + nums, nums) +
        Optional(point + Optional(Word(nums))) +
        Optional(_e + Word('+-' + nums, nums)))
    ident = Word(alphas, alphas + nums + '_$')
    plus = Literal("+")
    minus = Literal("-")
    mult = Literal("*")
    div = Literal("/")
    lpar = Literal("(").suppress()
    rpar = Literal(")").suppress()
    addop = plus | minus
    multop = mult | div
    expop = Literal("^")
    _pi = CaselessLiteral("PI")
    x = CaselessLiteral("X")
    expr = Forward()
    atom = (Optional("-") +
            (x | _pi | _e | fnumber | ident + lpar + expr + rpar).setParseAction(push_first)
            | (lpar + expr.suppress() + rpar)).setParseAction(push_u_minus)
    factor = Forward()
    # Exponentiation recurses on the right, making '^' right-associative.
    factor << atom + ZeroOrMore(
        (expop + factor).setParseAction(push_first))
    term = factor + ZeroOrMore(
        (multop + factor).setParseAction(push_first))
    expr << term + ZeroOrMore((addop + term).setParseAction(push_first))
    expr.parseString(expression)
    return expr_stack
def pyparsing_parse(text):
    """
    Parse a `.blk` block-layout description into a Block tree; raises
    ValueError on syntax errors.

    >>> import os
    >>> dirname = os.path.join(os.path.dirname(__file__), "data")
    >>> filename = os.path.join(dirname, "error1.blk")
    >>> pyparsing_parse(open(filename, encoding="utf8").read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 8
    >>> filename = os.path.join(dirname, "error2.blk")
    >>> pyparsing_parse(open(filename, encoding="utf8").read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 1
    >>> filename = os.path.join(dirname, "error3.blk")
    >>> pyparsing_parse(open(filename, encoding="utf8").read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 4
    >>> expected = "[white: ]\\n[lightblue: Director]\\n/\\n/\\n[white: ]\\n[lightgreen: Secretary]\\n/\\n/\\n[white: Minion #1]\\n[white: ]\\n[white: Minion #2]"
    >>> filename = os.path.join(dirname, "hierarchy.blk")
    >>> blocks = pyparsing_parse(open(filename, encoding="utf8").read())
    >>> str(blocks).strip() == expected
    True
    >>> expected = "[#00CCDE: MessageBox Window\\n[lightgray: Frame\\n[white: ]\\n[white: Message text]\\n/\\n/\\n[goldenrod: OK Button]\\n[white: ]\\n[#ff0505: Cancel Button]\\n/\\n[white: ]\\n]\\n]"
    >>> filename = os.path.join(dirname, "messagebox.blk")
    >>> blocks = pyparsing_parse(open(filename, encoding="utf8").read())
    >>> str(blocks).strip() == expected
    True
    """
    def add_block(tokens):
        # Default to "white" when no explicit color is given.
        return Block.Block(tokens.name,
                           tokens.color if tokens.color else "white")

    left_bracket, right_bracket = map(Suppress, "[]")
    # A run of '/' characters; its length is the number of new rows.
    new_rows = Word("/")("new_rows").setParseAction(
        lambda tokens: len(tokens.new_rows))
    name = CharsNotIn("[]/\n")("name").setParseAction(
        lambda tokens: tokens.name.strip())
    # Either an HTML hex color like '#a0b1c2' or a color name.
    color = (Word("#", hexnums, exact=7) | Word(alphas, alphanums))("color")
    empty_node = (left_bracket + right_bracket).setParseAction(
        lambda: EmptyBlock)
    nodes = Forward()
    node_data = Optional(color + Suppress(":")) + Optional(name)
    node_data.setParseAction(add_block)
    # '-' (And with error stop) makes everything after '[' mandatory,
    # producing better-located syntax errors than '+'.
    node = left_bracket - node_data + nodes + right_bracket
    nodes << Group(
        ZeroOrMore(Optional(new_rows) + OneOrMore(node | empty_node)))
    stack = [Block.get_root_block()]
    try:
        results = nodes.parseString(text, parseAll=True)
        assert len(results) == 1
        items = results.asList()[0]
        populate_children(items, stack)
    except (ParseException, ParseSyntaxException) as err:
        raise ValueError("Error {{0}}: syntax error, line "
                         "{0}".format(err.lineno))
    return stack[0]
def parseATL(spec):
    """Parse the spec and return the list of possible ASTs.

    The ATL grammar is built lazily on first call and cached in the
    module-level ``__atl`` Forward; later calls reuse it.
    """
    global __atl
    if __atl is None:
        true = Literal("True")
        true.setParseAction(lambda tokens: TrueExp())
        false = Literal("False")
        false.setParseAction(lambda tokens: FalseExp())
        # Atomic propositions (and agent names) are single-quoted strings.
        atom = "'" + SkipTo("'") + "'"
        atom.setParseAction(lambda tokens: Atom(tokens[1]))
        agent = atom
        # A coalition: comma-separated agents, e.g. 'a', 'b'
        group = Group(ZeroOrMore(agent + Suppress(",")) + agent)
        proposition = true | false | atom
        __atl = Forward()
        notproposition = "~" + proposition
        notproposition.setParseAction(lambda tokens: Not(tokens[1]))
        formula = (proposition | notproposition |
                   Suppress("(") + __atl + Suppress(")"))
        logical = Forward()
        # Strategic modalities: [group]op = all strategies, <group>op = some.
        # tokens: [bracket, group, bracket, op, operand] -> indices 1 and 4.
        cax = Literal("[") + group + "]" + "X" + logical
        cax.setParseAction(lambda tokens: CAX(tokens[1], tokens[4]))
        cex = Literal("<") + group + ">" + "X" + logical
        cex.setParseAction(lambda tokens: CEX(tokens[1], tokens[4]))
        caf = Literal("[") + group + "]" + "F" + logical
        caf.setParseAction(lambda tokens: CAF(tokens[1], tokens[4]))
        cef = Literal("<") + group + ">" + "F" + logical
        cef.setParseAction(lambda tokens: CEF(tokens[1], tokens[4]))
        cag = Literal("[") + group + "]" + "G" + logical
        cag.setParseAction(lambda tokens: CAG(tokens[1], tokens[4]))
        ceg = Literal("<") + group + ">" + "G" + logical
        ceg.setParseAction(lambda tokens: CEG(tokens[1], tokens[4]))
        # Until / weak-until: tokens[1]=group, [4] and [6]=subformulas.
        cau = Literal("[") + group + "]" + "[" + __atl + "U" + __atl + "]"
        cau.setParseAction(lambda tokens: CAU(tokens[1], tokens[4], tokens[6]))
        ceu = Literal("<") + group + ">" + "[" + __atl + "U" + __atl + "]"
        ceu.setParseAction(lambda tokens: CEU(tokens[1], tokens[4], tokens[6]))
        caw = Literal("[") + group + "]" + "[" + __atl + "W" + __atl + "]"
        caw.setParseAction(lambda tokens: CAW(tokens[1], tokens[4], tokens[6]))
        cew = Literal("<") + group + ">" + "[" + __atl + "W" + __atl + "]"
        cew.setParseAction(lambda tokens: CEW(tokens[1], tokens[4], tokens[6]))
        strategic = (cax | cex | caf | cef | cag | ceg | cau | ceu | caw | cew)
        logical <<= (formula | strategic)
        # Full formulas are boolean combinations of the above.
        __atl <<= (_logicals_(logical))
    return __atl.parseString(spec, parseAll=True)
def delete_query_parse(query):
    """Parse a DELETE statement.

    Recognises ``DELETE FROM <table> [WHERE ...]`` and ignores Oracle-style
    SQL comments. Returns the pyparsing ParseResults.
    """
    where_part = Optional(Group(WHERE + whereExpression), "")("where")
    grammar = Forward()
    grammar <<= (DELETE + FROM
                 + tableName("tableName")
                 + where_part)
    grammar.ignore(oracleSqlComment)
    return grammar.parseString(query)
def _parse_term(text):
    """Parse a prolog-style term like ``f(a, ?x, g(b))`` into nested lists.

    Identifiers may contain letters, digits, '_', '?', '.' and '-'.
    Returns the parsed term as nested lists (asList of the first result).
    """
    lpar, rpar, _colon = map(Suppress, "():")
    ident = Word(alphas + "_?.", alphanums + "_?.-")
    term = Forward()
    # Compound term: functor followed by a parenthesised argument list.
    compound = Group(ident + Group(lpar + delimitedList(term) + rpar))
    term << (compound | ident)
    parsed = term.parseString(text, parseAll=True)
    return parsed.asList()[0]
def parseArctl(spec):
    """Parse the spec and return its AST.

    Lazily builds the ARCTL (action-restricted CTL) grammar on first use
    and caches it in the module-level ``__arctl`` Forward.
    """
    global __arctl
    if __arctl is None:
        true = Literal("True")
        true.setParseAction(lambda tokens: TrueExp())
        false = Literal("False")
        false.setParseAction(lambda tokens: FalseExp())
        # Atomic propositions are single-quoted strings.
        atom = "'" + SkipTo("'") + "'"
        atom.setParseAction(lambda tokens: Atom(tokens[1]))
        # Action formulas: boolean combinations of atoms.
        action = _logicals_(atom)
        __arctl = Forward()
        proposition = true | false | atom
        notproposition = "~" + proposition
        notproposition.setParseAction(lambda tokens: Not(tokens[1]))
        formula = proposition | notproposition | Suppress("(") + __arctl + Suppress(")")
        temporal = Forward()
        # Path quantifiers carry an action restriction: E<action>, A<action>.
        e = Literal("E") + "<" + action + ">"
        a = Literal("A") + "<" + action + ">"
        # In X/F/G forms, tokens[2] is the action, tokens[5] the subformula.
        eax = e + "X" + temporal
        eax.setParseAction(lambda tokens: EaX(tokens[2], tokens[5]))
        aax = a + "X" + temporal
        aax.setParseAction(lambda tokens: AaX(tokens[2], tokens[5]))
        eaf = e + "F" + temporal
        eaf.setParseAction(lambda tokens: EaF(tokens[2], tokens[5]))
        aaf = a + "F" + temporal
        aaf.setParseAction(lambda tokens: AaF(tokens[2], tokens[5]))
        eag = e + "G" + temporal
        eag.setParseAction(lambda tokens: EaG(tokens[2], tokens[5]))
        aag = a + "G" + temporal
        aag.setParseAction(lambda tokens: AaG(tokens[2], tokens[5]))
        # Until / weak-until: tokens[2]=action, tokens[5] and [7]=subformulas.
        eau = e + "[" + __arctl + "U" + __arctl + "]"
        eau.setParseAction(lambda tokens: EaU(tokens[2], tokens[5], tokens[7]))
        aau = a + "[" + __arctl + "U" + __arctl + "]"
        aau.setParseAction(lambda tokens: AaU(tokens[2], tokens[5], tokens[7]))
        eaw = e + "[" + __arctl + "W" + __arctl + "]"
        eaw.setParseAction(lambda tokens: EaW(tokens[2], tokens[5], tokens[7]))
        aaw = a + "[" + __arctl + "W" + __arctl + "]"
        aaw.setParseAction(lambda tokens: AaW(tokens[2], tokens[5], tokens[7]))
        temporal <<= formula | eax | aax | eaf | aaf | eag | aag | eau | aau | eaw | aaw
        # Full formulas are boolean combinations of temporal formulas.
        logical = _logicals_(temporal)
        __arctl <<= logical
    return __arctl.parseString(spec, parseAll=True)
def parseATL(spec):
    """Parse the spec and return the list of possible ASTs.

    Builds the ATL grammar once and caches it in the module-level ``__atl``
    Forward so repeated calls do not rebuild it.
    """
    global __atl
    if __atl is None:
        true = Literal("True")
        true.setParseAction(lambda tokens: TrueExp())
        false = Literal("False")
        false.setParseAction(lambda tokens: FalseExp())
        # Atomic propositions (and agent names) are single-quoted strings.
        atom = "'" + SkipTo("'") + "'"
        atom.setParseAction(lambda tokens: Atom(tokens[1]))
        agent = atom
        # A coalition of agents, comma-separated.
        group = Group(ZeroOrMore(agent + Suppress(",")) + agent)
        proposition = true | false | atom
        __atl = Forward()
        notproposition = "~" + proposition
        notproposition.setParseAction(lambda tokens: Not(tokens[1]))
        formula = (proposition | notproposition |
                   Suppress("(") + __atl + Suppress(")"))
        logical = Forward()
        # Strategic modalities: [group]op and <group>op.
        # tokens: [bracket, group, bracket, op, operand] -> indices 1 and 4.
        cax = Literal("[") + group + "]" + "X" + logical
        cax.setParseAction(lambda tokens: CAX(tokens[1], tokens[4]))
        cex = Literal("<") + group + ">" + "X" + logical
        cex.setParseAction(lambda tokens: CEX(tokens[1], tokens[4]))
        caf = Literal("[") + group + "]" + "F" + logical
        caf.setParseAction(lambda tokens: CAF(tokens[1], tokens[4]))
        cef = Literal("<") + group + ">" + "F" + logical
        cef.setParseAction(lambda tokens: CEF(tokens[1], tokens[4]))
        cag = Literal("[") + group + "]" + "G" + logical
        cag.setParseAction(lambda tokens: CAG(tokens[1], tokens[4]))
        ceg = Literal("<") + group + ">" + "G" + logical
        ceg.setParseAction(lambda tokens: CEG(tokens[1], tokens[4]))
        # Until / weak-until forms: tokens[1]=group, [4] and [6]=subformulas.
        cau = Literal("[") + group + "]" + "[" + __atl + "U" + __atl + "]"
        cau.setParseAction(lambda tokens: CAU(tokens[1], tokens[4], tokens[6]))
        ceu = Literal("<") + group + ">" + "[" + __atl + "U" + __atl + "]"
        ceu.setParseAction(lambda tokens: CEU(tokens[1], tokens[4], tokens[6]))
        caw = Literal("[") + group + "]" + "[" + __atl + "W" + __atl + "]"
        caw.setParseAction(lambda tokens: CAW(tokens[1], tokens[4], tokens[6]))
        cew = Literal("<") + group + ">" + "[" + __atl + "W" + __atl + "]"
        cew.setParseAction(lambda tokens: CEW(tokens[1], tokens[4], tokens[6]))
        strategic = (cax | cex | caf | cef | cag | ceg | cau | ceu | caw | cew)
        logical <<= (formula | strategic)
        # Full formulas are boolean combinations of the above.
        __atl <<= (_logicals_(logical))
    return __atl.parseString(spec, parseAll=True)
def _string_to_ast(self, input_string):
    """ Parse a smart search string and return it in an AST like form
    """
    # simple words
    # we need to use a regex to match on words because the regular
    # Word(alphanums) will only match on American ASCII alphanums and since
    # we try to be Unicode / internationally friendly we need to match much
    # much more. Trying to expand a word class to catch it all seems futile
    # so we match on everything *except* a few things, like our operators
    comp_word = Regex(r"[^*\s=><~!]+")
    word = Regex(r"[^*\s=><~!]+").setResultsName('word')
    # numbers
    comp_number = Word(nums)
    number = Word(nums).setResultsName('number')

    # IPv4 address
    ipv4_oct = Regex(r"((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))")
    # BUG FIX: the repetition must apply to the ('.' + octet) pair;
    # `'.' + ipv4_oct*3` built the grammar "oct . oct oct oct", which can
    # never match a real dotted-quad address.
    comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct) * 3)
    ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct) * 3).setResultsName('ipv4_address')

    # IPv6 address (full RFC-style alternation, incl. embedded IPv4 forms)
    ipv6_address = Regex(r"((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?").setResultsName('ipv6_address')
    ipv6_prefix = Combine(ipv6_address + Regex(r"/(12[0-8]|1[01][0-9]|[0-9][0-9]?)")).setResultsName('ipv6_prefix')

    # VRF RTs of the form number:number
    vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt')

    # tags
    tags = Combine(Literal('#') + comp_word).setResultsName('tag')

    # operators for matching
    match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator')
    boolean_op = oneOf(' '.join(self.boolean_operators)).setResultsName('boolean')

    # quoted string (single- or double-quoted, backslash escapes)
    d_quoted_string = QuotedString('"', unquoteResults=True, escChar='\\')
    s_quoted_string = QuotedString('\'', unquoteResults=True, escChar='\\')
    quoted_string = (s_quoted_string | d_quoted_string).setResultsName('quoted_string')

    # expression to match a certain value for an attribute
    expression = Group(word + match_op + (quoted_string | vrf_rt | word | number)).setResultsName('expression')

    # we work on atoms, which are single quoted strings, match expressions,
    # tags, VRF RT or simple words.
    # NOTE: Place them in order of most exact match first!
    atom = Group(ipv6_prefix | ipv6_address | quoted_string | expression | tags | vrf_rt | boolean_op | word)

    enclosed = Forward()
    parens = nestedExpr('(', ')', content=enclosed)
    enclosed << (parens | atom).setResultsName('nested')

    content = Forward()
    content << (ZeroOrMore(enclosed))

    res = content.parseString(input_string)
    return res
def create_db_parse(query):
    """Parse a CREATE DATABASE statement and return the pyparsing tokens.

    Recognised shape: CREATE DATABASE [IF NOT EXISTS] <name>, with
    Oracle-style comments permitted anywhere.
    """
    stmt = Forward()
    stmt <<= (
        CREATE
        + DATABASE
        + Optional(IF_NOT_EXISTS)("existence_clause")
        + ident("dbName")
    )
    stmt.ignore(oracleSqlComment)
    return stmt.parseString(query)
def __parse_nested_list(value, dtype=float):
    """Parse a bracketed, possibly nested list literal such as
    "[1, [2.5, 3e-2]]" into nested Python lists, converting each scalar
    with *dtype*."""
    from pyparsing import Word, Group, Forward, OneOrMore, Optional, alphanums, Suppress

    # Scalar token: alphanumerics plus '.', 'e', '-' so plain ints,
    # floats and scientific notation all match; converted via dtype.
    scalar = Word(alphanums + ".e-").setParseAction(lambda s, l, t: dtype(t[0]))

    nested = Forward()
    item = scalar | nested
    # Commas between items are optional and always discarded.
    nested << Group(
        Suppress('[') + OneOrMore(item + Optional(Suppress(","))) + Suppress(']'))

    return nested.parseString(value, parseAll=True).asList()[0]
def insert_query_parse(query):
    """Parse an INSERT INTO <table> VALUES (...)[, (...)] statement."""
    stmt = Forward()
    # One literal inside a VALUES tuple.
    literal = (realNum | intNum | quotedString | dblQuotedString)
    # A parenthesised "(v1, v2, ...)" tuple.
    value_tuple = "(" + Group(delimitedList(literal)) + ")"
    # One or more comma-separated tuples.
    tuples = Group(delimitedList(value_tuple))
    stmt <<= (INSERT + INTO + tableName("tableName") + VALUES +
              tuples("valuesList"))
    stmt.ignore(oracleSqlComment)
    return stmt.parseString(query)
def get_operands(self):
    """Parse self.expression and return the collected operand stack.

    Set operators are '-' (difference), 'n' (intersection) and 'U'
    (union); every operand encountered is pushed via
    push_first_operand_stack.
    """
    open_paren = Literal("(").suppress()
    close_paren = Literal(")").suppress()
    set_op = (Literal("-").suppress()
              | Literal("n").suppress()
              | Literal("U").suppress())
    name = Word(alphanums)

    expr = Forward()
    factor = name.setParseAction(
        self.push_first_operand_stack) | open_paren + expr + close_paren
    expr << factor + ZeroOrMore(
        (set_op + factor).setParseAction(self.push_first_operand_stack))

    expr.parseString(self.expression)
    return self.operand_stack
def _string_to_ast(self, input_string): """ Parse a smart search string and return it in an AST like form """ # simple words comp_word = Word(alphanums + "-./_") word = Word(alphanums + "-./_").setResultsName('word') # numbers comp_number = Word(nums) number = Word(nums).setResultsName('number') # IPv4 address ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))") comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct * 3)) ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct * 3)).setResultsName('ipv4_address') # VRF RTs of the form number:number vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt') # tags tags = Combine(Literal('#') + comp_word).setResultsName('tag') # operators for matching match_op = oneOf(' '.join( self.match_operators)).setResultsName('operator') boolean_op = oneOf(' '.join( self.boolean_operators)).setResultsName('boolean') # quoted string quoted_string = QuotedString( '"', unquoteResults=True, escChar='\\').setResultsName('quoted_string') # expression to match a certain value for an attribute expression = Group(word + match_op + (quoted_string | vrf_rt | word | number)).setResultsName('expression') # we work on atoms, which are single quoted strings, match expressions, # tags, VRF RT or simple words. # NOTE: Place them in order of most exact match first! atom = Group(quoted_string | expression | tags | vrf_rt | boolean_op | word) enclosed = Forward() parens = nestedExpr('(', ')', content=enclosed) enclosed << (parens | atom).setResultsName('nested') content = Forward() content << (ZeroOrMore(enclosed)) res = content.parseString(input_string) return res
def main(s):
    """Parse a chemical-formula-like string *s* and return the processed
    result produced by the process_* actions."""
    open_p = Literal('(').suppress()
    close_p = Literal(')').suppress()
    count = Word(nums)
    symbol = Word(alphas, exact=1)  # single-letter element symbol

    formula = Forward()
    # A term is a symbol or a parenthesised subformula, followed by an
    # optional multiplier that defaults to 1.
    term = Group(
        (symbol | Group(open_p + formula + close_p)('subgroup'))
        + Optional(count, default=1)('mult'))
    formula << OneOrMore(term)

    # Attach the semantic actions once the grammar is wired up.
    count.setParseAction(process_integer)
    term.setParseAction(process_term)
    formula.setParseAction(process_formula)

    return formula.parseString(s)[0]
def parse (input):
    """Parse a string into an element of the abstract representation."""
    # parse a string into an element of the abstract representation

    # Grammar:
    #
    # <expr> ::= <integer>
    #            true
    #            false
    #            <identifier>
    #            ( if <expr> <expr> <expr> )
    #            ( let ( ( <name> <expr> ) ) <expr )
    #            ( + <expr> <expr> )
    #            ( * <expr> <expr> )
    #

    idChars = alphas+"_+*-?!=<>"

    pIDENTIFIER = Word(idChars, idChars+"0123456789")
    pIDENTIFIER.setParseAction(lambda result: EId(result[0]))

    # A name is like an identifier but it does not return an EId...
    pNAME = Word(idChars,idChars+"0123456789")

    pINTEGER = Word("-0123456789","0123456789")
    pINTEGER.setParseAction(lambda result: EInteger(int(result[0])))

    pBOOLEAN = Keyword("true") | Keyword("false")
    pBOOLEAN.setParseAction(lambda result: EBoolean(result[0]=="true"))

    pEXPR = Forward()

    # indices below skip the literal "(" and keyword tokens
    pIF = "(" + Keyword("if") + pEXPR + pEXPR + pEXPR + ")"
    pIF.setParseAction(lambda result: EIf(result[2],result[3],result[4]))

    pBINDING = "(" + pNAME + pEXPR + ")"
    pBINDING.setParseAction(lambda result: (result[1],result[2]))

    pLET = "(" + Keyword("let") + "(" + pBINDING + ")" + pEXPR + ")"
    pLET.setParseAction(lambda result: ELet([result[3]],result[5]))

    pPLUS = "(" + Keyword("+") + pEXPR + pEXPR + ")"
    pPLUS.setParseAction(lambda result: ECall("+",[result[2],result[3]]))

    pTIMES = "(" + Keyword("*") + pEXPR + pEXPR + ")"
    pTIMES.setParseAction(lambda result: ECall("*",[result[2],result[3]]))

    pEXPR << (pINTEGER | pBOOLEAN | pIDENTIFIER | pIF | pLET | pPLUS | pTIMES)

    result = pEXPR.parseString(input)[0]
    return result    # the first element of the result is the expression
def __str_to_list(s):
    """Expand a comma-separated number/range string like "1,3-5,8" into
    the list of integers [1, 3, 4, 5, 8]."""
    def expand(s, loc, tokens):
        # One token -> single number; two tokens -> inclusive range.
        if len(tokens) == 1:
            return [int(tokens[0])]
        return list(range(int(tokens[0]), int(tokens[1]) + 1))

    expr = Forward()
    item = (Word(nums) +
            Optional(Literal('-').suppress() + Word(nums))).setParseAction(expand)
    # Right-recursive comma-separated list of items.
    expr << item + Optional(Literal(',').suppress() + expr)
    return expr.parseString(s, parseAll=True)
def parse(string):
    """Parse an infix arithmetic string over integers with '+', '*' and
    parentheses, returning the raw token stream."""
    operator = Literal("+") ^ Literal("*")
    lparen, rparen = map(Suppress, "()")
    integer = Word(nums)

    expr = Forward()
    parenthesised = Group(lparen + expr + rparen)
    # An operator followed by its right-hand operand.
    tail = operator + (expr ^ parenthesised)
    expr <<= (integer ^ parenthesised) + ZeroOrMore(tail)
    return expr.parseString(string)
def parse(input):
    """Parse a string into an element of the abstract representation."""
    # parse a string into an element of the abstract representation

    # Grammar:
    #
    # <expr> ::= <integer>
    #            true
    #            false
    #            <identifier>
    #            ( if <expr> <expr> <expr> )
    #            ( let ( ( <name> <expr> ) ) <expr )
    #            ( + <expr> <expr> )
    #            ( * <expr> <expr> )
    #

    idChars = alphas + "_+*-?!=<>"

    pIDENTIFIER = Word(idChars, idChars + "0123456789")
    pIDENTIFIER.setParseAction(lambda result: EId(result[0]))

    # A name is like an identifier but it does not return an EId...
    pNAME = Word(idChars, idChars + "0123456789")

    pINTEGER = Word("-0123456789", "0123456789")
    pINTEGER.setParseAction(lambda result: EInteger(int(result[0])))

    pBOOLEAN = Keyword("true") | Keyword("false")
    pBOOLEAN.setParseAction(lambda result: EBoolean(result[0] == "true"))

    pEXPR = Forward()

    # indices below skip the literal "(" and keyword tokens
    pIF = "(" + Keyword("if") + pEXPR + pEXPR + pEXPR + ")"
    pIF.setParseAction(lambda result: EIf(result[2], result[3], result[4]))

    pBINDING = "(" + pNAME + pEXPR + ")"
    pBINDING.setParseAction(lambda result: (result[1], result[2]))

    pLET = "(" + Keyword("let") + "(" + pBINDING + ")" + pEXPR + ")"
    pLET.setParseAction(lambda result: ELet([result[3]], result[5]))

    pPLUS = "(" + Keyword("+") + pEXPR + pEXPR + ")"
    pPLUS.setParseAction(lambda result: ECall("+", [result[2], result[3]]))

    pTIMES = "(" + Keyword("*") + pEXPR + pEXPR + ")"
    pTIMES.setParseAction(lambda result: ECall("*", [result[2], result[3]]))

    pEXPR << (pINTEGER | pBOOLEAN | pIDENTIFIER | pIF | pLET | pPLUS | pTIMES)

    result = pEXPR.parseString(input)[0]
    return result    # the first element of the result is the expression
def sandbox():
    """Based on http://stackoverflow.com/a/4802004/623735

    Demo: parse several loosely PDDL/STRIPS-style strings with a
    deliberately permissive grammar; returns the list of successfully
    parsed examples (failures are printed and skipped).
    """
    loose_grammar = Forward()
    nestedParens = nestedExpr('(', ')', content=loose_grammar)
    loose_grammar << (
        OneOrMore(Optional(':').suppress() + Word(alphanums + '-_'))
        | OneOrMore(Optional('?').suppress() + Word(alphanums + '-_'))
        | init
        | goal
        | ','
        | nestedParens)
    examples = [
        # definitely not PDDL-compliant, but parser does OK anyway (not strict)
        '(some global things (:a (nested list of three varibles (?list0 ?list1 ?list2))))',
        # this is a valid line of STRIPS (subset of PDDL grammar?)
        '(:requirements :strips)',
        # another valid line of STRIPS (subset of PDDL grammar?)
        '(define (domain random-domain))',
        # a complete (if simple) STRIPS problem definition from coursera AI Planning class, HW wk2
        r'''
          (define (problem random-pbl1)
            (:domain random-domain)
            (:init
                 (S B B) (S C B) (S A C)
                 (R B B) (R C B))
            (:goal (and (S A A))))
          ''',
        # a complete STRIPS domain definition from coursera AI Planning class, HW wk2
        r'''
          (define (domain random-domain)
            (:requirements :strips)
            (:action op1
              :parameters (?x1 ?x2 ?x3)
              :precondition (and (S ?x1 ?x2) (R ?x3 ?x1))
              :effect (and (S ?x2 ?x1) (S ?x1 ?x3) (not (R ?x3 ?x1))))
            (:action op2
              :parameters (?x1 ?x2 ?x3)
              :precondition (and (S ?x3 ?x1) (R ?x2 ?x2))
              :effect (and (S ?x1 ?x3) (not (S ?x3 ?x1)))))
          ''',
    ]
    ans = []
    for ex in examples:
        try:
            ans += [loose_grammar.parseString(ex).asList()]
            print(ans[-1])
        except Exception:
            # Best-effort demo: report the failure and keep going.
            # (Was a bare ``except:``, which also swallowed SystemExit
            # and KeyboardInterrupt.)
            print_exc()
    return ans
def __init__(self, query):
    """Compile *query* (a boolean search string) into a parse tree.

    Grammar, tightest-binding first: word, "quoted phrase",
    (parentheses), not, and (explicit keyword or implied by adjacency),
    or. The tree root is stored in self._parser, or False when the
    query is empty.
    """
    # dispatch table: results-name of each grammar group -> evaluator
    self._methods = {
        'and': self.evaluate_and,
        'or': self.evaluate_or,
        'not': self.evaluate_not,
        'parenthesis': self.evaluate_parenthesis,
        'quotes': self.evaluate_quotes,
        'word': self.evaluate_word,
    }
    self.line = ''
    # searches are case-insensitive: normalise once here
    self.query = query.lower() if query else ''
    if self.query:
        operator_or = Forward()
        operator_word = Group(Word(alphanums)).setResultsName('word')
        # one or more words inside quotes (right-recursive list)
        operator_quotes_content = Forward()
        operator_quotes_content << (
            (operator_word + operator_quotes_content) | operator_word
        )
        operator_quotes = Group(
            Suppress('"') + operator_quotes_content + Suppress('"')
        ).setResultsName("quotes") | operator_word
        operator_parenthesis = Group(
            (Suppress("(") + operator_or + Suppress(")"))
        ).setResultsName("parenthesis") | operator_quotes
        operator_not = Forward()
        operator_not << (Group(
            Suppress(Keyword("not", caseless=True)) + operator_not
        ).setResultsName("not") | operator_parenthesis)
        operator_and = Forward()
        # explicit "and", or two adjacent terms (implicit and)
        operator_and << (Group(
            operator_not + Suppress(Keyword("and", caseless=True)) + operator_and
        ).setResultsName("and") | Group(
            operator_not + OneOrMore(~oneOf("and or") + operator_and)
        ).setResultsName("and") | operator_not)
        operator_or << (Group(
            operator_and + Suppress(Keyword("or", caseless=True)) + operator_or
        ).setResultsName("or") | operator_and)
        self._parser = operator_or.parseString(self.query)[0]
    else:
        self._parser = False
def __init__(self, query):
    """Compile *query* (a boolean search string) into a parse tree.

    Grammar, tightest-binding first: word, "quoted phrase",
    (parentheses), not, and (explicit keyword or implied by adjacency),
    or. The tree root is stored in self._parser, or False when the
    query is empty.

    Fixes: ``setResultsName0`` (typo; raised AttributeError on any
    parenthesised query) and ``Keyword('no')`` which failed to match
    the ``not`` operator that self._methods declares.
    """
    # dispatch table: results-name of each grammar group -> evaluator
    self._methods = {
        'and': self.evaluate_and,
        'or': self.evaluate_or,
        'not': self.evaluate_not,
        'parenthesis': self.evaluate_parenthesis,
        'quotes': self.evaluate_quotes,
        'word': self.evaluate_word,
    }
    self.line = ''
    # searches are case-insensitive: normalise once here
    self.query = query.lower() if query else ''
    if self.query:
        operator_or = Forward()
        operator_word = Group(Word(alphanums)).setResultsName('word')
        # one or more words inside quotes (right-recursive list)
        operator_quotes_content = Forward()
        operator_quotes_content << (
            (operator_word + operator_quotes_content) | operator_word)
        operator_quotes = Group(
            Suppress('"') + operator_quotes_content +
            Suppress('"')).setResultsName('quotes') | operator_word
        # was: .setResultsName0('parenthesis') -- AttributeError
        operator_parenthesis = Group(
            (Suppress('(') + operator_or + Suppress(")")
             )).setResultsName('parenthesis') | operator_quotes
        operator_not = Forward()
        # was: Keyword('no') -- never matched the "not" keyword
        operator_not << (
            Group(Suppress(Keyword('not', caseless=True)) +
                  operator_not).setResultsName('not') | operator_parenthesis)
        operator_and = Forward()
        # explicit "and", or two adjacent terms (implicit and)
        operator_and << (
            Group(operator_not + Suppress(Keyword('and', caseless=True)) +
                  operator_and).setResultsName('and') |
            Group(operator_not + OneOrMore(~oneOf('and or') + operator_and)
                  ).setResultsName('and') | operator_not)
        operator_or << (
            Group(operator_and + Suppress(Keyword('or', caseless=True)) +
                  operator_or).setResultsName('or') | operator_and)
        self._parser = operator_or.parseString(self.query)[0]
    else:
        self._parser = False
def create_ast(expression_str: str) -> List[Union[str, List]]:
    """Parse *expression_str* into a nested-list AST.

    Operands are single letters or parenthesised sub-expressions, each
    optionally followed by primes ("'"); juxtaposition (implicit
    concatenation), '*' and '+' bind in decreasing precedence.
    """
    expression = Forward()
    primed_letter = Group(Char(alphas) + ZeroOrMore("'"))
    primed_group = Group(Group("(" + expression + ")") + ZeroOrMore("'"))
    operand = primed_letter | primed_group
    expression <<= infixNotation(
        operand,
        [
            (Empty(), 2, opAssoc.LEFT),  # implicit concatenation
            ("*", 2, opAssoc.LEFT),
            ("+", 2, opAssoc.LEFT),
        ],
    )
    return expression.parseString(expression_str, parseAll=True).asList()
def parse_block(self, block_text):
    """Parse an sql block into tokens.

    Expected shape:
        {<tagname>: key='value' | key2='value2'} <sql text> {<tagname>}
    The sql text lands under the 'sqltext' results name; parameters (if
    any) form a Dict.
    """
    tag = self.TAGNAME
    # block delimiters
    open_brace = Literal("{")
    tag_start = Keyword(tag, caseless=True)
    opt_colon = Literal(":")
    close_brace = Literal("}")
    closing_tag = Keyword("{" + tag + "}", caseless=True)

    # one "name=value" parameter; values run until '|' or '}'
    key = Word(alphanums)
    eq = Suppress(Literal("="))
    val = (CharsNotIn("|}"))
    pair = Group(key + eq + val)
    params = Dict(delimitedList(pair, '|'))

    # everything up to the closing tag is the sql body
    body = SkipTo(closing_tag)

    grammar = Forward()
    grammar << (open_brace + tag_start + Optional(opt_colon) +
                Optional(params) + close_brace +
                body.setResultsName('sqltext') + closing_tag)

    return grammar.parseString("".join(block_text))
def _string_to_ast(self, input_string): """ Parse a smart search string and return it in an AST like form """ # simple words comp_word = Word(alphanums + "-./_") word = Word(alphanums + "-./_").setResultsName('word') # numbers comp_number = Word(nums) number = Word(nums).setResultsName('number') # IPv4 address ipv4_oct = Regex("((2(5[0-5]|[0-4][0-9])|[01]?[0-9][0-9]?))") comp_ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)) ipv4_address = Combine(ipv4_oct + ('.' + ipv4_oct*3)).setResultsName('ipv4_address') # VRF RTs of the form number:number vrf_rt = Combine((comp_ipv4_address | comp_number) + Literal(':') + comp_number).setResultsName('vrf_rt') # tags tags = Combine( Literal('#') + comp_word).setResultsName('tag') # operators for matching match_op = oneOf(' '.join(self.match_operators)).setResultsName('operator') boolean_op = oneOf(' '.join(self.boolean_operators)).setResultsName('boolean') # quoted string quoted_string = QuotedString('"', unquoteResults=True, escChar='\\').setResultsName('quoted_string') # expression to match a certain value for an attribute expression = Group(word + match_op + (quoted_string | vrf_rt | word | number)).setResultsName('expression') # we work on atoms, which are single quoted strings, match expressions, # tags, VRF RT or simple words. # NOTE: Place them in order of most exact match first! atom = Group(quoted_string | expression | tags | vrf_rt | boolean_op | word) enclosed = Forward() parens = nestedExpr('(', ')', content=enclosed) enclosed << ( parens | atom ).setResultsName('nested') content = Forward() content << ( ZeroOrMore(enclosed) ) res = content.parseString(input_string) return res
def _parse_query(self):
    """
    Defines and parses the Resource DSL based on the query associated with the PanoptesResourceDSL object

    Returns:
        list: The list of tokens parsed

    Raises:
        ParseException: This exception is raised if any parsing error occurs
    """
    # Fixed resource attributes allowed on the left-hand side.
    resource_fields = oneOf(
        'resource_site resource_class resource_subclass resource_type resource_id resource_endpoint',
        caseless=True)
    # Free-form metadata access: resource_metadata.<key>.
    resource_metadata = CaselessLiteral('resource_metadata') + Literal(
        '.') + Word(alphanums + '_')
    and_ = CaselessKeyword('AND').setParseAction(upcaseTokens)
    or_ = CaselessKeyword('OR').setParseAction(upcaseTokens)
    not_ = CaselessKeyword('NOT').setParseAction(upcaseTokens)
    in_ = CaselessKeyword('IN').setParseAction(upcaseTokens)
    like_ = CaselessKeyword('LIKE').setParseAction(upcaseTokens)
    operators = oneOf("= != eq ne", caseless=True).setParseAction(upcaseTokens)
    query_expression = Forward()
    query_l_val = (resource_fields | resource_metadata).setParseAction(downcaseTokens)
    query_r_val = QuotedString(quoteChar='"', escChar='\\')
    # A condition: <l_val> <op> "val" | <l_val> [NOT] LIKE "val"
    #            | <l_val> [NOT] IN ("v1", "v2", ...)
    query_condition = Group((query_l_val + operators + query_r_val) |
                            (query_l_val + Optional(not_) + like_ + query_r_val) |
                            (query_l_val + Optional(not_) + in_ + '(' +
                             delimitedList(query_r_val) + ')'))
    # '-' (And with stop-on-error) makes failures after a condition
    # fatal so the reported error location is accurate.
    query_expression << query_condition - ZeroOrMore(
        (and_ | or_) - query_condition)
    # A ParseException from parseString propagates to the caller as
    # documented; the previous ``except ParseException as e: raise e``
    # was a no-op and has been removed.
    return query_expression.parseString(self._query, parseAll=True)
def load_js_obj_literal(j):
    """Terrible hack: coerce a JS object literal into Python data.

    Drops everything before the first '{', strips newlines, tabs,
    semicolons and '//' comments, then parses with a small JSON-ish
    pyparsing grammar and returns the resulting value.
    """
    j = j[j.index('{'):]
    j = j.replace('\n', '').replace('\t', '')
    j = j.replace(';', '')
    j = re.sub(r'//.*?{', r'{', j)

    LBRACK, RBRACK, LBRACE, RBRACE, COLON, COMMA = map(Suppress, "[]{}:,")
    integer = Regex(r"[+-]?\d+").setParseAction(lambda t: int(t[0]))
    real = Regex(r"[+-]?\d+\.\d*").setParseAction(lambda t: float(t[0]))
    string_ = Word(alphas, alphanums + "_") | quotedString.setParseAction(removeQuotes)
    bool_ = oneOf("true false").setParseAction(lambda t: t[0] == "true")

    item = Forward()
    dict_ = LBRACE - Optional(dictOf(string_ + COLON, item + Optional(COMMA))) + RBRACE
    list_ = LBRACK - Optional(delimitedList(item)) + RBRACK
    # NOTE: alternation order preserved from the original -- reals must
    # come before integers so "1.5" is not split at the dot.
    item << (real | integer | string_ | bool_ | Group(list_ | dict_))

    return item.parseString(j, parseAll=True)[0]
def create_table_query_parse(query):
    """Parse a CREATE TABLE statement and return the pyparsing tokens.

    Recognised shape:
        CREATE TABLE <name> (<col> <type> [NOT NULL | NULL], ...)
    Fix: removed the unreachable ``return tableName`` that followed the
    real return statement.
    """
    createTableStmt = Forward()
    # Parameterised column types, e.g. CHAR(10) / VARCHAR(255).
    charType = Combine(CHAR + "(" + intNum + ")")
    varcharType = Combine(VARCHAR + "(" + intNum + ")")
    columnTypeName = (INT | FLOAT | charType | varcharType | DATE |
                      DATETIME | TIME | YEAR)
    # <name> <type> with an optional nullability marker.
    column = Group(columnName + columnTypeName + Optional(NOT_NULL | NULL))
    # Token for a sublist of column name
    columnList = Group(delimitedList(column))
    # define the grammar
    createTableStmt <<= (CREATE + TABLE + tableName("tableName") + "(" +
                         columnList("columnList") + ")")
    createTableStmt.ignore(oracleSqlComment)
    return createTableStmt.parseString(query)
def make_sexp_parser():
    """Return a parser for nested lists of real numbers.

    Lisp-style round parens delimit lists. The returned callable maps a
    string to the first parsed expression (a float or a nested group).
    """
    lparen, rparen = map(Suppress, "()")

    # A real number: optional sign, digits, mandatory dot, optional
    # fraction and exponent.
    number = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction(
        lambda tokens: float(tokens[0]))

    sexp = Forward()
    nested = Group(lparen + ZeroOrMore(sexp) + rparen)
    sexp << (number | nested)

    def parse(s):
        return sexp.parseString(s)[0]
    return parse
def parse_variadic_templates(txt):
    """Scan C++-like source text and return the parse results, capturing
    (via originalTextFor) each template declaration that has a variadic
    parameter pack, together with its following brace block."""
    # e.g. "typename T" / "class T"
    template_param_type = Word(alphas)
    template_variadic = Literal('...')
    template_id = Word(alphas)
    # e.g. "typename ... Args"
    template_variadic_param = Group( template_param_type + template_variadic + template_id )
    template_param = Group( template_param_type + template_id )
    # template_params = Group ( delimitedList( template_variadic_param | Optional(template_param) ) )
    # Exactly one variadic param, optionally surrounded by plain params.
    template_params = ( Optional( OneOrMore(template_param + ',') ) +
                        template_variadic_param +
                        Optional( OneOrMore( ',' + template_param ) ) )
    template_params_no_variadic = ( template_param +
                                    Optional( OneOrMore( ',' + template_param ) ) )
    # Optional leading non-variadic template<> clause, then the variadic one.
    template_decl = Optional( "template" + Literal("<") + template_params_no_variadic + Literal(">") ) + \
        "template" + Literal("<") + template_params + Literal(">")

    # A braces block (possibly nested), optionally terminated by ';'.
    block_content = Forward()
    block = nestedExpr('{', '}', content=block_content) + Literal(';') * (0,1)
    block_content << ( CharsNotIn('{}') | block )

    # Keep the raw source text of the whole declaration + body.
    decl = originalTextFor( template_decl + CharsNotIn('{') + block )

    template_file = Forward()
    # Anything that is not a template decl is consumed as whitespace/words.
    code_block = decl | White() | Word(printables)
    template_file << ( Optional(OneOrMore(code_block)) | template_file)

    parsed = template_file.parseString( txt )
    return parsed
def parse_block(self, block_text):
    """Parse an {sqltable: ...} block into tokens.

    Expected shape:
        {sqltable: key='value' | key2='value2'} <sql text> {sqltable}
    The sql text lands under the 'sqltext' results name.
    """
    # block delimiters
    open_brace = Literal("{")
    tag_start = Keyword("sqltable", caseless=True)
    opt_colon = Literal(":")
    close_brace = Literal("}")
    closing_tag = Keyword("{sqltable}", caseless=True)

    # one "name=value" parameter; values run until '|' or '}'
    key = Word(alphanums)
    eq = Suppress(Literal("="))
    val = (CharsNotIn("|}"))
    pair = Group(key + eq + val)
    params = Dict(delimitedList(pair, '|'))

    # everything up to the closing tag is the sql body
    body = SkipTo(closing_tag)

    grammar = Forward()
    grammar << (open_brace + tag_start + Optional(opt_colon) +
                Optional(params) + close_brace +
                body.setResultsName('sqltext') + closing_tag)

    return grammar.parseString("".join(block_text))
def expand(text):
    """Perform bash-like brace expansion, e.g. a{b,c} -> ["ab", "ac"]."""
    from pyparsing import (Suppress, Optional, CharsNotIn, Forward, Group,
                           ZeroOrMore, delimitedList)

    open_brace, close_brace = map(Suppress, "{}")
    # Any run of characters that are not brace-expansion syntax.
    plain = Optional(CharsNotIn("{},"))

    brace_expr = Forward()
    alternative = plain ^ Group(brace_expr)
    alternatives = Group(open_brace + delimitedList(alternative) + close_brace)
    brace_expr << plain + ZeroOrMore(alternatives + plain)

    parsed = brace_expr.parseString(text).asList()
    return list(lst_extend(parsed))
class CPL_Parser:
    """Parser for classical propositional logic formulas.

    Connectives, tightest-binding first: ! / ~ (not), & (and), | (or),
    -> (implies, right-associative). Operands are the keywords
    true/false or alphanumeric symbols; each connective builds the
    corresponding *Connective node via operatorPrecedence.
    """
    def __init__(self):
        self.left_parenthesis, self.right_parenthesis = map(Suppress, "()")
        self.implies = Literal("->")
        self.or_ = Literal("|")
        self.and_ = Literal("&")
        self.not_ = Literal("!") | Literal ("~")
        self.boolean = Keyword("false") | Keyword("true")
        self.symbol = Word(alphas, alphanums)
        self.formula = Forward()
        self.operand = self.boolean | self.symbol
        # precedence table: unary not binds tightest, implies loosest
        self.formula << operatorPrecedence(self.operand, [
            (self.not_, 1, opAssoc.RIGHT, NotConnective),
            (self.and_, 2, opAssoc.LEFT, AndConnective),
            (self.or_, 2, opAssoc.LEFT, OrConnective),
            (self.implies, 2, opAssoc.RIGHT, ImpliesConnective)])

    def parse(self,text):
        """Parse *text*; return a one-element list (a bare symbol string
        or the nested list form of the formula), or "" with a printed
        message on a syntax error."""
        try:
            result = self.formula.parseString(text, parseAll=True)
            assert len(result) == 1
            resultList = list()
            # a lone symbol parses to a plain string, not a node
            if (isinstance(result[0],str)):
                resultList.append(result[0])
            else:
                resultList.append(result[0].asList())
            return resultList
        except (ParseException, ParseSyntaxException) as err:
            # point a caret at the offending column
            print("Syntax error:\n{0.line}\n{1}^".format(err, " " * (err.column - 1)))
            return ""
class SExpressionParser(object):
    """Parser for s-expressions over words and quoted strings.

    Composite expressions are parenthesised lists converted to
    SExpression; a top-level sequence of expressions is converted to
    SExpressionList.
    """
    def __init__(self):
        self.lpar = Literal('(')
        self.rpar = Literal(')')
        # a word is any run of printables except parentheses
        self.word_chars = ''.join(c for c in printables if c not in ('()'))
        self.word = Word(self.word_chars) | quotedString
        self.atom = self.word
        self.expression = Forward()
        self.composite_expression = (
            Suppress(self.lpar) + ZeroOrMore(self.expression) +
            Suppress(self.rpar))('composite_expression')
        self.composite_expression.addParseAction(
            self._composite_expression_to_tuple)
        self.expression << (self.atom | self.composite_expression)
        self.expressions = Group(ZeroOrMore(self.expression))('expressions')
        self.expressions.addParseAction(self._expressions_to_tuple)

    def parse_expression(self, instring):
        """Parse a single s-expression and return it."""
        return self.expression.parseString(instring, parseAll=True)[0]

    def parse_expressions(self, instring):
        """Parse a whole string of s-expressions and return the list."""
        return self.expressions.parseString(instring, parseAll=True)[0]

    @staticmethod
    def _composite_expression_to_tuple(toks):
        # wrap a parenthesised group into the project SExpression type
        return SExpression(toks.composite_expression)

    @staticmethod
    def _expressions_to_tuple(toks):
        # wrap the top-level sequence into the project SExpressionList type
        return SExpressionList(toks.expressions)
def parseCTLK(spec):
    """Parse the spec and return the list of possible ASTs.

    The grammar is built once and cached in the module-global __ctlk;
    subsequent calls reuse it. Covers propositions, CTL temporal
    operators (EX/AX/EF/AF/EG/AG, E[.U.]/A[.U.]/E[.W.]/A[.W.]) and
    epistemic operators (K/nK over one agent, E/D/C and their negations
    over agent groups).
    """
    global __ctlk
    if __ctlk is None:
        # --- atomic propositions -------------------------------------
        true = Literal("True")
        true.setParseAction(lambda tokens: TrueExp())
        false = Literal("False")
        false.setParseAction(lambda tokens: FalseExp())
        init = Literal("Init")
        init.setParseAction(lambda tokens: Init())
        reachable = Literal("Reachable")
        reachable.setParseAction(lambda tokens: Reachable())
        # quoted atom: 'anything'
        atom = "'" + SkipTo("'") + "'"
        atom.setParseAction(lambda tokens: Atom(tokens[1]))

        # agents and comma-separated agent groups
        agent = atom
        group = Group(ZeroOrMore(agent + Suppress(",")) + agent)

        proposition = true | false | init | reachable | atom

        __ctlk = Forward()

        notproposition = "~" + proposition
        notproposition.setParseAction(lambda tokens: Not(tokens[1]))

        formula = (proposition | notproposition |
                   Suppress("(") + __ctlk + Suppress(")"))

        logical = Forward()

        # --- temporal operators (tokens[2] is the sub-formula) -------
        ex = Literal("E") + "X" + logical
        ex.setParseAction(lambda tokens: EX(tokens[2]))
        ax = Literal("A") + "X" + logical
        ax.setParseAction(lambda tokens: AX(tokens[2]))
        ef = Literal("E") + "F" + logical
        ef.setParseAction(lambda tokens: EF(tokens[2]))
        af = Literal("A") + "F" + logical
        af.setParseAction(lambda tokens: AF(tokens[2]))
        eg = Literal("E") + "G" + logical
        eg.setParseAction(lambda tokens: EG(tokens[2]))
        ag = Literal("A") + "G" + logical
        ag.setParseAction(lambda tokens: AG(tokens[2]))
        # until / weak-until: tokens[2] and tokens[4] are the operands
        eu = Literal("E") + "[" + __ctlk + "U" + __ctlk + "]"
        eu.setParseAction(lambda tokens: EU(tokens[2], tokens[4]))
        au = Literal("A") + "[" + __ctlk + "U" + __ctlk + "]"
        au.setParseAction(lambda tokens: AU(tokens[2], tokens[4]))
        ew = Literal("E") + "[" + __ctlk + "W" + __ctlk + "]"
        ew.setParseAction(lambda tokens: EW(tokens[2], tokens[4]))
        aw = Literal("A") + "[" + __ctlk + "W" + __ctlk + "]"
        aw.setParseAction(lambda tokens: AW(tokens[2], tokens[4]))

        temporal = (ex | ax | ef | af | eg | ag | eu | au | ew | aw)

        # --- epistemic operators (tokens[2] = agent/group) -----------
        nk = Literal("nK") + "<" + agent + ">" + logical
        nk.setParseAction(lambda tokens: nK(tokens[2], tokens[4]))
        k = Literal("K") + "<" + agent + ">" + logical
        k.setParseAction(lambda tokens: K(tokens[2], tokens[4]))
        ne = Literal("nE") + "<" + group + ">" + logical
        ne.setParseAction(lambda tokens: nE(list(tokens[2]), tokens[4]))
        e = Literal("E") + "<" + group + ">" + logical
        e.setParseAction(lambda tokens: E(list(tokens[2]), tokens[4]))
        nd = Literal("nD") + "<" + group + ">" + logical
        nd.setParseAction(lambda tokens: nD(list(tokens[2]), tokens[4]))
        d = Literal("D") + "<" + group + ">" + logical
        d.setParseAction(lambda tokens: D(list(tokens[2]), tokens[4]))
        nc = Literal("nC") + "<" + group + ">" + logical
        nc.setParseAction(lambda tokens: nC(list(tokens[2]), tokens[4]))
        c = Literal("C") + "<" + group + ">" + logical
        c.setParseAction(lambda tokens: C(list(tokens[2]), tokens[4]))

        epistemic = (nk | k | ne | e | nd | d | nc | c)

        logical <<= (formula | epistemic | temporal)

        # _logicals_ combines logicals into the full boolean grammar
        __ctlk <<= (_logicals_(logical))

    return __ctlk.parseString(spec, parseAll = True)
# Grammar for serialized type-hint expressions such as
# "l(i(borobudur.page:Page))": i(...) instance, c(...) class,
# l(...) list, d(...,...) dict, t(...) tuple; types are dotted ids
# optionally qualified as "module:Name".
ELEMENT = Forward()
EREF = Group(ELEMENT)
ID = Word(alphanums+".")
TYPE = Group((ID+Suppress(":")+ID | ID))
INSTANCE = ("i("+TYPE+")")
CLASS = "c("+TYPE+")"
LIST = "l("+EREF+")"
DICT ="d("+EREF+","+EREF+")"
TUPLE = "t("+OneOrMore(EREF+Suppress(Optional(",")))+")"
ELEMENT << (INSTANCE | CLASS | LIST | DICT | TUPLE)

# Smoke-test prints (Python 2 print-statement syntax).
print ELEMENT.parseString("i(borobudur.page:Page)")
print ELEMENT.parseString("c(borobudur.page:Page)")
print ELEMENT.parseString("l(i(borobudur.page:Page))")
print ELEMENT.parseString("l(i(string))")
print ELEMENT.parseString("d(l(i(string)),i(int))")
print ELEMENT.parseString("t(i(string))")
print ELEMENT.parseString("t(i(string), i(string))")


class TestTypeHintProcessor(unittest.TestCase):
    # NOTE(review): relies on module-level ``processor`` and ``astng``
    # defined elsewhere in this file -- confirm before reuse.
    def test_instance(self):
        hint = "anu i(int)"
        print processor.process(astng, hint)
        hint = "anu i(borobudur.page:Page)"
        print processor.process(astng, hint)
def __init__(self,text):
    """Parse *text* as a JSON-like dict literal (with Cyrillic-aware
    strings) into tagged {'type': ..., ...} records; the result is
    stored in self.parsed."""
    # Russian letters accepted inside bare/quoted strings.
    rus_alphas = 'їійцукенгшщзхъфывапролджэячсмитьбюІЇЙЦУКЕНГШЩЗХЪФЫВАПРОЛДЖЭЯЧСМИТЬБЮ'
    comma = ','
    string = OneOrMore(Word(alphas+rus_alphas+alphanums+'.'))
    string.setParseAction(lambda t:t)
    # A string may be double-quoted, single-quoted or bare.
    quoted_string = (
        Suppress('"') + Optional(string) + Suppress('"')|
        Suppress("'") + Optional(string) + Suppress("'")|
        string
    ) ('string')

    def string_handler(t):
        # Empty quotes parse to zero tokens -> empty string value.
        asList = t.asList()
        if len(t) == 0:
            return {
                'type':'string',
                'value':'',
            }
        else:
            return {
                'type':'string',
                'value':asList[0]
            }
    quoted_string.setParseAction(debug_lambda(
        string_handler,
        comment="parsed string"
    ) )

    number = OneOrMore(Word(nums+'.')) ('number')
    number.setParseAction(debug_lambda(
        lambda t:{
            'type':'number',
            'value':t.asList()[0]
        },
        comment="parsing number"
    ), )

    value = Forward()
    member = Forward()
    array = Forward()
    dict_ = Forward()

    elements = delimitedList(value) ('elements')
    members = delimitedList(member) ('members')

    # key : value pair (spaces after ':' tolerated)
    member << ( value+Suppress(':')+Optional(ZeroOrMore(' '))+value ) ('member')
    member.setParseAction(lambda t:{
        'type':'member',
        'key':t.asList()[0],
        'value':t.asList()[1]
    })

    value << (
        number|
        # string|
        quoted_string|
        array|
        dict_
    ) ('value')

    array << (Suppress("[") + Optional(elements) + Suppress("]")) ('array')
    array.setParseAction(lambda t:{
        'type':'array',
        'elements':t.asList()
    })

    dict_ << (Suppress("{") + Optional(members) + Suppress("}")) ('dict')
    dict_.setParseAction(lambda t: {
        'type': 'dict',
        'members':t.asList()
    })

    # top level must be a dict literal
    self.parsed = dict_.parseString(text)
def parse(expression, equation=False, subs=dict(), main=None, returnVars=False):
    """Parse an arithmetic/equation string into a sympy expression.

    :param expression: string to parse (non-strings are returned as-is)
    :param equation: when True, parse "lhs = rhs" into a sympy Eq
    :param subs: variable-name substitutions applied before symbols()
                 NOTE(review): mutable default argument -- shared across
                 calls if ever mutated; confirm it is only read.
    :param main: optional object whose fromDotRef resolves dotted refs
                 via a tree computation
    :param returnVars: when True, return the set of variable names seen
                       instead of the parsed expression
    """
    if not isinstance(expression,str):
        return expression
    varSet = set()
    lparen = Literal("(").suppress()
    rparen = Literal(")").suppress()
    equal = Literal("=").suppress()
    dot = Literal(".")  # NOTE(review): unused local -- confirm and remove
    # named constants recognised as capitalised identifiers
    spec = {
        "E": exp(1),
        "Pi": pi
    }

    def getSymbol(s):
        # record the raw name, then map through subs before sympify
        varSet.add(s)
        if s in subs:
            s = subs[s]
        return symbols(s)

    def getFunction(s):
        # s is [funcName, argument]; only len() and sum() are supported
        if s[0] == "len":
            return SetLength(s[1])
        elif s[0] == "sum":
            return SetSummation(s[1])
        else:
            Error('Unknown slng function ' + s[0])

    integer = Word(nums).setParseAction( lambda t: [ int(t[0]) ] )
    decimal = Regex("[0-9]+\.[0-9]").setParseAction( lambda t: [float(t[0])])
    special = Regex("[A-Z][a-zA-Z]*").setParseAction( lambda t: [spec[t[0]]])
    var = Regex("[a-z][a-zA-Z]*").setParseAction( lambda t: [getSymbol(t[0])])
    lowerName = Regex("[a-z][a-zA-Z]*").setParseAction( lambda t: [t[0]])
    prop = Regex("[a-z][a-zA-Z]*\.[a-z][a-zA-Z]*").setParseAction( lambda t: [getSymbol(t[0])])
    ref = Regex("\{[0-9]+\}").setParseAction( lambda t: [getSymbol(t[0])])
    string = Regex('"[-0-9a-zA-Z: ]*"').setParseAction( lambda t: [t[0][1:-1]])

    # binary operator implementations
    opn = { "+": (lambda a,b: a+b ),
            "-": (lambda a,b: a-b ),
            "*": (lambda a,b: a*b ),
            "/": (lambda a,b: a/b ),
            "^": (lambda a,b: a**b ) }
    ops = set(opn.keys())  # NOTE(review): unused local -- confirm and remove

    def opClean(t):
        # left-fold [a, op, b, op, c, ...] into a single value
        if len(t)==1:
            return t
        res = opClean([opn[t[1]](t[0],t[2])]+t[3:])
        return res

    if main is not None:
        def treeCompute(p):
            # resolve a dotted reference by solving its tree computation
            try:
                node = main.fromDotRef(p)
                comp = hypergraph.treeCompute(node)
                res = solve(comp,symbols(p))
                return res[0]
            except Exception as e:
                logging.exception(e)
                Error("Error with tree Compute: ")
        prop = prop.setParseAction( lambda t: [treeCompute(t[0])])

    expr = Forward()
    paren = (lparen + expr + rparen).setParseAction( lambda s,l,t: t)
    function = (lowerName + lparen + (prop | var) + rparen).setParseAction( lambda t: getFunction(t) )
    # NOTE: alternation order matters (decimal before integer, etc.)
    atom = function | string | paren | decimal | integer | ref | prop | special | var
    multExpr = (atom + ZeroOrMore( Word("*/") + atom)).setParseAction( lambda s,l,t: opClean(t))
    expr << (multExpr + ZeroOrMore( Word("+-") + multExpr)).setParseAction( lambda s,l,t: opClean(t))
    equality = (expr + equal + expr).setParseAction( lambda s,l,t: Eq(t[0],t[1]) )

    if equation:
        res = equality.parseString(expression)[0]
    else:
        res = expr.parseString(expression)[0]
    if returnVars:
        return varSet
    else:
        return res
a\u0062c a:b\c~2.0 XY\u005a XY\u005A item:\ item:ABCD\ \ a\ or b a\:b\-c a\:b\+c a\:b\-c\* a\:b\+c\* a\:b\-c\~ a\:b\+c\~ a:b\c~ [ a\ TO a* ] """.splitlines() allpass = True for t in [_f for _f in map(str.strip,tests) if _f]: print(t) try: #~ expression.parseString(t,parseAll=True) print(expression.parseString(t,parseAll=True)) except ParseException as pe: print(t) print(pe) allpass = False print() print(("OK", "FAIL")[not allpass])
def pyparsing_parse(text):
    """
    >>> import os
    >>> dirname = os.path.join(os.path.dirname(__file__), "data")
    >>> filename = os.path.join(dirname, "error1.blk")
    >>> with open(filename, encoding="utf8") as file:
    ...     pyparsing_parse(file.read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 8
    >>> filename = os.path.join(dirname, "error2.blk")
    >>> with open(filename, encoding="utf8") as file:
    ...     pyparsing_parse(file.read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 1
    >>> filename = os.path.join(dirname, "error3.blk")
    >>> with open(filename, encoding="utf8") as file:
    ...     pyparsing_parse(file.read())
    Traceback (most recent call last):
    ...
    ValueError: Error {0}: syntax error, line 4
    >>> expected = "[white: ]\\n[lightblue: Director]\\n/\\n/\\n[white: ]\\n[lightgreen: Secretary]\\n/\\n/\\n[white: Minion #1]\\n[white: ]\\n[white: Minion #2]"
    >>> filename = os.path.join(dirname, "hierarchy.blk")
    >>> with open(filename, encoding="utf8") as file:
    ...     blocks = pyparsing_parse(file.read())
    >>> str(blocks).strip() == expected
    True
    >>> expected = "[#00CCDE: MessageBox Window\\n[lightgray: Frame\\n[white: ]\\n[white: Message text]\\n/\\n/\\n[goldenrod: OK Button]\\n[white: ]\\n[#ff0505: Cancel Button]\\n/\\n[white: ]\\n]\\n]"
    >>> filename = os.path.join(dirname, "messagebox.blk")
    >>> with open(filename, encoding="utf8") as file:
    ...     blocks = pyparsing_parse(file.read())
    >>> str(blocks).strip() == expected
    True
    """
    # Parse action: each node's data becomes a Block with its parsed name
    # and colour (colour defaults to "white" when absent).
    def add_block(tokens):
        return Block.Block(tokens.name, tokens.color if tokens.color else
                           "white")
    left_bracket, right_bracket = map(Suppress, "[]")
    # One or more '/' characters start new row(s); the action replaces the
    # token with the COUNT of slashes.
    new_rows = Word("/")("new_rows").setParseAction(
        lambda tokens: len(tokens.new_rows))
    # A node name is any run of characters that are not structural.
    name = CharsNotIn("[]/\n")("name").setParseAction(
        lambda tokens: tokens.name.strip())
    # Colour is either "#RRGGBB" (exactly 7 chars) or a colour word.
    color = (Word("#", hexnums, exact=7) | Word(alphas, alphanums))("color")
    # "[]" with nothing inside maps to the EmptyBlock sentinel.
    empty_node = (left_bracket + right_bracket).setParseAction(
        lambda: EmptyBlock)
    nodes = Forward()
    node_data = Optional(color + Suppress(":")) + Optional(name)
    node_data.setParseAction(add_block)
    # '-' (And with error stop) after '[' forbids backtracking, so a bad
    # node body raises ParseSyntaxException instead of silently failing.
    node = left_bracket - node_data + nodes + right_bracket
    # A node list: optional row breaks followed by nodes/empty nodes,
    # grouped so results.asList() yields one nested list.
    nodes << Group(ZeroOrMore(Optional(new_rows) +
                              OneOrMore(node | empty_node)))
    stack = [Block.get_root_block()]
    try:
        results = nodes.parseString(text, parseAll=True)
        assert len(results) == 1
        items = results.asList()[0]
        populate_children(items, stack)
    except (ParseException, ParseSyntaxException) as err:
        # "{{0}}" formats to a literal "{0}" — the doctests above pin this
        # exact message text, so it is intentional.
        raise ValueError("Error {{0}}: syntax error, line "
                         "{0}".format(err.lineno))
    return stack[0]
def assignUsing(s):
    """Return a parse action that re-keys a token.

    The returned action looks for result name *s* in the tokens; when
    present, the value stored under *s* becomes a new result key whose
    value is ``tokens[0]``, and the original name *s* is deleted.
    """
    def assignPA(tokens):
        if s in tokens:
            # tokens[s] holds the desired key; tokens[0] is the value.
            tokens[tokens[s]] = tokens[0]
            del tokens[s]
    return assignPA

# A GROUP is MARK, then zero or more items (each optionally tagged with an
# @attribute that assignUsing("attr") turns into a named result), closed by
# either a WORD used as the group's name or an UNMARK token.
# NOTE(review): MARK, UNMARK, WORD, ATTRIBUTE, NUMBER, FLOAT, STRING, BLOB
# and the Forward-declared 'item' are defined earlier in the original file,
# outside this excerpt.
GROUP = (MARK
         + Group(ZeroOrMore(
             (item + Optional(ATTRIBUTE)("attr")
              ).setParseAction(assignUsing("attr"))))
         + (WORD("name") | UNMARK)
         ).setParseAction(assignUsing("name"))

# Close the recursive grammar: an item is any scalar or a nested GROUP.
item << (NUMBER | FLOAT | STRING | BLOB | GROUP)

# NOTE(review): in this copy of the file the original line breaks inside
# the triple-quoted test data were collapsed to spaces, so splitlines()
# yields a single test; each "[ ... root"-style case was presumably its
# own line originally — confirm against the upstream source.
tests = """\
[ '10:1234567890' @name 25 @age +0.45 @percentage person:zed [ [ "hello" 1 child root [ "child" [ 200 '4:like' "I" "hello" things root [ [ "data" [ 2 1 ] @numbers child root [ [ 1 2 3 ] @test 4 5 6 root """.splitlines()

# Demo driver: parse each non-empty test line and dump the named results.
for test in tests:
    if test:
        print(test)
        print(item.parseString(test).dump())
        print()