def _build_filter_parser(field_names):
    """Build a pyparsing grammar for filter expressions over *field_names*.

    A statement is zero or more comparisons joined by ``and`` / ``or``,
    where a comparison is ``<field> <op> <value>``, ``<value> <op> <field>``,
    ``<field> <op> <field>`` or ``<field> [not] isnull``.  Parse actions
    wrap matches in the project AST types (Comparison, Operator, LogicalOp,
    Integer, Float, String, Boolean, Negation).

    Returns the top-level pyparsing element.
    """
    field = _build_field_expr(field_names)

    negation = CaselessKeyword('not')
    negation.setParseAction(lambda x: Negation(x[0]))

    # Binary operators.  '^' is pyparsing's Or, which always takes the
    # longest match, so e.g. 'lte' can never be mis-read as 'lt'.
    comparison_operator = Group(
        Keyword('=')
        ^ Keyword('!=')
        ^ Keyword('>=')
        ^ Keyword('<=')
        ^ Keyword('<')
        ^ Keyword('>')
        ^ Keyword('lte')
        ^ Keyword('lt')
        ^ Keyword('gte')
        ^ Keyword('gt')
        ^ (Optional(negation)
           + (CaselessKeyword('contains')
              ^ CaselessKeyword('icontains')
              ^ CaselessKeyword('startswith')
              ^ CaselessKeyword('istartswith')
              ^ CaselessKeyword('endswith')
              ^ CaselessKeyword('iendswith')
              ^ CaselessKeyword('eq'))))
    comparison_operator.setParseAction(lambda x: Operator(x))

    # Unary (field-only) operator: 'isnull', optionally negated.
    # Optional(negation) already covers the bare 'isnull' case, so the
    # former redundant "isnull ^ (Optional(not) + isnull)" alternation
    # was collapsed to the single equivalent form below.
    single_value_operator = Group(Optional(negation) + CaselessKeyword('isnull'))
    single_value_operator.setParseAction(lambda x: Operator(x))

    # Literal values: signed integers/floats, quoted strings, booleans.
    plusorminus = Literal('+') | Literal('-')
    num_integer = Combine(Optional(plusorminus) + Word(nums))
    num_integer.setParseAction(lambda x: Integer(x[0]))
    num_float = Combine(Optional(plusorminus) + Word(nums) + '.'
                        + Word(nums))
    num_float.setParseAction(lambda x: Float(x[0]))
    quoted_string = (QuotedString("'") ^ QuotedString('"'))
    quoted_string.setParseAction(lambda x: String(x[0]))
    boolean = Or([
        CaselessKeyword(v) for v in BOOLEAN_TRUE_VALUES + BOOLEAN_FALSE_VALUES
    ])
    boolean.setParseAction(lambda x: Boolean(x[0]))

    # BUG FIX: num_float was constructed above (with its Float parse
    # action) but never added to `value`, so float literals could not be
    # parsed at all.  Longest-match '^' guarantees '3.14' becomes a Float
    # while a plain '3' still becomes an Integer.
    value = (quoted_string ^ num_float ^ num_integer ^ boolean)

    comparison = Group((field + comparison_operator + value)
                       ^ (value + comparison_operator + field)
                       ^ (field + comparison_operator + field)
                       ^ (field + single_value_operator))
    comparison.setParseAction(lambda x: Comparison(x))

    # value-vs-value comparisons are recognized only so we can raise a
    # clear, specific error instead of a generic parse failure.
    invalid_comparison = Group(
        (value + comparison_operator + value).setParseAction(lambda x: fail(
            "Value may not be compared with values: {0}".format(' '.join(x)))))

    logical_op = Group(CaselessKeyword("and") | CaselessKeyword("or"))
    logical_op.setParseAction(lambda x: LogicalOp(x[0][0]))

    # Zero or more comparisons chained with logical operators.
    statement = Optional(comparison | invalid_comparison) + ZeroOrMore(
        logical_op + (comparison | invalid_comparison))
    return statement
def _make_grammar(self):
    """Build the pyparsing grammar for the search-query mini language.

    Terms are whitespace separated and may be a quoted string, a
    ``name:value`` command, a bare word, an exclusion (term prefixed
    with ``-`` or ``^``), or the connective keywords ``or`` / ``and``.
    """
    from pyparsing import (QuotedString, ZeroOrMore, Combine, Literal,
                           Optional, OneOrMore, Regex, CaselessKeyword)

    # Backslash escapes that map to a specific character; any other
    # escaped character simply loses its backslash.
    known_escapes = {
        r'\\': '\\',
        r"\'": "'",
        r'\"': '"',
        r'\f': '\f',
        r'\n': '\n',
        r'\r': '\r',
        r'\t': '\t',
        r'\ ': ' ',
    }

    def unescape(s, loc, toks):
        # toks[0] is a two-character "\X" sequence.
        return known_escapes.get(toks[0], toks[0][1:])

    escape_seq = Combine(Regex(r'\\.')).setParseAction(unescape)
    bare_word = Combine(OneOrMore(escape_seq | Regex(r'[^\s\\]+')))
    blank = Regex(r'\s+').suppress()
    quoted = Combine(
        OneOrMore(QuotedString('"', escChar='\\')
                  | QuotedString("'", escChar='\\')))

    # "name:value" pair; the colon itself is dropped from the results.
    command = Regex(r'[^\s:]+') + Literal(":").suppress() + (quoted | bare_word)
    include = quoted | command | bare_word
    # A leading "-" or "^" negates the following term.
    exclude = ((Literal("-") | Literal("^")).suppress()
               + (quoted | command | bare_word))

    or_keyword = CaselessKeyword("or")
    and_keyword = CaselessKeyword("and")

    # Keywords are tried first so "or"/"and" are not eaten as bare words.
    term = (or_keyword | and_keyword) | exclude | include
    grammar = ZeroOrMore(Optional(blank) + term)

    # Wrap each construct in its expression node type.
    command.setParseAction(CommandExpr)
    include.setParseAction(IncludeExpr)
    exclude.setParseAction(ExcludeExpr)
    or_keyword.setParseAction(OrKeywordExpr)
    and_keyword.setParseAction(AndKeywordExpr)
    return grammar
def _make_grammar(self):
    """Build the pyparsing grammar for the search-query language.

    Terms are whitespace separated and may be: a quoted string, a
    ``name:value`` command, a bare word, an exclusion (term prefixed with
    ``-`` or ``^``), or the keywords ``or`` / ``and``.
    """
    # NOTE(review): this appears to be an exact duplicate of another
    # _make_grammar definition in this file — candidates for consolidation.
    from pyparsing import (QuotedString, ZeroOrMore, Combine, Literal,
                           Optional, OneOrMore, Regex, CaselessKeyword)

    def escape_handler(s, loc, toks):
        # Translate a two-character backslash escape into the character it
        # stands for; unrecognized escapes just drop the backslash.
        if toks[0] == '\\\\':
            return "\\"
        elif toks[0] == '\\\'':
            return "'"
        elif toks[0] == '\\"':
            return '"'
        elif toks[0] == '\\f':
            return "\f"
        elif toks[0] == '\\n':
            return "\n"
        elif toks[0] == '\\r':
            return "\r"
        elif toks[0] == '\\t':
            return "\t"
        elif toks[0] == '\\ ':
            return " "
        else:
            return toks[0][1:]

    escape = Combine(Regex(r'\\.')).setParseAction(escape_handler)
    word = Combine(OneOrMore(escape | Regex(r'[^\s\\]+')))
    whitespace = Regex(r'\s+').suppress()
    quotedstring = Combine(
        OneOrMore(
            QuotedString('"', escChar='\\') | QuotedString("'", escChar='\\')))
    # "name:value" pair; the colon is suppressed from the parse results.
    command = Regex(r'[^\s:]+') + Literal(":").suppress() + (quotedstring | word)
    include = quotedstring | command | word
    # A leading "-" or "^" marks the following term as excluded.
    exclude = (Literal("-") | Literal("^")).suppress() + (quotedstring |
                                                         command | word)
    or_keyword = CaselessKeyword("or")
    and_keyword = CaselessKeyword("and")
    # Keywords first so "or"/"and" are not consumed as bare words.
    keyword = or_keyword | and_keyword
    argument = (keyword | exclude | include)
    expr = ZeroOrMore(Optional(whitespace) + argument)
    # arguments.leaveWhitespace()
    # Each construct is wrapped in its expression node type.
    command.setParseAction(CommandExpr)
    include.setParseAction(IncludeExpr)
    exclude.setParseAction(ExcludeExpr)
    or_keyword.setParseAction(OrKeywordExpr)
    and_keyword.setParseAction(AndKeywordExpr)
    # or_expr.setParseAction(lambda s, loc, toks: OrOperator(toks[0], toks[2]))
    # and_expr.setParseAction(lambda s, loc, toks: AndOperator(toks[0], toks[2]))
    # no_expr.setParseAction(lambda s, loc, toks: AndOperator(toks[0], toks[1]))
    # expr.setParseAction(Operator)
    return expr
def parse(self, key, value):
    """
    Parse the ACL/filters in the [ACL] section. They have the following
    format:

        <who> = <filter>: <requests>

    Where:
        who      is one or more of: @group or a username (user connecting
                 to Sepiida)
        filter   is one or more of: ALL or @group or sameLocation
        requests is one or more of: ALL or request name (not currently
                 checked)

    The parsed parts are stored as plain lists on self._who,
    self._filter and self._allowedRequests.  Raises a pyparsing
    ParseException on malformed input.
    """
    def failToken(s):
        """
        pyparsing hack to give better error messages, "a or b expected"
        rather than "b expected".
        """
        # NoMatch never matches anything; giving it a name makes that
        # name appear in the "expected ..." part of the ParseException.
        t = NoMatch()
        t.setName(s)
        return t

    identifierChars = alphanums + '_-'
    group = Word('@', identifierChars)  # e.g. "@admins"
    kwAll = CaselessKeyword('ALL')
    kwSameLocation = CaselessKeyword('sameLocation')
    # make sure the case is always the same after parsing
    kwAll.setParseAction(lambda tokens: 'ALL')
    kwSameLocation.setParseAction(lambda tokens: 'sameLocation')
    # The negative lookahead (~) keeps the reserved keywords from being
    # consumed as ordinary identifiers.
    user = ~(kwAll | kwSameLocation) + Word(identifierChars)
    request = ~(kwAll | kwSameLocation) + Word(identifierChars)
    request.setParseAction(lambda tokens: tokens[0].lower())
    # In each alternation below, failToken comes first purely for its
    # error message — it can never actually match.
    who = Group(OneOrMore(failToken("@group or username")
                          | group
                          | user)).setResultsName('who')
    filter_ = Group(failToken("ALL or sameLocation or @group")
                    | kwAll
                    | OneOrMore(group | kwSameLocation)).setResultsName('filter')
    requests = Group(failToken("ALL or request name")
                     | kwAll
                     | OneOrMore(request)).setResultsName('requests')
    # The left-hand side of the "=" is the who-list; the right-hand side
    # is "<filter>: <requests>".  stringEnd forces a full-input match.
    lhs = who + stringEnd
    rhs = filter_ + Suppress(':') + requests + stringEnd
    r1 = lhs.parseString(key)
    r2 = rhs.parseString(value)
    self._who = r1.who.asList()
    self._filter = r2.filter.asList()
    self._allowedRequests = r2.requests.asList()
def parse(self, query): """Parses a query string.""" # Parse instructions quoted_string = QuotedString(quoteChar='"', escChar='\\', unquoteResults=True) field_name = Word(alphas, alphanums + '_') subexpression = Forward() boolean_expression = Forward() binary_operator = Literal('=') | Literal('<=') | Literal('<') | Literal('>=') | Literal('>') boolean_operator = CaselessKeyword('AND') | CaselessKeyword('OR') boolean_not = CaselessKeyword('NOT') boolean_value = CaselessKeyword("true") ^ CaselessKeyword("false") integer = Word(nums) rvalue = quoted_string ^ boolean_value ^ integer field_to_value = field_name + binary_operator + rvalue expression = Optional(boolean_not) + ((subexpression + ZeroOrMore(boolean_expression)) | (field_to_value + ZeroOrMore(boolean_expression))) boolean_expression << boolean_operator + expression left_bracket = Literal('(') right_bracket = Literal(')') subexpression << (left_bracket + expression + right_bracket) search_query = expression # Parse actions for emitting special cases field_to_value.setParseAction(do_field_to_value) boolean_operator.setParseAction(do_boolean_operator) boolean_not.setParseAction(do_boolean_operator) boolean_value.setParseAction(do_boolean_value) integer.setParseAction(do_integer) left_bracket.setParseAction(do_bracket) right_bracket.setParseAction(do_bracket) self.tokens = search_query.parseString(query)
integer | var | attr_open + Optional(dictMembers) + attr_close | sets | list_open + Group(delimitedList(list_member_val)) + list_close ) memberDef = Dict(Group(field_name + colon + field_val)) dictMembers << delimitedList(memberDef) attributes = attr_open + Optional(dictMembers).setResultsName("attributes") +\ attr_close merge = CaselessKeyword("MERGE") merge.setParseAction(lambda t: t[0].lower()) method = CaselessKeyword("METHOD") method.setParseAction(lambda t: t[0].lower()) edges = CaselessKeyword("EDGES") edges.setParseAction(lambda t: t[0].lower()) union = CaselessKeyword("UNION") as_keyword = CaselessKeyword("AS") intersection = CaselessKeyword("INTERSECTION") method_id = (union | intersection) method_id.setParseAction(lambda t: t[0].lower()) merge = merge + list_open +\ Group(list_of_nodes).setResultsName("nodes") +\ list_close +\ Optional(method + method_id.setResultsName("method")) +\ Optional(as_keyword + node.setResultsName("node_name")) +\ Optional(edges + method_id.setResultsName("edges_method"))
"""
Parses the projx NetworkX DSL.
"""
from itertools import cycle, islice
from pyparsing import (Word, alphanums, ZeroOrMore, stringEnd, Suppress,
                       Literal, CaselessKeyword, Optional, Forward,
                       quotedString, removeQuotes)

# Used throughout as a variable/attr name.
var = Word(alphanums, "_" + alphanums)

############### MATCH STATEMENT ######################
match = CaselessKeyword("MATCH")
# GRAPH/SUBGRAPH selector; keyword text is normalized to lower case.
graph = CaselessKeyword("GRAPH") | CaselessKeyword("SUBGRAPH")
graph.setParseAction(lambda t: t[0].lower())

################ Transformations #######################
# Transformation verbs, also normalized to lower case after parsing.
transformation = (CaselessKeyword("TRANSFER") | CaselessKeyword("PROJECT")
                  | CaselessKeyword("COMBINE"))
transformation.setParseAction(lambda t: t[0].lower())

################ NODE AND EDGE PATTERNS ###################
# Used for node and edge patterns.
# NOTE(review): "seperator" is a misspelling of "separator"; kept as-is
# because other parts of the file may reference it under this name.
seperator = Suppress(Literal(":"))
tp = seperator + Word(alphanums, "_" + alphanums)  # Node type pattern.
node_open = Suppress(Literal("("))
value = [vs[0] + ", ".join(vs[1:-1]) + vs[-1]] res += prefix + value + [";\n"] return res createToken = Keyword("CREATE") databaseToken = Keyword("DATABASE") tableToken = Keyword("TABLE") ifneToken = Keyword("IF") + Keyword("NOT") + Keyword("EXISTS") nullToken = Keyword("NULL") nnToken = Keyword("NOT") + nullToken collateToken = Keyword("COLLATE") dcsToken = Keyword("DEFAULT") + Keyword("CHARACTER") + Keyword("SET") useToken = Keyword("USE") defaultToken = Keyword("DEFAULT") unsignedToken = Keyword("UNSIGNED") autoincrementToken = Keyword("AUTO_INCREMENT") autoincrementToken.setParseAction(lambda toks: ["PRIMARY KEY AUTOINCREMENT"]) keyToken = Keyword("KEY") primaryToken = Keyword("PRIMARY") uniqueToken = Keyword("UNIQUE") insertToken = Keyword("INSERT") intoToken = Keyword("INTO") valuesToken = Keyword("VALUES") ident = Word(alphas, alphanums + "_$" ) ^ QuotedString('"') ^ QuotedString("`") ident.setParseAction(lambda toks: ['"%s"' % toks[0]]) string = QuotedString("'",multiline=True) string.setParseAction(lambda toks: ["'%s'" % toks[0]]) columnName = delimitedList( ident, ".",combine=True) tableName = delimitedList( ident, ".",combine=True) dataType = Word(alphas) + Combine(Optional(Literal("(") + (Word(nums) ^ delimitedList(string,combine=True)) + Literal(")"))) + ZeroOrMore(nnToken ^ autoincrementToken ^ (defaultToken + (string ^ nullToken)) ^ unsignedToken.suppress() )
Parses the projx NetworkX DSL. """ from itertools import cycle, islice from pyparsing import (Word, alphanums, ZeroOrMore, stringEnd, Suppress, Literal, CaselessKeyword, Optional, Forward, quotedString, removeQuotes) # Used throughout as a variable/attr name. var = Word(alphanums, "_" + alphanums) ############### MATCH STATEMENT ###################### match = CaselessKeyword("MATCH") graph = CaselessKeyword("GRAPH") | CaselessKeyword("SUBGRAPH") graph.setParseAction(lambda t: t[0].lower()) ################ Transformations ####################### transformation = ( CaselessKeyword("TRANSFER") | CaselessKeyword("PROJECT") | CaselessKeyword("COMBINE") ) transformation.setParseAction(lambda t: t[0].lower()) ################ NODE AND EDGE PATTERNS ################### # Used for node and edge patterns. seperator = Suppress(Literal(":")) tp = seperator + Word(alphanums, "_" + alphanums)
comment = Group(NotAny(eol) + Literal('!') + SkipTo(eol)).suppress() # comment kw = Group(Regex('[A-Za-z][a-zA-Z0-9]+') + Optional(comment) + eol) # any keyword first_line = eos + SkipTo(eol) + eol # first line with title block_n = Group(OneOrMore(NotAny(eol) + Number) + Optional(comment) + eol) # sequence of number until end of line # Basis grammar comment = Group(Literal('!') + SkipTo(eol)).suppress() l = CaselessKeyword('S') | CaselessKeyword('P') | CaselessKeyword('D') \ | CaselessKeyword('F') | CaselessKeyword('G') l_sp = CaselessKeyword('SP') shell = Group(l.setParseAction(ppc.upcaseTokens) + Number + Number) shell_sp = Group(l_sp.setParseAction(ppc.upcaseTokens) + Number + Number) exponent = Group(Number + Number) exponent_sp = Group(Number + Number + Number) bs = Group(shell + OneOrMore(exponent)) bs_sp = Group(shell_sp + OneOrMore(exponent_sp)) endbs = Word('*').suppress() basis_set = Group( Str.setResultsName('bdescr') + Number + OneOrMore(bs | bs_sp).setResultsName('basis') + Optional(endbs))
def sql2table_list(tables, show_columns=True):
    """Build a pyparsing grammar for a SQL dump and collect CREATE TABLE info.

    Parsed tables are appended to *tables* (as Table objects) as a side
    effect of the parse actions.  The returned grammar matches "--"
    comments, CREATE TABLE statements, ALTER TABLE ... ADD CONSTRAINT
    foreign-key statements, and any other ';'-terminated statement.

    show_columns -- when False, foreign keys are encoded without the
                    referenced column name.
    """
    def field_act(s, loc, tok):
        # Flatten a column definition into one space-joined string.
        return " ".join(tok).replace('\n', '\\n')

    def field_list_act(s, loc, tok):
        return tok

    def create_table_act(s, loc, tok):
        # Decode the "FK:"/"PK:"/"KEY:" markers produced by the other
        # parse actions and populate a Table object.
        table = Table(tok["tableName"], None, {}, {})
        for t in tok["fields"]:
            if str(t).startswith("FK:"):
                l = t[3:].split(":")
                if len(l) > 2:
                    # NOTE(review): "fcoloumn" [sic] is a runtime dict key
                    # that consumers may rely on; not renamed here.
                    table.fkeys[l[0]] = {"ftable": l[1], "fcoloumn": l[2]}
                else:
                    table.fkeys[l[0]] = {"ftable": l[1]}
            elif str(t).startswith("PK:"):
                table.pk = t[3:]
            elif str(t).startswith("KEY:"):
                pass
            else:
                l = t.split(" ")
                table.columns[l[0]] = " ".join(l[1:])
        tables.append(table)

    def add_fkey_act(s, loc, tok):
        return '{tableName}:{keyName}:{fkTable}:{fkCol}'.format(**tok)

    def fkey_act(s, loc, tok):
        return 'FK:{keyName}:{fkTable}:{fkCol}'.format(**tok)

    def fkey_nocols_act(s, loc, tok):
        return 'FK:{keyName}:{fkTable}'.format(**tok)

    # def fkey_list_act(s, loc, tok):
    #     return "\n  ".join(tok)

    def other_statement_act(s, loc, tok):
        pass

    def join_string_act(s, loc, tok):
        return "".join(tok).replace('\n', '\\n')

    def quoted_default_value_act(s, loc, tok):
        return tok[0] + " " + "".join(tok[1::])

    def pk_act(s, loc, tok):
        return 'PK:{primary_key}'.format(**tok)

    def k_act(s, loc, tok):
        pass

    def no_act(s, loc, tok):
        pass

    # Low-level tokens.
    string = Regex('[a-zA-Z0-9=_]+')
    ws = OneOrMore(White()).suppress()
    lp = Regex('[(]').suppress()
    rp = Regex('[)]').suppress()
    c = Regex('[,]').suppress()
    q = Regex("[`]").suppress()

    # Balanced-parenthesis blob, flattened into a single string.
    parenthesis = Forward()
    parenthesis <<= "(" + ZeroOrMore(CharsNotIn("()") | parenthesis) + ")"
    parenthesis.setParseAction(join_string_act)

    quoted_string = "'" + ZeroOrMore(CharsNotIn("'")) + "'"
    quoted_string.setParseAction(join_string_act)
    quoted_default_value = "DEFAULT" + quoted_string + OneOrMore(
        CharsNotIn(", \n\t"))
    quoted_default_value.setParseAction(quoted_default_value_act)
    column_comment = CaselessKeyword("COMMENT") + quoted_string

    # PRIMARY KEY (<col>) — encoded as "PK:<col>" by pk_act.
    primary_key = CaselessKeyword('PRIMARY').suppress() + CaselessKeyword(
        "KEY").suppress() + lp + string.setResultsName('primary_key') + rp
    primary_key.ignore("`")
    primary_key.setParseAction(pk_act)

    # [UNIQUE] KEY <name> (<cols>) — matched but discarded (k_act).
    key_def = Optional(CaselessKeyword('UNIQUE').suppress()) + CaselessKeyword(
        'KEY').suppress() + Word(alphanums + "_") + lp + delimitedList(
            string.setResultsName('key'), delim=",") + rp
    key_def.ignore("`")
    key_def.setParseAction(k_act)

    # CONSTRAINT <n> FOREIGN KEY (<col>) REFERENCES <tbl> (<col>) with up
    # to two ON DELETE/UPDATE clauses — encoded as "FK:..." text.
    fkey_def = CaselessKeyword("CONSTRAINT") + Word(
        alphanums + "_"
    ) + CaselessKeyword("FOREIGN") + CaselessKeyword("KEY") + lp + Word(
        alphanums + "_"
    ).setResultsName("keyName") + rp + CaselessKeyword("REFERENCES") + Word(
        alphanums + "._").setResultsName("fkTable") + lp + Word(
            alphanums + "_").setResultsName("fkCol") + rp + Optional(
                CaselessKeyword("DEFERRABLE")
            ) + Optional(
                CaselessKeyword("ON") +
                (CaselessKeyword("DELETE") | CaselessKeyword("UPDATE")) +
                (CaselessKeyword("CASCADE") | CaselessKeyword("RESTRICT")
                 | CaselessKeyword("NO ACTION") | CaselessKeyword("SET NULL"))
            ) + Optional(
                CaselessKeyword("ON") +
                (CaselessKeyword("DELETE") | CaselessKeyword("UPDATE")) +
                (CaselessKeyword("CASCADE") | CaselessKeyword("RESTRICT")
                 | CaselessKeyword("NO ACTION") | CaselessKeyword("SET NULL")))
    fkey_def.ignore("`")
    if show_columns:
        fkey_def.setParseAction(fkey_act)
    else:
        fkey_def.setParseAction(fkey_nocols_act)
    #fkey_list_def = ZeroOrMore(Suppress(",") + fkey_def)
    #fkey_list_def.setParseAction(fkey_list_act)

    # Ordinary column definition: name, type, then optional modifiers.
    field_def = Word(alphanums + "_\"':-/[].") + Word(
        alphanums + "_\"':-/[].") + Optional(
            CaselessKeyword("NOT NULL") | CaselessKeyword("DEFAULT") +
            Word(alphanums + "_\"':-/[].")) + Optional(
                OneOrMore(quoted_default_value | column_comment
                          | Word(alphanums + "_\"'`:-/[].") | parenthesis))
    field_def.ignore("`")
    # if columns:
    field_def.setParseAction(field_act)
    # else:
    #     field_def.setParseAction(no_act)

    # Order matters: key/pk definitions must be tried before the very
    # permissive field_def.
    field_list_def = delimitedList(
        (primary_key.suppress() |
         key_def.suppress() |
         fkey_def |
         field_def
         ), delim=","
    )  #if columns else field_def.suppress()
    field_list_def.setParseAction(field_list_act)

    tablename_def = (Word(alphanums + "_.") | QuotedString("\""))
    tablename_def.ignore("`")

    create_table_def = CaselessKeyword("CREATE").suppress() + CaselessKeyword(
        "TABLE").suppress() + tablename_def.setResultsName(
            "tableName") + lp + field_list_def.setResultsName(
                "fields") + rp + ZeroOrMore(
                    Word(alphanums + "_\"'`:-/[].=")) + Word(";").suppress()
    create_table_def.setParseAction(create_table_act)

    # ALTER TABLE ONLY <tbl> ADD CONSTRAINT ... FOREIGN KEY ...;
    add_fkey_def = CaselessKeyword(
        "ALTER") + "TABLE" + "ONLY" + tablename_def.setResultsName(
            "tableName") + "ADD" + "CONSTRAINT" + Word(
                alphanums + "_"
            ) + "FOREIGN" + "KEY" + "(" + Word(alphanums + "_").setResultsName(
                "keyName") + ")" + "REFERENCES" + Word(
                    alphanums + "._").setResultsName("fkTable") + "(" + Word(
                        alphanums + "_"
                    ).setResultsName("fkCol") + ")" + Optional(
                        Literal("DEFERRABLE")) + Optional(
                            Literal("ON") + "DELETE" +
                            (Literal("CASCADE") | Literal("RESTRICT"))) + ";"
    add_fkey_def.setParseAction(add_fkey_act)

    # Catch-all for any other statement; consumed and ignored.
    other_statement_def = OneOrMore(CharsNotIn(";")) + ";"
    other_statement_def.setParseAction(other_statement_act)
    comment_def = "--" + ZeroOrMore(CharsNotIn("\n"))
    comment_def.setParseAction(other_statement_act)

    return OneOrMore(comment_def | create_table_def | add_fkey_def
                     | other_statement_def)
# Punctuation tokens — matched during parsing but dropped from the results.
node_open = Literal("(").suppress()
node_close = Literal(")").suppress()
label_start = Literal(":").suppress()
property_start = Literal(".").suppress()
rel_source = Literal("-").suppress()
rel_target_left = Literal("<-").suppress()
rel_target_right = Literal("->").suppress()
rel_open = Literal("[").suppress()
rel_close = Literal("]").suppress()


def _lowercase_kw(word):
    """Case-insensitive keyword whose matched text is lower-cased."""
    kw = CaselessKeyword(word)
    kw.setParseAction(lambda t: t[0].lower())
    return kw


# Statement keywords, normalized to lower case in the parse results.
match = _lowercase_kw("MATCH")
where = _lowercase_kw("WHERE")
and_connective = _lowercase_kw("AND")
or_connective = _lowercase_kw("OR")
return_kw = _lowercase_kw("RETURN")

star = Literal("*").suppress()