def _parse_tokens(self, token_stream, parser_rule):
    """Run the named ANTLR parser rule over *token_stream*.

    NOTE(review): this free function is an exact duplicate of
    SQLParser._parse_tokens in the same file - consider deleting one copy.

    :param token_stream: token stream (antlr3.CommonTokenStream) to parse.
    :param parser_rule: name of the SQLiteParser rule method to invoke.
    :return: the rule method's return value, or None if parsing raised.
    """
    ret = None
    self.parser = SQLiteParser(token_stream)
    try:
        # Look the rule up by name so callers can select any grammar rule.
        ret = getattr(self.parser, parser_rule)()
    except Exception as e:
        # Best-effort: report and fall through to return None, since
        # callers treat None as "unparseable".  Single-string form prints
        # identically on Python 2 and Python 3 (the original bare
        # `print "...", e` statement was Python-2-only syntax).
        print("SQLParser error: " + str(e))
    return ret
class SQLParser(object):
    """Thin wrapper around the ANTLR-generated SQLite lexer/parser.

    Tokenises a query string, runs a grammar rule over it, and exposes
    the token types seen (``self.tokens``) and the string form of the
    parse tree (``self.tree``) for later inspection.
    """

    def __init__(self):
        # Most recent SQLiteParser instance; (re)assigned by _parse_tokens().
        self.parser = None

    def _lexical_analysis(self, query):
        """Tokenise *query*, recording each token's type in ``self.tokens``.

        :param query: SQL text to tokenise.
        :return: an antlr3.CommonTokenStream over the (rewound) lexer.
        """
        self.tokens = []
        query_stream = antlr3.ANTLRStringStream(query)
        lexer = SQLiteLexer(query_stream)
        for token in lexer:
            self.tokens.append(token.getType())
        # The loop above consumed the lexer; rewind it so the token
        # stream built below sees the tokens again.
        lexer.reset()
        return antlr3.CommonTokenStream(lexer)

    def _parse_tokens(self, token_stream, parser_rule):
        """Run the named parser rule over *token_stream*.

        :param token_stream: antlr3.CommonTokenStream to parse.
        :param parser_rule: name of the SQLiteParser rule method to invoke.
        :return: the rule method's return value, or None if parsing raised.
        """
        ret = None
        self.parser = SQLiteParser(token_stream)
        try:
            # Look the rule up by name so callers can select any grammar rule.
            ret = getattr(self.parser, parser_rule)()
        except Exception as e:
            # Best-effort: report and return None rather than propagate.
            # Single-string form prints identically on Python 2 and 3
            # (the original bare `print` statement was Python-2-only).
            print("SQLParser error: " + str(e))
        return ret

    def _classify(self, query, parser_rule):
        """Lex then parse *query* with *parser_rule*.

        Side effect: on success, stores the parse tree's string form in
        ``self.tree``.

        :return: the parse result, or None if parsing failed.
        """
        token_stream = self._lexical_analysis(query)
        result = self._parse_tokens(token_stream, parser_rule)
        # Identity comparison (`is not None`) instead of `== None`.
        if result is not None and result.tree is not None:
            self.tree = result.tree.toStringTree()
        return result

    def parser_query(self, query, rule="sql_stmt_list"):
        """Repeatedly parse *query*, trimming or repairing it on errors.

        The query is URL-decoded first, then parsed; on a syntax error the
        consumed prefix is stripped (or a missing ``)`` appended) and the
        remainder is retried until it parses cleanly or cannot shrink.

        :param query: URL-encoded SQL text.
        :param rule: grammar rule to apply (default ``sql_stmt_list``).
        """
        # NOTE(review): urllib.unquote is the Python 2 API; on Python 3
        # this would be urllib.parse.unquote - confirm target version.
        query = urllib.unquote(query)
        while query:  # truthiness instead of len(query) > 0
            res = self._classify(query, rule)
            if res is None:
                break
            if self.parser.getNumberOfSyntaxErrors() == 0:
                break  # remaining query parsed cleanly - done
            if res.getStart().stop is None:
                break  # no position to resume from
            # NOTE(review): `self.parser.error` is not a stock ANTLR
            # attribute - presumably set by the generated parser; verify.
            if "mismatched input '<EOF>' expecting RPAREN" in self.parser.error:
                # Unbalanced parenthesis: close it before retrying.
                query += ')'
            if "SLEEP" in query.upper():
                # Re-anchor after the consumed prefix as a fresh SELECT.
                query = 'SELECT ' + query[res.getStart().stop + 3:]
            else:
                # Drop the prefix the parser already consumed and retry.
                query = query[res.getStart().stop + 1:]