Example #1
 def _group(stream):
     for stmt in stream:
         if strip_comments_only:
             grouping.group_comments(stmt)
         else:
             grouping.group(stmt)
         yield stmt
Example #2
 def _group(stream):
     for stmt in stream:
         if strip_comments_only:
             grouping.group_comments(stmt)
         else:
             grouping.group(stmt)
         yield stmt
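For context, the `_group()` helper in the two examples above is an internal sqlparse generator; the sketch below shows the two branches it switches between, assuming a sqlparse version that exposes `FilterStack` and `engine.grouping` as in the later examples (the SQL string and the inspection line are illustrative only).

    # Minimal sketch of what _group() does per statement: full grouping vs.
    # comment-only grouping. FilterStack() leaves statements ungrouped by
    # default, so the grouping step can be applied by hand here.
    from sqlparse.engine import FilterStack, grouping

    stack = FilterStack()                       # grouping not enabled
    stmt = next(stack.run("SELECT a, b FROM t  -- note\n"))

    grouping.group(stmt)             # the strip_comments_only == False branch
    # grouping.group_comments(stmt)  # the comments-only branch
    print([type(tok).__name__ for tok in stmt.tokens])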
Example #3
    def run(self, sql, encoding=None):
        stream = lexer.tokenize(sql, encoding)
        # Process token stream
        for filter_ in self.preprocess:
            stream = filter_.process(stream)

        stream = StatementSplitter().process(stream)

        # Output: Stream processed Statements
        for stmt in stream:
            if self._grouping:
                stmt = grouping.group(stmt)

            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield stmt
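This `run()` method is sqlparse's `FilterStack.run`; the sketch below shows how it is typically driven, assuming the stock `sqlparse.engine.FilterStack` with `enable_grouping()` and default filter lists (the SQL string is illustrative).

    # Minimal sketch: drive FilterStack.run() directly. enable_grouping()
    # sets the _grouping flag checked inside run(), so each yielded
    # Statement has already been passed through grouping.group().
    from sqlparse.engine import FilterStack

    stack = FilterStack()
    stack.enable_grouping()

    for stmt in stack.run("SELECT id FROM users; UPDATE users SET active = 1;"):
        print(stmt.get_type(), "->", str(stmt).strip())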
Example #4
    def run(self, sql, encoding=None):
        stream = lexer.tokenize(sql, encoding)
        # Process token stream
        for filter_ in self.preprocess:
            stream = filter_.process(stream)

        stream = StatementSplitter().process(stream)

        # Output: Stream processed Statements
        for stmt in stream:
            if self._grouping:
                stmt = grouping.group(stmt)

            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield stmt
Example #5
    def run(self, sql, encoding=None):
        stream = lexer.tokenize(sql, encoding)
        # Process token stream
        for filter_ in self.preprocess:
            stream = filter_.process(stream)

        # the stream contains all tokens as flat leaves
        stream = StatementSplitter().process(stream)

        # Output: Stream processed Statements
        # grouping now builds semantically identifiable groups
        for stmt in stream:
            if self._grouping:
                stmt = grouping.group(stmt, self.query_reduction)
            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield stmt
Example #6
    def run(self, sql, encoding=None):
        stream = lexer.tokenize(
            sql, encoding)  # the lexer emits a stream where every item is a (tokentype, value) pair
        # Process token stream
        # iterate over the filters in the preprocess list
        for filter_ in self.preprocess:
            stream = filter_.process(stream)  # each preprocess filter transforms the stream

        stream = StatementSplitter().process(stream)  # organize the preprocessed result into statements

        # Output: Stream processed Statements
        # apply statement-level processing and postprocessing
        for stmt in stream:
            if self._grouping:
                stmt = grouping.group(stmt)

            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield stmt
Example #7
    def run(self, sql, encoding=None):
        stream = ELexer().get_tokens(sql, encoding)
        # Process token stream
        for filter_ in self.preprocess:
            stream = filter_.process(stream)

        splitter = EStatementSplitter()
        stream = splitter.process(stream)

        # Output: Stream processed Statements
        for params in stream:
            pos, stmt = params
            origin_stmt = str(stmt)
            if self._grouping:
                stmt = grouping.group(stmt)

            for filter_ in self.stmtprocess:
                filter_.process(stmt)

            for filter_ in self.postprocess:
                stmt = filter_.process(stmt)

            yield pos, stmt, origin_stmt
Example #8
 def _group(stream):
     # modified by rrana
     pass
     for stmt in stream:
         grouping.group(stmt) 
         yield stmt
Example #9
 def _group(stream):
     for stmt in stream:
         grouping.group(stmt)
         yield stmt
Example #10
 def _group_token(self, statement):
     if self._grouping:
         grouping.group(statement, self.grouping_funcs)
     return statement
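Most callers never touch `grouping.group()` or these private helpers directly; sqlparse's top-level API builds the same lexer → splitter → grouping pipeline internally. The sketch below shows the visible effect of the grouping step on the token tree (the token classes named in the comment are typical, not exhaustive).

    # Minimal sketch: sqlparse.parse() runs the pipeline shown in the
    # examples above and returns grouped Statement objects, so the token
    # tree contains composite nodes rather than only flat leaf tokens.
    import sqlparse

    stmt = sqlparse.parse("SELECT a, b FROM t WHERE a > 1")[0]
    for tok in stmt.tokens:
        print(type(tok).__name__, repr(str(tok)))
    # Typical output includes composite classes such as IdentifierList
    # (for "a, b") and Where (for the WHERE clause).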