def includefile(self, path):
    """Parse the file at *path* and yield every token it produces.

    The file's text is run through a fresh ``FilterStack`` that carries a
    nested ``IncludeStatement`` with one less recursion level, so INCLUDE
    statements inside the included file are expanded as well.
    """
    with open(path) as sql_file:
        included_sql = sql_file.read()
    # Build a dedicated stack for the included file and forward its tokens.
    nested_filter = IncludeStatement(self.dirpaths,
                                     self.maxRecursive - 1,
                                     self.raiseexceptions)
    nested_stack = FilterStack()
    nested_stack.preprocess.append(nested_filter)
    for token in nested_stack.run(included_sql):
        yield token
def process(self, stack, stream):
    """Expand INCLUDE statements found in *stream*, yielding tokens.

    An ``INCLUDE 'file'`` statement is replaced by the tokens of the named
    file (parsed recursively through a fresh ``FilterStack``); every other
    token is passed through unchanged.
    """
    # Run over all tokens in the stream
    for token_type, value in stream:
        # INCLUDE statement found, set detected mode
        if token_type in Name and value.upper() == 'INCLUDE':
            self.detected = True
            continue

        # INCLUDE statement was found, parse it
        elif self.detected:
            # Omit whitespaces between INCLUDE and the path token
            if token_type in Whitespace:
                continue

            # Get path of file to include (value is quoted -- strip quotes)
            path = None
            if token_type in String.Symbol:
                path = join(self.dirpath, value[1:-1])

            # Include file if path was found
            if path:
                try:
                    # "with" closes the handle even when read() raises
                    # (the original leaked it on a failed read)
                    with open(path) as f:
                        raw_sql = f.read()
                except IOError as err:
                    # Yield the actual error message; the original used
                    # sys.exc_info()[0], which is the exception *class*,
                    # not the error instance.
                    yield Comment, u'-- IOError: %s\n' % err
                else:
                    # Create new FilterStack to parse the included file
                    # and add all its tokens to the main stack recursively
                    # [ToDo] Add maximum recursive iteration value
                    stack = FilterStack()
                    stack.preprocess.append(IncludeStatement(self.dirpath))
                    for tv in stack.run(raw_sql):
                        yield tv

                # Set normal mode
                self.detected = False

            # Don't include any token while in detected mode
            continue

        # Normal token
        yield token_type, value
def get_sqlparse_stack(params_register, get_macro):
    """
    Get an sqlparse filter stack for Cerebrum SQL.

    .. warning::
        The resulting filter stack is stateful -- it should only be used
        to parse a single statement!
    """
    stack = FilterStack()

    # Preprocess filters, in the order they must run:
    preprocess_filters = (
        # Translate macros from Name to MacroToken -- must come first
        IdentifyMacroFilter(),
        # Translate identified MacroToken values into valid SQL
        ProcessMacroFilter(get_macro),
        # TODO: Remove when we are sure no queries will fail
        _FindWhitespaceErrors(log=True, fix=True),
        # Translate placeholders into the proper paramstyle, and register
        # placeholder names.
        TranslatePlaceholderFilter(params_register),
    )
    for filt in preprocess_filters:
        stack.preprocess.append(filt)

    # NOTE: Do not enable grouping, it'll double the cost of stack.run().
    #
    # Observe that other filters may *require* grouping -- using
    # build_filter_stack may cause grouping to be enabled!
    #
    # stack.enable_grouping()
    return stack
def process(self, stack, stream):
    """Expand INCLUDE statements found in *stream*, yielding tokens.

    An ``INCLUDE 'file'`` statement is replaced by the tokens of the named
    file, parsed recursively with a decremented recursion budget.  Errors
    either propagate (``self.raiseexceptions``) or become SQL comments.
    """
    # Run over all tokens in the stream
    for token_type, value in stream:
        # INCLUDE statement found, set detected mode
        if token_type in Name and value.upper() == 'INCLUDE':
            self.detected = True
            continue

        # INCLUDE statement was found, parse it
        elif self.detected:
            # Omit whitespaces
            if token_type in Whitespace:
                continue

            # Found file path to include
            if token_type in String.Symbol:
                # Get path of file to include (strip surrounding quotes)
                path = join(self.dirpath, value[1:-1])

                try:
                    # "with" closes the handle even when read() raises
                    # (the original leaked it on a failed read)
                    with open(path) as f:
                        raw_sql = f.read()

                # There was a problem loading the include file
                except IOError as err:
                    # Raise the exception to the interpreter
                    if self.raiseexceptions:
                        raise

                    # Put the exception as a comment on the SQL code
                    yield Comment, u'-- IOError: %s\n' % err

                else:
                    # Create new FilterStack to parse readed file
                    # and add all its tokens to the main stack recursively
                    try:
                        filtr = IncludeStatement(self.dirpath,
                                                 self.maxRecursive - 1,
                                                 self.raiseexceptions)

                    # Max recursion limit reached
                    except ValueError as err:
                        # Raise the exception to the interpreter
                        if self.raiseexceptions:
                            raise

                        # Put the exception as a comment on the SQL code
                        yield Comment, u'-- ValueError: %s\n' % err

                    else:
                        # Only run the nested stack when the filter was
                        # built -- the original fell through here with
                        # `filtr` unbound and raised NameError.
                        stack = FilterStack()
                        stack.preprocess.append(filtr)
                        for tv in stack.run(raw_sql):
                            yield tv

                # Set normal mode
                self.detected = False

            # Don't include any token while in detected mode
            continue

        # Normal token -- the original variant dropped these entirely,
        # unlike its sibling implementations; pass them through.
        yield token_type, value
def process(self, stack, stream):
    """Expand INCLUDE statements found in *stream*, yielding tokens.

    An ``INCLUDE 'file'`` statement is replaced by the tokens of the named
    file, parsed recursively with a decremented recursion budget.  Errors
    either propagate (``self.raiseexceptions``) or become SQL comments.
    """
    # Run over all tokens in the stream
    for token_type, value in stream:
        # INCLUDE statement found, set detected mode
        if token_type in Name and value.upper() == "INCLUDE":
            self.detected = True
            continue

        # INCLUDE statement was found, parse it
        elif self.detected:
            # Omit whitespaces
            if token_type in Whitespace:
                continue

            # Found file path to include
            if token_type in String.Symbol:
                # Get path of file to include (strip surrounding quotes)
                path = join(self.dirpath, value[1:-1])

                try:
                    # "with" closes the handle even when read() raises
                    # (the original leaked it on a failed read)
                    with open(path) as f:
                        raw_sql = f.read()

                # There was a problem loading the include file
                except IOError as err:
                    # Raise the exception to the interpreter
                    if self.raiseexceptions:
                        raise

                    # Put the exception as a comment on the SQL code
                    yield Comment, "-- IOError: %s\n" % err

                else:
                    # Create new FilterStack to parse readed file
                    # and add all its tokens to the main stack recursively
                    try:
                        filtr = IncludeStatement(self.dirpath,
                                                 self.maxRecursive - 1,
                                                 self.raiseexceptions)

                    # Max recursion limit reached
                    except ValueError as err:
                        # Raise the exception to the interpreter
                        if self.raiseexceptions:
                            raise

                        # Put the exception as a comment on the SQL code
                        yield Comment, "-- ValueError: %s\n" % err

                    else:
                        # Only run the nested stack when the filter was
                        # built -- the original fell through here with
                        # `filtr` unbound and raised NameError.
                        stack = FilterStack()
                        stack.preprocess.append(filtr)
                        for tv in stack.run(raw_sql):
                            yield tv

                # Set normal mode
                self.detected = False

            # Don't include any token while in detected mode
            continue

        # Normal token
        yield token_type, value