Example #1
0
    def tokens(self) -> List[SQLToken]:
        """
        Tokenizes the query.

        Returns a flat list of SQLToken objects wrapping sqlparse tokens,
        built lazily on first access and cached in ``self._tokens``.
        Side effect: resolving ``self.query_type`` at the end (see the
        trailing comment) so unsupported queries are rejected early.
        """
        # return the cached result if we already tokenized this query
        if self._tokens is not None:
            return self._tokens

        parsed = sqlparse.parse(self._query)
        tokens = []
        # handle empty queries (#12)
        if not parsed:
            return tokens
        self._get_sqlparse_tokens(parsed)
        last_keyword = None
        combine_flag = False
        for index, tok in enumerate(self.non_empty_tokens):
            # combine dot separated identifiers (e.g. ``schema.table.column``):
            # skip the leading parts here and merge them into the next
            # non-part token via _combine_qualified_names below
            if self._is_token_part_of_complex_identifier(token=tok, index=index):
                combine_flag = True
                continue
            token = SQLToken(
                tok=tok,
                index=index,
                subquery_level=self._subquery_level,
                last_keyword=last_keyword,
            )
            if combine_flag:
                # previous raw tokens were parts of a qualified name —
                # fold them into this token's value
                self._combine_qualified_names(index=index, token=token)
                combine_flag = False

            # maintain a doubly-linked chain of tokens; the very first
            # token gets the EmptyToken sentinel as its predecessor
            previous_token = tokens[-1] if index > 0 else EmptyToken
            token.previous_token = previous_token
            previous_token.next_token = token if index > 0 else None

            # classify parentheses and let the helpers decide whether they
            # open/close a subquery, a function call, a column list, etc.
            if token.is_left_parenthesis:
                token.token_type = TokenType.PARENTHESIS
                self._determine_opening_parenthesis_type(token=token)
            elif token.is_right_parenthesis:
                token.token_type = TokenType.PARENTHESIS
                self._determine_closing_parenthesis_type(token=token)

            # track the last keyword that matters for later column/table
            # attribution (e.g. SELECT, FROM, WHERE)
            last_keyword = self._determine_last_relevant_keyword(
                token=token, last_keyword=last_keyword
            )
            token.is_in_nested_function = self._is_in_nested_function
            token.parenthesis_level = self._parenthesis_level
            tokens.append(token)

        self._tokens = tokens
        # since tokens are used in all methods required parsing (so w/o generalization)
        # we set the query type here (and not in init) to allow for generalization
        # but disallow any other usage for not supported queries to avoid unexpected
        # results which are not really an error
        _ = self.query_type
        return tokens
Example #2
0
 def _handle_column_save(self, token: SQLToken, columns: List[str]):
     """
     Resolve a column token to fully-qualified name(s) and record it.

     If the name refers to an already-resolved WITH query it is treated
     as a column alias instead of a real column.
     """
     resolved = token.table_prefixed_column(self.tables_aliases)
     # names pointing at a resolved WITH query are aliases, not columns
     if self._is_with_query_already_resolved(resolved):
         self._add_to_columns_aliases_subsection(token=token, left_expand=False)
         token.token_type = TokenType.COLUMN_ALIAS
         return
     resolved = self._resolve_sub_queries(resolved)
     self._add_to_columns_with_tables(token, resolved)
     self._add_to_columns_subsection(
         keyword=token.last_keyword_normalized, column=resolved
     )
     token.token_type = TokenType.COLUMN
     columns.extend(resolved)
Example #3
0
 def _handle_column_alias_subquery_level_update(self, token: SQLToken) -> None:
     """
     Mark the token as a column alias and remember the deepest
     subquery level at which this alias has been seen so far.
     """
     token.token_type = TokenType.COLUMN_ALIAS
     self._add_to_columns_aliases_subsection(token=token)
     levels = self._column_aliases_max_subquery_level
     # setdefault seeds unseen aliases at level 0, then keep the maximum
     if token.subquery_level > levels.setdefault(token.value, 0):
         levels[token.value] = token.subquery_level
Example #4
0
 def _handle_with_name_save(token: SQLToken, with_names: List[str]) -> None:
     """
     Register a WITH (CTE) name.

     Handles both the plain form ``WITH name AS (...)`` and the
     column-list form ``WITH name (col1, col2) AS (...)``, where the
     name token sits just before the opening parenthesis.
     """
     if not token.is_right_parenthesis:
         # plain form: the token itself is the WITH name
         token.token_type = TokenType.WITH_NAME
         with_names.append(token.value)
         return
     # closing parenthesis of the column list of a with statement,
     # like: with (col1, col2) as (subquery)
     token.is_with_columns_end = True
     token.is_nested_function_end = False
     opening = token.find_nearest_token("(")
     opening.is_with_columns_start = True
     opening.is_nested_function_start = False
     # the WITH name immediately precedes the column-list parenthesis
     name_token = opening.previous_token
     name_token.token_type = TokenType.WITH_NAME
     with_names.append(name_token.value)