Code Example #1
File: peewee_mssql.py Project: ribx/peewee-mssql
    def generate_select(self, query, alias_map=None):
        model = query.model_class
        db = model._meta.database

        alias_map = self.calculate_alias_map(query, alias_map)

        if isinstance(query, CompoundSelect):
            clauses = [_StripParens(query)]
        else:
            if not query._distinct:
                clauses = [SQL('SELECT')]
            else:
                clauses = [SQL('SELECT DISTINCT')]
                if query._distinct not in (True, False):
                    clauses += [SQL('ON'), EnclosedClause(*query._distinct)]

            # SQL Server has no LIMIT clause; emit TOP right after SELECT,
            # falling back to db.limit_max when only an offset is given.
            if query._limit is not None or (query._offset and db.limit_max):
                limit = query._limit if query._limit is not None else db.limit_max
                clauses.append(SQL('TOP %s' % limit))

            select_clause = Clause(*query._select)
            select_clause.glue = ', '

            clauses.extend((select_clause, SQL('FROM')))
            if query._from is None:
                clauses.append(model.as_entity().alias(alias_map[model]))
            else:
                clauses.append(CommaClause(*query._from))

        if query._windows is not None:
            clauses.append(SQL('WINDOW'))
            clauses.append(
                CommaClause(*[
                    Clause(SQL(window._alias), SQL('AS'), window.__sql__())
                    for window in query._windows
                ]))

        join_clauses = self.generate_joins(query._joins, model, alias_map)
        if join_clauses:
            clauses.extend(join_clauses)

        if query._where is not None:
            clauses.extend([SQL('WHERE'), query._where])

        if query._group_by:
            clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])

        if query._having:
            clauses.extend([SQL('HAVING'), query._having])

        if query._order_by:
            clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])

        # OFFSET is not supported by this compiler.

        if query._for_update:
            clauses.append(SQL(query._for_update))

        return self.build_query(clauses, alias_map)
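
This generate_select() override targets SQL Server, which has no LIMIT clause, so the compiler emits TOP n immediately after SELECT and skips OFFSET entirely. A minimal sketch of how such a compiler might be plugged into a database class, assuming peewee 2.x's compiler_class hook; the class names below are illustrative and the connection plumbing is omitted, none of it is taken from the project above.

# Sketch only (peewee 2.x): a Database subclass selects the custom compiler
# through the compiler_class attribute. Class names here are illustrative.
from peewee import Database, QueryCompiler

class MssqlQueryCompiler(QueryCompiler):
    """Would hold the generate_select() override shown above."""

class MssqlDatabase(Database):
    compiler_class = MssqlQueryCompiler  # peewee builds its compiler from this

    def _connect(self, database, **kwargs):
        # Driver wiring (e.g. pymssql or pyodbc) is deliberately left out here.
        raise NotImplementedError('connection setup omitted in this sketch')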
Code Example #2
File: migrate.py Project: rickstoolbox2014/peewee
    def _update_column(self, table, column_to_update, fn):
        # Get the SQL used to create the given table.
        create_table = self._get_create_table(table)

        # Parse out the `CREATE TABLE` and column list portions of the query.
        raw_create, raw_columns = self.column_re.search(create_table).groups()

        # Clean up the individual column definitions.
        column_defs = [
            col.strip() for col in self.column_split_re.findall(raw_columns)
        ]

        new_column_defs = []
        new_column_names = []
        original_column_names = []

        for column_def in column_defs:
            column_name, = self.column_name_re.match(column_def).groups()

            if column_name == column_to_update:
                new_column_def = fn(column_name, column_def)
                if new_column_def:
                    new_column_defs.append(new_column_def)
                    original_column_names.append(column_name)
                    column_name, = self.column_name_re.match(
                        new_column_def).groups()
                    new_column_names.append(column_name)
            else:
                new_column_defs.append(column_def)
                if not column_name.lower().startswith(('foreign', 'primary')):
                    new_column_names.append(column_name)
                    original_column_names.append(column_name)

        # Update the name of the new CREATE TABLE query.
        temp_table = table + '__tmp__'
        create = re.sub('("?)%s("?)' % table, '\\1%s\\2' % temp_table,
                        raw_create)

        # Create the new table.
        columns = ', '.join(new_column_defs)
        queries = [
            Clause(SQL('DROP TABLE IF EXISTS'), Entity(temp_table)),
            SQL('%s (%s)' % (create.strip(), columns))
        ]

        # Populate new table.
        populate_table = Clause(
            SQL('INSERT INTO'), Entity(temp_table),
            EnclosedClause(*[Entity(col) for col in new_column_names]),
            SQL('SELECT'),
            CommaClause(*[Entity(col) for col in original_column_names]),
            SQL('FROM'), Entity(table))
        queries.append(populate_table)

        # Drop existing table and rename temp table.
        queries.append(Clause(SQL('DROP TABLE'), Entity(table)))
        queries.append(self.rename_table(temp_table, table))

        return queries
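
SQLite's ALTER TABLE cannot modify or drop a column, so _update_column() rebuilds the table under a temporary name, copies the surviving columns across, drops the original and renames the copy back. The fn callback decides what happens to the target column: it receives the column name and its definition and returns a replacement definition, or a falsy value to drop the column. A hedged sketch of callbacks in that shape follows; these helpers and the final call are illustrative, the real migrator wraps them in its own public operations.

# Illustrative callbacks in the shape _update_column() expects: each takes
# (column_name, column_definition) and returns the new definition, or None
# to drop the column from the rebuilt table.
def drop(column_name, column_def):
    return None  # column is omitted from the new CREATE TABLE

def rename_to(new_name):
    def _rename(column_name, column_def):
        # Swap only the leading identifier, keeping type and constraints.
        return column_def.replace(column_name, new_name, 1)
    return _rename

# Hypothetical call: migrator._update_column('users', 'email', rename_to('email_address'))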
Code Example #3
File: peewee_ext.py Project: somair/halo
    def sql(self):
        # Compile the plain INSERT statement first.
        query, params = super(InsertUpdateQuery, self).sql()

        # Build "field = value" expressions for the ON DUPLICATE KEY UPDATE tail.
        compiler = self.compiler()
        update = []
        for field, value in compiler._sorted_fields(self._update):
            if not isinstance(value, (Node, Model)):
                value = Param(value)  # wrap plain values so they are parameterized
            update.append(Expression(field, OP_EQ, value, flat=True))

        # Render the update pairs and append them to the compiled INSERT.
        update_query, update_params = compiler.build_query(
            [CommaClause(*update)])
        query = '%s ON DUPLICATE KEY UPDATE %s' % (query, update_query)
        params.extend(update_params)

        return (query, params)
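
The override compiles the plain INSERT first, then string-appends a MySQL ON DUPLICATE KEY UPDATE tail built from the _update field map. The same tail can also be expressed as a single peewee node; a sketch follows, assuming a peewee 2.x release where OP_EQ is still a module-level constant (as the snippet itself assumes; later 2.x spells it OP.EQ).

# Sketch of the clause-building pattern above, outside any query class.
# Assumes an older peewee 2.x where OP_EQ exists at module level.
from peewee import SQL, Param, Clause, CommaClause, Expression, OP_EQ

def duplicate_key_update(field_value_pairs):
    # Build the "col = %s, col = %s, ..." tail of a MySQL upsert.
    pairs = [Expression(field, OP_EQ, Param(value), flat=True)
             for field, value in field_value_pairs]
    return Clause(SQL('ON DUPLICATE KEY UPDATE'), CommaClause(*pairs))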
Code Example #4
    def _update_column(self, table, column_to_update, fn):
        columns = set(column.name.lower()
                      for column in self.database.get_columns(table))
        if column_to_update.lower() not in columns:
            raise ValueError('Column "%s" does not exist on "%s"' %
                             (column_to_update, table))

        # Get the SQL used to create the given table.
        table, create_table = self._get_create_table(table)

        # Get the indexes and SQL to re-create indexes.
        indexes = self.database.get_indexes(table)

        # Find any foreign keys we may need to remove.
        self.database.get_foreign_keys(table)

        # Parse out the `CREATE TABLE` and column list portions of the query.
        raw_create, raw_columns = self.column_re.search(create_table).groups()

        # Clean up the individual column definitions.
        column_defs = [
            col.strip() for col in self.column_split_re.findall(raw_columns)
        ]

        new_column_defs = []
        new_column_names = []
        original_column_names = []

        for column_def in column_defs:
            column_name, = self.column_name_re.match(column_def).groups()

            if column_name == column_to_update:
                new_column_def = fn(column_name, column_def)
                if new_column_def:
                    new_column_defs.append(new_column_def)
                    original_column_names.append(column_name)
                    column_name, = self.column_name_re.match(
                        new_column_def).groups()
                    new_column_names.append(column_name)
            else:
                new_column_defs.append(column_def)
                if not column_name.lower().startswith(('foreign', 'primary')):
                    new_column_names.append(column_name)
                    original_column_names.append(column_name)

        # Create a mapping of original columns to new columns.
        original_to_new = dict(zip(original_column_names, new_column_names))
        new_column = original_to_new.get(column_to_update)

        fk_filter_fn = lambda column_def: column_def
        if not new_column:
            # Remove any foreign keys associated with this column.
            fk_filter_fn = lambda column_def: None
        elif new_column != column_to_update:
            # Update any foreign keys for this column.
            fk_filter_fn = lambda column_def: self.fk_re.sub(
                'FOREIGN KEY ("%s") ' % new_column, column_def)

        cleaned_columns = []
        for column_def in new_column_defs:
            match = self.fk_re.match(column_def)
            if match is not None and match.groups()[0] == column_to_update:
                column_def = fk_filter_fn(column_def)
            if column_def:
                cleaned_columns.append(column_def)

        # Update the name of the new CREATE TABLE query.
        temp_table = table + '__tmp__'
        rgx = re.compile('("?)%s("?)' % table, re.I)
        create = rgx.sub('\\1%s\\2' % temp_table, raw_create)

        # Create the new table.
        columns = ', '.join(cleaned_columns)
        queries = [
            Clause(SQL('DROP TABLE IF EXISTS'), Entity(temp_table)),
            SQL('%s (%s)' % (create.strip(), columns))
        ]

        # Populate new table.
        populate_table = Clause(
            SQL('INSERT INTO'), Entity(temp_table),
            EnclosedClause(*[Entity(col) for col in new_column_names]),
            SQL('SELECT'),
            CommaClause(*[Entity(col) for col in original_column_names]),
            SQL('FROM'), Entity(table))
        queries.append(populate_table)

        # Drop existing table and rename temp table.
        queries.append(Clause(SQL('DROP TABLE'), Entity(table)))
        queries.append(self.rename_table(temp_table, table))

        # Re-create indexes.
        for index in indexes:
            # Auto-generated indexes in SQLite will not have associated SQL,
            # so pass over them.
            if not index.sql:
                continue

            if column_to_update in index.columns:
                if new_column:
                    queries.append(
                        SQL(index.sql.replace(column_to_update, new_column)))
            else:
                queries.append(SQL(index.sql))

        return queries
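
Callers do not normally invoke _update_column() directly; in playhouse.migrate it sits behind public operations such as rename_column() and drop_column(). A short usage sketch of that documented entry point (table and column names are made up):

# Usage sketch: the public playhouse.migrate API that funnels into
# _update_column() for SQLite. Table and column names are illustrative.
from peewee import SqliteDatabase
from playhouse.migrate import SqliteMigrator, migrate

db = SqliteDatabase('app.db')
migrator = SqliteMigrator(db)

migrate(
    migrator.rename_column('story', 'pub_date', 'publish_date'),
    migrator.drop_column('story', 'some_nullable_field'),
)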
Code Example #5
    def generate_select(self, query, alias_map=None):
        model = query.model_class
        db = model._meta.database

        alias_map = self.calculate_alias_map(query, alias_map)

        if isinstance(query, CompoundSelect):
            clauses = [_StripParens(query)]
        else:
            if not query._distinct:
                clauses = [SQL('SELECT')]
            else:
                clauses = [SQL('SELECT DISTINCT')]
                if query._distinct not in (True, False):
                    clauses += [SQL('ON'), EnclosedClause(*query._distinct)]

            if query._limit:
                clauses.append(SQL('TOP %s' % query._limit))

            select_clause = Clause(*query._select)
            select_clause.glue = ', '

            clauses.extend((select_clause, SQL('FROM')))
            if query._from is None:
                clauses.append(model.as_entity().alias(alias_map[model]))
            else:
                clauses.append(CommaClause(*query._from))

        # WINDOW clauses (OVER ...) are not handled, so none are emitted:
        # if query._windows is not None:
        #     clauses.append(SQL('WINDOW'))
        #     clauses.append(CommaClause(*[
        #         Clause(
        #             SQL(window._alias),
        #             SQL('AS'),
        #             window.__sql__())
        #         for window in query._windows]))

        join_clauses = self.generate_joins(query._joins, model, alias_map)
        if join_clauses:
            clauses.extend(join_clauses)

        if query._where is not None:
            clauses.extend([SQL('WHERE'), query._where])

        if query._group_by:
            clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])

        if query._having:
            clauses.extend([SQL('HAVING'), query._having])

        if query._order_by:
            clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])

        # if query._offset:
        #     clauses.append(SQL('OFFSET %s ROWS' % query._offset))
        #
        # if query._limit or (query._offset and db.limit_max):
        #     limit = query._limit or db.limit_max
        #     clauses.append(SQL('FETCH NEXT %s ROWS ONLY' % limit))

        if query._offset:
            raise NotImplementedError('OFFSET is not supported')

        # Locking semantics are not supported; SQL Server uses WITH (...) table
        # hints rather than FOR UPDATE, and those are not emitted here:
        # for_update, no_wait = query._for_update
        # if for_update:
        #     stmt = 'FOR UPDATE NOWAIT' if no_wait else 'FOR UPDATE'
        #     clauses.append(SQL(stmt))

        return self.build_query(clauses, alias_map)
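
This variant simply refuses OFFSET; the commented-out block above hints at the OFFSET ... ROWS FETCH NEXT ... ROWS ONLY form that SQL Server 2012+ supports. A hedged sketch of what enabling it could look like, not part of the original compiler: it only works after an ORDER BY, and the TOP clause emitted earlier would have to be skipped, because SQL Server rejects TOP combined with OFFSET/FETCH.

        # Sketch only (not from the original compiler): OFFSET/FETCH paging
        # for SQL Server 2012+. Requires an ORDER BY, and must replace the
        # TOP branch above, since TOP and OFFSET/FETCH cannot be combined.
        if query._offset:
            if not query._order_by:
                raise NotImplementedError(
                    'OFFSET requires an ORDER BY clause on SQL Server')
            clauses.append(SQL('OFFSET %s ROWS' % query._offset))
            if query._limit:
                clauses.append(SQL('FETCH NEXT %s ROWS ONLY' % query._limit))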