def _create_table(self, model_class, safe=False):
    """Build the CREATE TABLE clause tree for *model_class*.

    :param model_class: model whose table definition is compiled.
    :param safe: when True, emit ``CREATE TABLE IF NOT EXISTS``.
    :return: a ``Clause`` node representing the full CREATE TABLE statement.
    """
    meta = model_class._meta
    keyword = 'CREATE TABLE IF NOT EXISTS' if safe else 'CREATE TABLE'

    columns = []
    constraints = []

    # A composite primary key is expressed as a table-level constraint
    # listing each participating column.
    if meta.composite_key:
        pk_columns = [meta.fields[name].as_entity()
                      for name in meta.primary_key.field_names]
        constraints.append(
            Clause(SQL('PRIMARY KEY'), EnclosedClause(*pk_columns)))

    for field in meta.sorted_fields:
        columns.append(self.field_definition(field))
        ## No ForeignKeyField support
        #if isinstance(field, ForeignKeyField) and not field.deferred:
        #    constraints.append(self.foreign_key_constraint(field))

    # User-supplied constraints may be raw strings or Node instances;
    # wrap bare strings in SQL() so they compile uniformly.
    if model_class._meta.constraints:
        for constraint in model_class._meta.constraints:
            if not isinstance(constraint, Node):
                constraint = SQL(constraint)
            constraints.append(constraint)

    return Clause(
        SQL(keyword),
        model_class.as_entity(),
        EnclosedClause(*(columns + constraints)))
def add_foreign_key_constraint(self, table, column_name, rel, rel_column):
    """Return an ``ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY`` clause.

    :param table: table receiving the constraint.
    :param column_name: local column to constrain.
    :param rel: referenced (foreign) table name.
    :param rel_column: referenced column on *rel*.
    """
    # TODO: refactor, this duplicates QueryCompiler._create_foreign_key
    fk_name = 'fk_%s_%s_refs_%s' % (table, column_name, rel)
    return Clause(
        SQL('ALTER TABLE'), Entity(table),
        SQL('ADD CONSTRAINT'), Entity(fk_name),
        SQL('FOREIGN KEY'), EnclosedClause(Entity(column_name)),
        SQL('REFERENCES'), Entity(rel),
        EnclosedClause(Entity(rel_column)))
def _fts_cmd(cls, cmd, **extra_params):
    """Execute a full-text-search maintenance command against the FTS table.

    SQLite FTS extensions accept special commands via statements of the
    form ``INSERT INTO tbl(tbl[, key, ...]) VALUES (cmd[, value, ...])``
    (e.g. the ``optimize`` command). Keyword arguments become additional
    column/value pairs.

    Fix: removed the dead local ``inner_clause = EnclosedClause(tbl)``,
    which was computed and never used.

    :param cmd: the FTS command string to insert.
    :param extra_params: extra column=value pairs appended to the statement.
    :return: result of executing the clause on the model's database.
    """
    tbl = cls.as_entity()
    # First "column" is the table entity itself, per the FTS convention.
    columns = [tbl]
    values = [cmd]
    for key, value in extra_params.items():
        columns.append(Entity(key))
        values.append(value)
    clause = Clause(
        SQL('INSERT INTO'),
        cls.as_entity(),
        EnclosedClause(*columns),
        SQL('VALUES'),
        EnclosedClause(*values))
    return cls._meta.database.execute(clause)
def _add_restrict_foreign_key_constraint(
        self, table, column_name, rel, rel_column):
    """Return the node list for an ``ALTER TABLE ... ADD CONSTRAINT`` FK.

    Unlike ``add_foreign_key_constraint`` this returns a plain list of
    nodes rather than a wrapping ``Clause``.
    """
    fk_name = 'fk_%s_%s_refs_%s' % (table, column_name, rel)
    nodes = [SQL('ALTER TABLE'), Entity(table)]
    nodes += [SQL('ADD CONSTRAINT'), Entity(fk_name)]
    nodes += [SQL('FOREIGN KEY'), EnclosedClause(Entity(column_name))]
    nodes += [SQL('REFERENCES'), Entity(rel),
              EnclosedClause(Entity(rel_column))]
    return nodes
def add_index(self, table, columns, unique=False):
    """Return a ``CREATE [UNIQUE] INDEX`` clause for *columns* on *table*.

    :param table: table name the index is created on.
    :param columns: iterable of column names to index.
    :param unique: when True, create a UNIQUE index.
    """
    keyword = 'CREATE UNIQUE INDEX' if unique else 'CREATE INDEX'
    compiler = self.database.compiler()
    # The compiler derives a deterministic index name from table + columns.
    index_entity = Entity(compiler.index_name(table, columns))
    column_entities = [Entity(name) for name in columns]
    return Clause(
        SQL(keyword),
        index_entity,
        SQL('ON'),
        Entity(table),
        EnclosedClause(*column_entities))
def _update_column(self, table, column_to_update, fn):
    """Rebuild *table* with one column definition transformed by *fn*.

    SQLite cannot alter or drop columns in place, so this parses the
    table's original ``CREATE TABLE`` SQL, rewrites the column list, and
    copies the data through a temporary table that is then renamed back.

    :param table: name of the table being modified.
    :param column_to_update: name of the column *fn* is applied to.
    :param fn: callable ``fn(column_name, column_def)`` returning the new
        column definition, or a falsy value to drop the column entirely.
    :return: ordered list of query nodes to execute.
    """
    # Get the SQL used to create the given table.
    create_table = self._get_create_table(table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    column_defs = [
        col.strip() for col in self.column_split_re.findall(raw_columns)
    ]

    new_column_defs = []
    new_column_names = []
    original_column_names = []

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            # Apply the caller's transformation; a falsy result means
            # the column is dropped and omitted from both name lists.
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                # The transformation may also have renamed the column,
                # so re-parse the name from the new definition.
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Table-level FOREIGN KEY / PRIMARY KEY clauses are matched by
            # the column regex but are not real columns; keep their defs
            # but exclude them from the INSERT/SELECT column lists.
            if not column_name.lower().startswith(('foreign', 'primary')):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Update the name of the new CREATE TABLE query.
    # NOTE(review): the table name is substituted without re.escape and
    # case-sensitively — assumes table names contain no regex metachars.
    temp_table = table + '__tmp__'
    create = re.sub('("?)%s("?)' % table,
                    '\\1%s\\2' % temp_table,
                    raw_create)

    # Create the new table.
    columns = ', '.join(new_column_defs)
    queries = [
        Clause(SQL('DROP TABLE IF EXISTS'), Entity(temp_table)),
        SQL('%s (%s)' % (create.strip(), columns))
    ]

    # Populate new table by selecting the surviving columns from the
    # original (old names on the SELECT side, new names on the INSERT side).
    populate_table = Clause(
        SQL('INSERT INTO'), Entity(temp_table),
        EnclosedClause(*[Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaClause(*[Entity(col) for col in original_column_names]),
        SQL('FROM'), Entity(table))
    queries.append(populate_table)

    # Drop existing table and rename temp table.
    queries.append(Clause(SQL('DROP TABLE'), Entity(table)))
    queries.append(self.rename_table(temp_table, table))

    return queries
def alter_add_column(self, table, column_name, field):
    """Return an ``ALTER TABLE ... ADD COLUMN`` clause for *field*.

    :param table: table receiving the new column.
    :param column_name: database name to assign to the column.
    :param field: peewee field instance describing the column type.
    """
    # Newly added columns must initially permit NULLs (existing rows
    # have no value for them); temporarily flip the flag while the
    # column definition is compiled, then restore it.
    saved_null = field.null
    field.null = True
    field.name = field.db_column = column_name
    definition = self.database.compiler().field_definition(field)
    field.null = saved_null

    parts = [SQL('ALTER TABLE'), Entity(table), SQL('ADD COLUMN'), definition]

    # Foreign keys get an inline REFERENCES fragment.
    if isinstance(field, ForeignKeyField):
        parts.append(SQL('REFERENCES'))
        parts.append(Entity(field.rel_model._meta.db_table))
        parts.append(EnclosedClause(Entity(field.to_field.db_column)))

    return Clause(*parts)
def _update_column(self, table, column_to_update, fn):
    """Rebuild *table* with one column transformed by *fn*, preserving
    foreign keys and indexes where possible.

    SQLite cannot alter or drop columns in place, so this parses the
    table's original ``CREATE TABLE`` SQL, rewrites the column list,
    copies the data through a temporary table, renames it back, and
    finally re-creates any indexes that referenced the column.

    :param table: name of the table being modified.
    :param column_to_update: name of the column *fn* is applied to.
    :param fn: callable ``fn(column_name, column_def)`` returning the new
        column definition, or a falsy value to drop the column entirely.
    :raises ValueError: if *column_to_update* does not exist on *table*.
    :return: ordered list of query nodes to execute.
    """
    # Validate the column exists before doing any destructive work.
    columns = set(column.name.lower()
                  for column in self.database.get_columns(table))
    if column_to_update.lower() not in columns:
        raise ValueError('Column "%s" does not exist on "%s"' %
                         (column_to_update, table))

    # Get the SQL used to create the given table.
    table, create_table = self._get_create_table(table)

    # Get the indexes and SQL to re-create indexes.
    indexes = self.database.get_indexes(table)

    # Find any foreign keys we may need to remove.
    # NOTE(review): the return value is discarded — this call appears to
    # have no effect here; confirm whether it can be removed.
    self.database.get_foreign_keys(table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    column_defs = [
        col.strip() for col in self.column_split_re.findall(raw_columns)
    ]

    new_column_defs = []
    new_column_names = []
    original_column_names = []

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            # Apply the caller's transformation; a falsy result means
            # the column is dropped and omitted from both name lists.
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                # The transformation may also have renamed the column,
                # so re-parse the name from the new definition.
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Table-level FOREIGN KEY / PRIMARY KEY clauses are matched by
            # the column regex but are not real columns; keep their defs
            # but exclude them from the INSERT/SELECT column lists.
            if not column_name.lower().startswith(('foreign', 'primary')):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Create a mapping of original columns to new columns.
    original_to_new = dict(zip(original_column_names, new_column_names))
    new_column = original_to_new.get(column_to_update)

    # Decide how table-level FOREIGN KEY clauses referencing the updated
    # column are handled: kept as-is, dropped (column removed), or
    # rewritten to point at the column's new name.
    fk_filter_fn = lambda column_def: column_def
    if not new_column:
        # Remove any foreign keys associated with this column.
        fk_filter_fn = lambda column_def: None
    elif new_column != column_to_update:
        # Update any foreign keys for this column.
        fk_filter_fn = lambda column_def: self.fk_re.sub(
            'FOREIGN KEY ("%s") ' % new_column,
            column_def)

    cleaned_columns = []
    for column_def in new_column_defs:
        match = self.fk_re.match(column_def)
        if match is not None and match.groups()[0] == column_to_update:
            column_def = fk_filter_fn(column_def)
        if column_def:
            cleaned_columns.append(column_def)

    # Update the name of the new CREATE TABLE query.
    temp_table = table + '__tmp__'
    # Case-insensitive substitution of the (optionally quoted) table name.
    # NOTE(review): the table name is not re.escape()d — assumes table
    # names contain no regex metacharacters.
    rgx = re.compile('("?)%s("?)' % table, re.I)
    create = rgx.sub('\\1%s\\2' % temp_table, raw_create)

    # Create the new table.
    columns = ', '.join(cleaned_columns)
    queries = [
        Clause(SQL('DROP TABLE IF EXISTS'), Entity(temp_table)),
        SQL('%s (%s)' % (create.strip(), columns))
    ]

    # Populate new table by selecting the surviving columns from the
    # original (old names on the SELECT side, new names on the INSERT side).
    populate_table = Clause(
        SQL('INSERT INTO'), Entity(temp_table),
        EnclosedClause(*[Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaClause(*[Entity(col) for col in original_column_names]),
        SQL('FROM'), Entity(table))
    queries.append(populate_table)

    # Drop existing table and rename temp table.
    queries.append(Clause(SQL('DROP TABLE'), Entity(table)))
    queries.append(self.rename_table(temp_table, table))

    # Re-create indexes.
    for index in indexes:
        # Auto-generated indexes in SQLite will not have associated SQL,
        # so pass over them.
        if not index.sql:
            continue

        if column_to_update in index.columns:
            if new_column:
                # Plain string replacement of the old column name with the
                # new one inside the original index SQL.
                queries.append(
                    SQL(index.sql.replace(column_to_update, new_column)))
        else:
            queries.append(SQL(index.sql))

    return queries
def get_inline_fk_sql(self, field):
    """Return the inline ``REFERENCES table (column)`` fragment for *field*.

    :param field: foreign-key field whose target table/column are emitted.
    :return: list of nodes forming the REFERENCES fragment.
    """
    target_table = field.rel_model._meta.db_table
    target_column = field.to_field.db_column
    return [
        SQL('REFERENCES'),
        Entity(target_table),
        EnclosedClause(Entity(target_column)),
    ]
def generate_select(self, query, alias_map=None):
    """Compile *query* into a SELECT clause tree for this dialect.

    This dialect expresses LIMIT as ``TOP n``; OFFSET, window functions
    (``WINDOW`` / ``OVER``) and row locking (``FOR UPDATE``) are not
    supported — the relevant code paths are deliberately disabled below.

    :param query: the SelectQuery (or CompoundSelect) to compile.
    :param alias_map: optional pre-computed table alias mapping.
    :raises NotImplementedError: if the query specifies an OFFSET.
    :return: result of ``self.build_query`` over the assembled clauses.
    """
    model = query.model_class
    db = model._meta.database

    alias_map = self.calculate_alias_map(query, alias_map)

    if isinstance(query, CompoundSelect):
        # Compound selects (UNION etc.) are emitted without an extra
        # layer of parentheses.
        clauses = [_StripParens(query)]
    else:
        if not query._distinct:
            clauses = [SQL('SELECT')]
        else:
            clauses = [SQL('SELECT DISTINCT')]
            # A non-boolean _distinct holds explicit DISTINCT ON columns.
            if query._distinct not in (True, False):
                clauses += [SQL('ON'), EnclosedClause(*query._distinct)]

        # LIMIT is rendered as TOP in this dialect.
        if query._limit:
            clauses.append(SQL('TOP %s' % query._limit))

        select_clause = Clause(*query._select)
        select_clause.glue = ', '

        clauses.extend((select_clause, SQL('FROM')))
        if query._from is None:
            clauses.append(model.as_entity().alias(alias_map[model]))
        else:
            clauses.append(CommaClause(*query._from))

    # WINDOW semantic is ignored due to lack of knowledge (OVER ...)
    # if query._windows is not None:
    #     clauses.append(SQL('WINDOW'))
    #     clauses.append(CommaClause(*[
    #         Clause(
    #             SQL(window._alias),
    #             SQL('AS'),
    #             window.__sql__())
    #         for window in query._windows]))

    join_clauses = self.generate_joins(query._joins, model, alias_map)
    if join_clauses:
        clauses.extend(join_clauses)

    if query._where is not None:
        clauses.extend([SQL('WHERE'), query._where])

    if query._group_by:
        clauses.extend([SQL('GROUP BY'), CommaClause(*query._group_by)])

    if query._having:
        clauses.extend([SQL('HAVING'), query._having])

    if query._order_by:
        clauses.extend([SQL('ORDER BY'), CommaClause(*query._order_by)])

    # OFFSET/FETCH pagination is disabled; see the NotImplementedError below.
    # if query._offset:
    #     clauses.append(SQL('OFFSET %s ROWS' % query._offset))
    #
    # if query._limit or (query._offset and db.limit_max):
    #     limit = query._limit or db.limit_max
    #     clauses.append(SQL('FETCH NEXT %s ROWS ONLY' % limit))
    if query._offset:
        raise NotImplementedError('OFFSET is not supported')

    # No locking semantics supported due to lack of knowledge (WITH ...)
    # for_update, no_wait = query._for_update
    # if for_update:
    #     stmt = 'FOR UPDATE NOWAIT' if no_wait else 'FOR UPDATE'
    #     clauses.append(SQL(stmt))

    return self.build_query(clauses, alias_map)