def _fts_cmd_sql(cls, cmd, **extra_params):
    """Build the INSERT statement used to issue an FTS maintenance command.

    FTS virtual tables accept commands of the form
    ``INSERT INTO tbl(tbl) VALUES (cmd)``; any extra keyword parameters
    become additional column/value pairs on the same statement.
    """
    table_entity = cls._meta.entity
    # The table's own name doubles as the "command" column for FTS.
    columns = [table_entity] + [Entity(key) for key in extra_params]
    values = [cmd] + list(extra_params.values())
    return NodeList((SQL('INSERT INTO'), table_entity,
                     EnclosedNodeList(columns), SQL('VALUES'),
                     EnclosedNodeList(values)))
def add_index(self, table, columns, unique=False):
    """Return a context rendering CREATE [UNIQUE] INDEX over *columns*."""
    keyword = 'CREATE UNIQUE INDEX ' if unique else 'CREATE INDEX '
    index_name = make_index_name(table, columns)
    column_nodes = [Entity(column) for column in columns]
    ctx = self.make_context()
    ctx = ctx.literal(keyword).sql(Entity(index_name))
    ctx = ctx.literal(' ON ').sql(Entity(table))
    return ctx.sql(EnclosedNodeList(column_nodes))
def can_merge(self, username, ids=None):
    """Return mergeable (gacha, unit_id) pairs for *username*.

    A pair qualifies when the player holds at least MERGE_COUNT
    un-favorited copies; unit ids whose string form ends in '4' are
    excluded. When *ids* is given, only rows with those ids are counted.

    Returns an EnclosedNodeList wrapping a ValuesList of the qualifying
    pairs, or None when nothing can be merged.
    """
    pb = type(username, (PlayerBoxTable, ), {})

    # Build the row filter once instead of duplicating the whole query
    # across the ids / no-ids branches (previously copy-pasted).
    condition = ~pb.favorited
    if ids:
        condition &= (pb.id << ids)

    unit_id_rows = (pb
                    .select(pb.gacha, pb.unit_id)
                    .where(condition)
                    .group_by(pb.gacha, pb.unit_id)
                    .having(peewee.fn.COUNT(pb.unit_id) >= MERGE_COUNT))

    # NOTE(review): the '4'-suffix exclusion mirrors the original logic;
    # presumably it marks units that must not be merged — confirm.
    valid_units = [(row.gacha, row.unit_id) for row in unit_id_rows
                   if not str(row.unit_id).endswith('4')]
    if not valid_units:
        return
    return EnclosedNodeList([ValuesList(valid_units)])
def add_inline_fk_sql(self, ctx, field):
    """Append an inline REFERENCES clause for *field* to *ctx*, including
    ON DELETE / ON UPDATE actions when configured on the field."""
    rel_table = field.rel_model._meta.table_name
    rel_column = field.rel_field.column_name
    ctx = ctx.literal(' REFERENCES ')
    ctx = ctx.sql(Entity(rel_table)).literal(' ')
    ctx = ctx.sql(EnclosedNodeList((Entity(rel_column),)))
    # Referential actions are only emitted when explicitly set.
    for action, template in ((field.on_delete, ' ON DELETE %s'),
                             (field.on_update, ' ON UPDATE %s')):
        if action is not None:
            ctx = ctx.literal(template % action)
    return ctx
def merge(self, username, ids=None):
    """Merge duplicate units in *username*'s box.

    For every (gacha, unit_id) group returned by can_merge(), each batch of
    MERGE_COUNT copies is replaced by one "merged" unit whose id is
    ``round(uid + 0.1, 1)``; the consumed copies are deleted.

    Returns the number of deleted rows, or 0 when nothing is mergeable.
    """
    pb = type(username, (PlayerBoxTable, ), {})
    valid_units = self.can_merge(username, ids)
    if not valid_units:
        return 0
    # Select every row belonging to a mergeable (gacha, unit_id) pair,
    # optionally restricted to the explicitly supplied row ids.
    if ids:
        to_merge = (pb.select().where(
            (Tuple(pb.gacha, pb.unit_id).in_(valid_units)) &
            (pb.id << ids)).order_by(pb.unit_id))
    else:
        to_merge = (pb.select().where(
            Tuple(pb.gacha, pb.unit_id).in_(valid_units)).order_by(pb.unit_id))
    # Group rows as {gacha: {unit_id: [rows]}}. The run-detection via
    # curr_id relies on the ORDER BY unit_id above keeping equal
    # (gacha, unit_id) rows adjacent.
    grouped_units = {}
    curr_unit = []
    curr_id = None
    for row in to_merge:
        if row.gacha not in grouped_units:
            grouped_units[row.gacha] = {}
        if curr_id != (row.gacha, row.unit_id):
            if curr_unit:
                grouped_units[curr_id[0]][curr_id[1]] = curr_unit
            curr_unit = []
            curr_id = (row.gacha, row.unit_id)
        curr_unit.append(row)
    # Flush the final run (the loop only flushes on a key change).
    if curr_unit:
        grouped_units[curr_id[0]][curr_id[1]] = curr_unit
    # For each group, compute how many whole merges fit, queue the merged
    # unit ids to add, and mark exactly num_merged * MERGE_COUNT source
    # rows for deletion (leftovers below a full batch are kept).
    to_add = {}
    to_delete = []
    for gacha in grouped_units:
        to_add[gacha] = []
        for uid in grouped_units[gacha]:
            num_merged = len(grouped_units[gacha][uid]) // MERGE_COUNT
            # Merged units are tagged by appending .1 to the unit id.
            to_add[gacha] += [round(uid + 0.1, 1)] * num_merged
            for u in grouped_units[gacha][uid][:(num_merged * MERGE_COUNT)]:
                to_delete.append((gacha, u.id))
    add_query = []
    for gacha in to_add:
        # gen_unit_infos expands the raw unit ids into insertable rows.
        add_query += self.gachas[gacha].gen_unit_infos(to_add[gacha])
    delete_query = EnclosedNodeList([ValuesList(to_delete)])
    # Insert the merged units first, then remove the consumed copies.
    pb.insert_many(add_query).execute()
    return pb.delete().where(Tuple(pb.gacha, pb.id).in_(delete_query)).execute()
def ddl(self, ctx):
    """Extend the base column DDL with a generated-column clause:
    [GENERATED ALWAYS] AS (<expression>) <STORED|VIRTUAL>."""
    base_ddl = super().ddl(ctx)
    # Empty SQL node when not "always" so the keyword is simply omitted.
    generated_kw = SQL("GENERATED ALWAYS" if self.auto_gen_always else "")
    storage_kw = SQL(self.auto_gen_column_type.upper())
    expression = EnclosedNodeList([self.auto_gen_expression])
    return NodeList((base_ddl, generated_kw, SQL("AS"), expression,
                     storage_kw))
def add_foreign_key_constraint(self, table, column_name, rel, rel_column):
    """Render ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY (...) REFERENCES."""
    # TODO: refactor, this duplicates QueryCompiler._create_foreign_key
    constraint = 'fk_%s_%s_refs_%s' % (table, column_name, rel)
    ctx = self.make_context()
    ctx = ctx.literal('ALTER TABLE ').sql(Entity(table))
    ctx = ctx.literal(' ADD CONSTRAINT ').sql(Entity(constraint))
    ctx = ctx.literal(' FOREIGN KEY ')
    ctx = ctx.sql(EnclosedNodeList((Entity(column_name),)))
    ctx = ctx.literal(' REFERENCES ').sql(Entity(rel))
    return ctx.literal(' (').sql(Entity(rel_column)).literal(')')
def _create_virtual_table(self, safe=True, **options):
    """Build the CREATE VIRTUAL TABLE DDL for this model.

    Structure:
        CREATE VIRTUAL TABLE <model>
        USING <extension_module>
        ([prefix_arguments, ...] fields, ... [arguments, ...], [options...])
    """
    meta = self.model._meta
    options = self.model.clean_options(merge_dict(meta.options, options))

    ctx = self._create_context()
    ctx.literal('CREATE VIRTUAL TABLE ')
    if safe:
        ctx.literal('IF NOT EXISTS ')
    ctx.sql(self.model).literal(' USING ')

    ext_module = meta.extension_module
    if isinstance(ext_module, Node):
        # A Node renders the module (and any arguments) itself.
        return ctx.sql(ext_module)
    ctx.sql(SQL(ext_module)).literal(' ')

    arguments = []
    if meta.prefix_arguments:
        arguments.extend(SQL(a) for a in meta.prefix_arguments)

    # Constraints, data-types, foreign and primary keys are all omitted;
    # only the column name (plus optional UNINDEXED) is emitted.
    for field in meta.sorted_fields:
        if isinstance(field, RowIDField) or field._hidden:
            continue
        parts = [Entity(field.column_name)]
        if field.unindexed:
            parts.append(SQL('UNINDEXED'))
        arguments.append(NodeList(parts))

    if meta.arguments:
        arguments.extend(SQL(a) for a in meta.arguments)
    if options:
        arguments.extend(self._create_table_option_sql(options))
    return ctx.sql(EnclosedNodeList(arguments))
def add_foreign_key_constraint(self, table, column_name, rel, rel_column,
                               on_delete=None, on_update=None):
    """Render ALTER TABLE ... ADD CONSTRAINT ... FOREIGN KEY, optionally
    followed by ON DELETE / ON UPDATE actions."""
    constraint = 'fk_%s_%s_refs_%s' % (table, column_name, rel)
    ctx = self.make_context()
    ctx = ctx.literal('ALTER TABLE ').sql(Entity(table))
    ctx = ctx.literal(' ADD CONSTRAINT ').sql(Entity(constraint))
    ctx = ctx.literal(' FOREIGN KEY ')
    ctx = ctx.sql(EnclosedNodeList((Entity(column_name),)))
    ctx = ctx.literal(' REFERENCES ').sql(Entity(rel))
    ctx = ctx.literal(' (').sql(Entity(rel_column)).literal(')')
    # Referential actions are appended only when explicitly requested.
    if on_delete is not None:
        ctx = ctx.literal(' ON DELETE %s' % on_delete)
    if on_update is not None:
        ctx = ctx.literal(' ON UPDATE %s' % on_update)
    return ctx
def conflict_update(self, oc, query):
    """Translate an on-conflict resolution into CockroachDB-compatible SQL."""
    action = (oc._action or '').lower()
    if action in ('ignore', 'nothing'):
        clause = [SQL('ON CONFLICT')]
        target = oc._conflict_target
        if target:
            entities = [Entity(col) if isinstance(col, basestring) else col
                        for col in target]
            clause.append(EnclosedNodeList(entities))
        clause.append(SQL('DO NOTHING'))
        return NodeList(clause)
    if action in ('replace', 'upsert'):
        # No special stuff is necessary, this is just indicated by starting
        # the statement with UPSERT instead of INSERT.
        return
    if oc._conflict_constraint:
        raise ValueError('CockroachDB does not support the usage of a '
                         'constraint name. Use the column(s) instead.')
    return super(CockroachDatabase, self).conflict_update(oc, query)
def _update_column(self, table, column_to_update, fn):
    """Rebuild *table* with one column definition transformed by *fn*.

    SQLite lacks a general ALTER COLUMN, so the original CREATE TABLE SQL
    is parsed with regexes, *fn(column_name, column_def)* produces the new
    definition (or a falsy value to drop the column), and the table is
    recreated via a temp copy. Returns the list of queries to execute.

    Raises ValueError when *column_to_update* is not a column of *table*.
    """
    columns = set(column.name.lower()
                  for column in self.database.get_columns(table))
    if column_to_update.lower() not in columns:
        raise ValueError('Column "%s" does not exist on "%s"' %
                         (column_to_update, table))

    # Get the SQL used to create the given table.
    table, create_table = self._get_create_table(table)

    # Get the indexes and SQL to re-create indexes.
    indexes = self.database.get_indexes(table)

    # Find any foreign keys we may need to remove.
    # NOTE(review): the return value is discarded here — presumably
    # intentional (fk handling happens via fk_re below), but confirm.
    self.database.get_foreign_keys(table)

    # Make sure the create_table does not contain any newlines or tabs,
    # allowing the regex to work correctly.
    create_table = re.sub(r'\s+', ' ', create_table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    split_columns = self.column_split_re.findall(raw_columns)
    column_defs = [col.strip() for col in split_columns]

    new_column_defs = []
    new_column_names = []
    original_column_names = []

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            # Apply the user transform; a falsy result drops the column.
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                # Re-parse in case the transform renamed the column.
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Skip table-level FOREIGN/PRIMARY KEY clauses, which are not
            # real columns and must not appear in the column-name lists.
            if not column_name.lower().startswith(('foreign', 'primary')):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Create a mapping of original columns to new columns.
    original_to_new = dict(zip(original_column_names, new_column_names))
    new_column = original_to_new.get(column_to_update)

    fk_filter_fn = lambda column_def: column_def
    if not new_column:
        # Remove any foreign keys associated with this column.
        fk_filter_fn = lambda column_def: None
    elif new_column != column_to_update:
        # Update any foreign keys for this column.
        fk_filter_fn = lambda column_def: self.fk_re.sub(
            'FOREIGN KEY ("%s") ' % new_column,
            column_def)

    cleaned_columns = []
    for column_def in new_column_defs:
        match = self.fk_re.match(column_def)
        if match is not None and match.groups()[0] == column_to_update:
            # Dropped/renamed column: filter or rewrite its FK clause.
            column_def = fk_filter_fn(column_def)
        if column_def:
            cleaned_columns.append(column_def)

    # Update the name of the new CREATE TABLE query.
    temp_table = table + '__tmp__'
    rgx = re.compile('("?)%s("?)' % table, re.I)
    create = rgx.sub(
        '\\1%s\\2' % temp_table,
        raw_create)

    # Create the new table.
    columns = ', '.join(cleaned_columns)
    queries = [
        NodeList([SQL('DROP TABLE IF EXISTS'), Entity(temp_table)]),
        SQL('%s (%s)' % (create.strip(), columns))]

    # Populate new table.
    populate_table = NodeList((
        SQL('INSERT INTO'),
        Entity(temp_table),
        EnclosedNodeList([Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaNodeList([Entity(col) for col in original_column_names]),
        SQL('FROM'),
        Entity(table)))
    drop_original = NodeList([SQL('DROP TABLE'), Entity(table)])

    # Drop existing table and rename temp table.
    queries += [
        populate_table,
        drop_original,
        self.rename_table(temp_table, table)]

    # Re-create user-defined indexes. User-defined indexes will have a
    # non-empty SQL attribute.
    for index in filter(lambda idx: idx.sql, indexes):
        if column_to_update not in index.columns:
            queries.append(SQL(index.sql))
        elif new_column:
            sql = self._fix_index(index.sql, column_to_update, new_column)
            if sql is not None:
                queries.append(SQL(sql))

    return queries
def add_inline_fk_sql(self, ctx, field):
    """Append the inline REFERENCES table(column) clause for *field*."""
    rel_table = Entity(field.rel_model._meta.table_name)
    rel_column = Entity(field.rel_field.column_name)
    ctx = ctx.literal(' REFERENCES ').sql(rel_table)
    return ctx.literal(' ').sql(EnclosedNodeList((rel_column,)))
def add_unique(self, table, *column_names):
    """Add a UNIQUE constraint named uniq_<joined columns> to *table*."""
    constraint_name = 'uniq_%s' % '_'.join(column_names)
    entities = [Entity(column) for column in column_names]
    constraint = NodeList((SQL('UNIQUE'), EnclosedNodeList(entities)))
    return self.add_constraint(table, constraint_name, constraint)