def sql(self, column_name=None, is_null=None):
    """Render this column definition as a NodeList.

    Falls back to the column's own name and null-ness when the
    ``column_name`` / ``is_null`` overrides are not supplied.
    """
    effective_null = self.is_null if is_null is None else is_null
    name = self.name if column_name is None else column_name

    pieces = [Entity(name), SQL(self.definition)]
    if self.is_unique:
        pieces.append(SQL('UNIQUE'))
    pieces.append(SQL('NULL' if effective_null else 'NOT NULL'))
    if self.is_pk:
        pieces.append(SQL('PRIMARY KEY'))
    if self.extra:
        pieces.append(SQL(self.extra))
    return NodeList(pieces)
def search(search_str):
    """Full-text search of the note index; returns dict rows with a snippet."""
    # Normalize the query: expand column aliases, then OR-in synonyms.
    query_text = substitute_search_column_aliases(search_str)
    query_text = replace_synonyms(query_text)

    snippet_col = fn.snippet(Entity(NoteIndex._meta.table_name)).alias("snippet")
    # Search query in the FTS table, best bm25 matches first.
    with db.atomic():
        results = (NoteIndex
                   .select(NoteIndex, snippet_col)
                   .where(NoteIndex.match(query_text))
                   .order_by(NoteIndex.bm25())
                   .dicts())
    return results
def _create_virtual_table(self, safe=True, **options):
    """Build a CREATE VIRTUAL TABLE statement for the model.

    Structure:
      CREATE VIRTUAL TABLE <model>
      USING <extension_module>
      ([prefix_arguments, ...] fields, ... [arguments, ...], [options...])
    """
    merged = merge_dict(self.model._meta.options, options)
    options = self.model.clean_options(merged)

    ctx = self._create_context()
    ctx.literal('CREATE VIRTUAL TABLE ')
    if safe:
        ctx.literal('IF NOT EXISTS ')
    ctx.sql(self.model).literal(' USING ')

    meta = self.model._meta
    ext_module = meta.extension_module
    if isinstance(ext_module, Node):
        # The extension module is itself a SQL node; render it and stop.
        return ctx.sql(ext_module)

    ctx.sql(SQL(ext_module)).literal(' ')

    arguments = []
    if meta.prefix_arguments:
        arguments.extend(SQL(a) for a in meta.prefix_arguments)

    # Constraints, data-types, foreign and primary keys are all omitted.
    for field in meta.sorted_fields:
        if isinstance(field, RowIDField) or field._hidden:
            continue
        column = [Entity(field.column_name)]
        if field.unindexed:
            column.append(SQL('UNINDEXED'))
        arguments.append(NodeList(column))

    if meta.arguments:
        arguments.extend(SQL(a) for a in meta.arguments)
    if options:
        arguments.extend(self._create_table_option_sql(options))
    return ctx.sql(EnclosedNodeList(arguments))
def rename_column(self, table, old_name, new_name):
    """Rename a column via MySQL CHANGE, dropping/re-adding its FK if any."""
    foreign_keys = {fk.column: fk
                    for fk in self.database.get_foreign_keys(table)}
    column_def = self._get_column_definition(table, old_name)

    change = self._alter_table(self.make_context(), table)
    change.literal(' CHANGE ').sql(Entity(old_name)).literal(' ')
    change.sql(column_def.sql(column_name=new_name))

    if old_name not in foreign_keys:
        return change

    # The column participates in a FK constraint: the constraint must be
    # dropped before the rename and re-created against the new name.
    fk = foreign_keys[old_name]
    return [
        self.drop_foreign_key_constraint(table, old_name),
        change,
        self.add_foreign_key_constraint(table, new_name,
                                        fk.dest_table, fk.dest_column),
    ]
def conflict_update(self, oc, query):
    """Build the ON CONFLICT clause for CockroachDB INSERTs."""
    action = oc._action.lower() if oc._action else ''

    if action in ('ignore', 'nothing'):
        clause = [SQL('ON CONFLICT')]
        if oc._conflict_target:
            targets = [Entity(t) if isinstance(t, basestring) else t
                       for t in oc._conflict_target]
            clause.append(EnclosedNodeList(targets))
        clause.append(SQL('DO NOTHING'))
        return NodeList(clause)

    if action in ('replace', 'upsert'):
        # No special stuff is necessary, this is just indicated by starting
        # the statement with UPSERT instead of INSERT.
        return

    if oc._conflict_constraint:
        raise ValueError('CockroachDB does not support the usage of a '
                         'constraint name. Use the column(s) instead.')

    return super(CockroachDatabase, self).conflict_update(oc, query)
def add_not_null(self, table, column):
    """ALTER TABLE ... MODIFY the column to NOT NULL, re-creating its FK."""
    column_def = self._get_column_definition(table, column)

    modify = self.make_context()
    modify.literal('ALTER TABLE ').sql(Entity(table))
    modify.literal(' MODIFY ').sql(column_def.sql(is_null=False))

    foreign_keys = {fk.column: fk
                    for fk in self.database.get_foreign_keys(table)}
    if column not in foreign_keys:
        return modify

    # FK constraint must be dropped before MODIFY and restored afterwards.
    fk = foreign_keys[column]
    return (self.drop_foreign_key_constraint(table, column),
            modify,
            self.add_foreign_key_constraint(table, column,
                                            fk.dest_table, fk.dest_column))
def clean_options(cls, options):
    """Normalize virtual-table options for FTS models.

    - an empty-string ``content`` becomes a quoted empty string
      (content-less FTS table)
    - a Field ``content`` becomes a fully-qualified table.column Entity
    - a list/tuple ``prefix`` is comma-joined and single-quoted
    - ``tokenize`` is double-quoted for FTS5 only
    """
    content_opt = options.get('content')
    if isinstance(content_opt, basestring) and content_opt == '':
        # Special-case content-less full-text search tables.
        options['content'] = "''"
    elif isinstance(content_opt, Field):
        # Special-case to ensure fields are fully-qualified.
        options['content'] = Entity(content_opt.model._meta.table_name,
                                    content_opt.column_name)

    prefix_opt = options.get('prefix')
    if prefix_opt:
        if isinstance(prefix_opt, (list, tuple)):
            prefix_opt = ','.join(str(p) for p in prefix_opt)
        options['prefix'] = "'%s'" % prefix_opt.strip("' ")

    tokenize_opt = options.get('tokenize')
    if tokenize_opt and cls._meta.extension_module.lower() == 'fts5':
        # Tokenizers need to be in quoted string for FTS5, but not for FTS3
        # or FTS4.
        options['tokenize'] = '"%s"' % tokenize_opt

    return options
def add_not_null(self, table, column):
    """ALTER TABLE <table> MODIFY <column definition> as NOT NULL."""
    column_def = self._get_column_definition(table, column)
    return Clause(SQL('ALTER TABLE'), Entity(table), SQL('MODIFY'),
                  column_def.sql(is_null=False))
def drop_constraint(self, table, name):
    """ALTER TABLE <table> DROP CONSTRAINT <name>."""
    ctx = self._alter_table(self.make_context(), table)
    ctx.literal(' DROP CONSTRAINT ')
    return ctx.sql(Entity(name))
def add_unique(self, table, *column_names):
    """ADD CONSTRAINT uniq_<cols> UNIQUE (<cols>) on the given table."""
    name = 'uniq_%s' % '_'.join(column_names)
    cols = EnclosedNodeList([Entity(c) for c in column_names])
    return self.add_constraint(table, name, NodeList((SQL('UNIQUE'), cols)))
def add_constraint(self, table, name, constraint):
    """ALTER TABLE <table> ADD CONSTRAINT <name> <constraint>."""
    ctx = self._alter_table(self.make_context(), table)
    ctx.literal(' ADD CONSTRAINT ').sql(Entity(name))
    return ctx.literal(' ').sql(constraint)
def drop_foreign_key_constraint(self, table, column_name):
    """ALTER TABLE <table> DROP FOREIGN KEY <constraint-name>."""
    constraint = self.get_foreign_key_constraint(table, column_name)
    ctx = self.make_context().literal('ALTER TABLE ').sql(Entity(table))
    return ctx.literal(' DROP FOREIGN KEY ').sql(Entity(constraint))
def drop_index(self, table, index_name):
    """DROP INDEX <index_name> ON <table>."""
    ctx = self.make_context()
    ctx.literal('DROP INDEX ').sql(Entity(index_name))
    return ctx.literal(' ON ').sql(Entity(table))
def add_inline_fk_sql(self, ctx, field):
    """Append an inline REFERENCES <table> (<column>) clause for the FK."""
    ref_table = Entity(field.rel_model._meta.table_name)
    ref_column = Entity(field.rel_field.column_name)
    ctx.literal(' REFERENCES ').sql(ref_table).literal(' ')
    return ctx.sql(EnclosedNodeList((ref_column,)))
def rename_table(self, old_name, new_name):
    """ALTER TABLE <old_name> RENAME TO <new_name>."""
    ctx = self._alter_table(self.make_context(), old_name)
    return ctx.literal(' RENAME TO ').sql(Entity(new_name))
def _alter_table(self, ctx, table):
    """Start an ALTER TABLE statement for *table* on the given context."""
    ctx.literal('ALTER TABLE ')
    return ctx.sql(Entity(table))
def rename_table(self, old_name, new_name):
    """ALTER TABLE <old_name> RENAME TO <new_name> (Clause form)."""
    nodes = (SQL('ALTER TABLE'), Entity(old_name),
             SQL('RENAME TO'), Entity(new_name))
    return Clause(*nodes)
def rename_column(self, table, old_name, new_name):
    """ALTER TABLE <table> RENAME COLUMN <old_name> TO <new_name>."""
    nodes = (SQL('ALTER TABLE'), Entity(table), SQL('RENAME COLUMN'),
             Entity(old_name), SQL('TO'), Entity(new_name))
    return Clause(*nodes)
def _alter_column(self, ctx, table, column):
    """ALTER TABLE <table> MODIFY <column> prefix on the given context."""
    self._alter_table(ctx, table).literal(' MODIFY ')
    return ctx.sql(Entity(column))
def _alter_column_type(column_name, column_def):
    # NOTE(review): this reads as a nested helper -- `field`, `ctx` and
    # `column` are free variables closed over from an enclosing scope that
    # is not visible in this fragment; the `column_name`/`column_def`
    # parameters are unused here. Confirm against the enclosing function.
    # Renders the DDL for the replacement column and returns its SQL text.
    node_list = field.ddl(ctx)
    sql, _ = ctx.sql(Entity(column)).sql(node_list).query()
    return sql
def _alter_column(self, ctx, table, column):
    """ALTER TABLE <table> ALTER COLUMN <column> prefix on the context."""
    self._alter_table(ctx, table).literal(' ALTER COLUMN ')
    return ctx.sql(Entity(column))
def drop_not_null(self, table, column):
    """Make <column> nullable; refuses to operate on a primary key."""
    column_def = self._get_column_definition(table, column)
    if column_def.is_pk:
        raise ValueError('Primary keys can not be null')
    return Clause(SQL('ALTER TABLE'), Entity(table), SQL('MODIFY'),
                  column_def.sql(is_null=True))
def rename_column(self, table, old_name, new_name):
    """ALTER TABLE <table> RENAME COLUMN <old_name> TO <new_name>."""
    ctx = self._alter_table(self.make_context(), table)
    ctx.literal(' RENAME COLUMN ').sql(Entity(old_name))
    return ctx.literal(' TO ').sql(Entity(new_name))
def rename_column(self, table, old_name, new_name):
    """MySQL CHANGE-style rename, preserving the column definition."""
    column_def = self._get_column_definition(table, old_name)
    return Clause(SQL('ALTER TABLE'), Entity(table), SQL('CHANGE'),
                  Entity(old_name), column_def.sql(column_name=new_name))
def rename_table(self, old_name, new_name):
    """RENAME TABLE <old_name> TO <new_name> (MySQL syntax)."""
    ctx = self.make_context()
    ctx.literal('RENAME TABLE ').sql(Entity(old_name))
    return ctx.literal(' TO ').sql(Entity(new_name))
def drop_index(self, table, index_name):
    """DROP INDEX <index_name> ON <table> (Clause form)."""
    nodes = (SQL('DROP INDEX'), Entity(index_name),
             SQL('ON'), Entity(table))
    return Clause(*nodes)
def drop_not_null(self, table, column):
    """Make <column> nullable via MODIFY; refuses primary-key columns."""
    column_def = self._get_column_definition(table, column)
    if column_def.is_pk:
        raise ValueError('Primary keys can not be null')
    ctx = self.make_context()
    ctx.literal('ALTER TABLE ').sql(Entity(table))
    return ctx.literal(' MODIFY ').sql(column_def.sql(is_null=True))
def _update_column(self, table, column_to_update, fn):
    """Rewrite a SQLite table to apply `fn` to one column definition.

    SQLite cannot ALTER most column properties, so this rebuilds the
    table: parse the original CREATE TABLE, transform the target column
    with ``fn(column_name, column_def)`` (returning None drops it),
    create a temp table, copy the data, swap the tables and re-create
    indexes. Returns the list of queries to execute, in order.
    """
    columns = set(column.name.lower()
                  for column in self.database.get_columns(table))
    if column_to_update.lower() not in columns:
        raise ValueError('Column "%s" does not exist on "%s"' %
                         (column_to_update, table))

    # Get the SQL used to create the given table.
    table, create_table = self._get_create_table(table)

    # Get the indexes and SQL to re-create indexes.
    indexes = self.database.get_indexes(table)

    # Find any foreign keys we may need to remove.
    self.database.get_foreign_keys(table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    column_defs = [
        col.strip() for col in self.column_split_re.findall(raw_columns)
    ]

    new_column_defs = []
    new_column_names = []
    original_column_names = []

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            # Apply the user transform; a falsy result drops the column.
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                # The transform may have renamed the column.
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Table-level FOREIGN KEY / PRIMARY KEY clauses are not
            # data-bearing columns, so they are excluded from the copy.
            if not column_name.lower().startswith(('foreign', 'primary')):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Create a mapping of original columns to new columns.
    original_to_new = dict(zip(original_column_names, new_column_names))
    new_column = original_to_new.get(column_to_update)

    fk_filter_fn = lambda column_def: column_def
    if not new_column:
        # Remove any foreign keys associated with this column.
        fk_filter_fn = lambda column_def: None
    elif new_column != column_to_update:
        # Update any foreign keys for this column.
        fk_filter_fn = lambda column_def: self.fk_re.sub(
            'FOREIGN KEY ("%s") ' % new_column,
            column_def)

    cleaned_columns = []
    for column_def in new_column_defs:
        match = self.fk_re.match(column_def)
        if match is not None and match.groups()[0] == column_to_update:
            column_def = fk_filter_fn(column_def)
        if column_def:
            cleaned_columns.append(column_def)

    # Update the name of the new CREATE TABLE query.
    temp_table = table + '__tmp__'
    rgx = re.compile('("?)%s("?)' % table, re.I)
    create = rgx.sub('\\1%s\\2' % temp_table, raw_create)

    # Create the new table.
    columns = ', '.join(cleaned_columns)
    queries = [
        Clause(SQL('DROP TABLE IF EXISTS'), Entity(temp_table)),
        SQL('%s (%s)' % (create.strip(), columns))]

    # Populate new table.
    populate_table = Clause(
        SQL('INSERT INTO'), Entity(temp_table),
        EnclosedClause(*[Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaClause(*[Entity(col) for col in original_column_names]),
        SQL('FROM'), Entity(table))
    queries.append(populate_table)

    # Drop existing table and rename temp table.
    queries.append(Clause(SQL('DROP TABLE'), Entity(table)))
    queries.append(self.rename_table(temp_table, table))

    # Re-create indexes.
    for index in indexes:
        # Auto-generated indexes in SQLite will not have associated SQL,
        # so pass over them.
        if not index.sql:
            continue
        if column_to_update in index.columns:
            # Only re-create the index if the column still exists
            # (possibly renamed).
            if new_column:
                queries.append(
                    SQL(index.sql.replace(column_to_update, new_column)))
        else:
            queries.append(SQL(index.sql))

    return queries
def _update_column(self, table, column_to_update, fn):
    """Rewrite a SQLite table to apply `fn` to one column definition.

    SQLite cannot ALTER most column properties, so this rebuilds the
    table: parse the original CREATE TABLE, transform the target column
    with ``fn(column_name, column_def)`` (returning None drops it),
    create a temp table, copy the data, swap the tables and re-create
    user-defined indexes. Returns the list of queries to execute, in
    order.
    """
    columns = set(column.name.lower()
                  for column in self.database.get_columns(table))
    if column_to_update.lower() not in columns:
        raise ValueError('Column "%s" does not exist on "%s"' %
                         (column_to_update, table))

    # Get the SQL used to create the given table.
    table, create_table = self._get_create_table(table)

    # Get the indexes and SQL to re-create indexes.
    indexes = self.database.get_indexes(table)

    # Find any foreign keys we may need to remove.
    self.database.get_foreign_keys(table)

    # Make sure the create_table does not contain any newlines or tabs,
    # allowing the regex to work correctly.
    create_table = re.sub(r'\s+', ' ', create_table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    split_columns = self.column_split_re.findall(raw_columns)
    column_defs = [col.strip() for col in split_columns]

    new_column_defs = []
    new_column_names = []
    original_column_names = []

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            # Apply the user transform; a falsy result drops the column.
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                # The transform may have renamed the column.
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Table-level FOREIGN KEY / PRIMARY KEY clauses are not
            # data-bearing columns, so they are excluded from the copy.
            if not column_name.lower().startswith(('foreign', 'primary')):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Create a mapping of original columns to new columns.
    original_to_new = dict(zip(original_column_names, new_column_names))
    new_column = original_to_new.get(column_to_update)

    fk_filter_fn = lambda column_def: column_def
    if not new_column:
        # Remove any foreign keys associated with this column.
        fk_filter_fn = lambda column_def: None
    elif new_column != column_to_update:
        # Update any foreign keys for this column.
        fk_filter_fn = lambda column_def: self.fk_re.sub(
            'FOREIGN KEY ("%s") ' % new_column,
            column_def)

    cleaned_columns = []
    for column_def in new_column_defs:
        match = self.fk_re.match(column_def)
        if match is not None and match.groups()[0] == column_to_update:
            column_def = fk_filter_fn(column_def)
        if column_def:
            cleaned_columns.append(column_def)

    # Update the name of the new CREATE TABLE query.
    temp_table = table + '__tmp__'
    rgx = re.compile('("?)%s("?)' % table, re.I)
    create = rgx.sub('\\1%s\\2' % temp_table, raw_create)

    # Create the new table.
    columns = ', '.join(cleaned_columns)
    queries = [
        NodeList([SQL('DROP TABLE IF EXISTS'), Entity(temp_table)]),
        SQL('%s (%s)' % (create.strip(), columns))]

    # Populate new table.
    populate_table = NodeList((
        SQL('INSERT INTO'), Entity(temp_table),
        EnclosedNodeList([Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaNodeList([Entity(col) for col in original_column_names]),
        SQL('FROM'), Entity(table)))
    drop_original = NodeList([SQL('DROP TABLE'), Entity(table)])

    # Drop existing table and rename temp table.
    queries += [
        populate_table,
        drop_original,
        self.rename_table(temp_table, table)]

    # Re-create user-defined indexes. User-defined indexes will have a
    # non-empty SQL attribute.
    for index in filter(lambda idx: idx.sql, indexes):
        if column_to_update not in index.columns:
            queries.append(SQL(index.sql))
        elif new_column:
            sql = self._fix_index(index.sql, column_to_update, new_column)
            if sql is not None:
                queries.append(SQL(sql))

    return queries
def get(self):
    """Return per-discipline ranking lists, marshalled, with an Expires header."""
    disciplines = []
    for upgrade_discipline in DISCIPLINE_MAP.keys():
        ranks = get_ranks(upgrade_discipline)

        # Ids with a truthy key, best (lowest) rank first, capped at 501.
        top_ids = sorted((pid for pid in ranks.keys() if pid),
                         key=lambda pid: ranks[pid])[:501]

        # Fetch the matching Person rows via a JSON_EACH id table.
        id_table = Select(from_list=[fn.JSON_EACH(json.dumps(top_ids))],
                          columns=[Entity('value')])
        people = Person.select().where(Person.id << id_table)

        # Replace each numeric rank with the Person, annotated with .rank.
        for person in people:
            person.rank = int(ranks[person.id])
            ranks[person.id] = person

        # Assign competition-style places: tied ranks share a place.
        place = 0
        prev_rank = 0
        for pid in top_ids:
            person = ranks[pid]
            if person.rank != prev_rank:
                place += 1
            person.place = place
            prev_rank = person.rank

        disciplines.append({
            'name': upgrade_discipline,
            'display': upgrade_discipline.split('_')[0].title(),
            'ranks': [ranks[pid] for pid in top_ids],
        })

    expires = formatdate(timeval=time() + cache_timeout, usegmt=True)
    return ([marshal(d, discipline_ranks) for d in disciplines],
            200,
            {'Expires': expires})