def Match(columns, expr, modifier=None):
    if isinstance(columns, (list, tuple)):
        match = fn.MATCH(*columns)  # Tuple of one or more columns / fields.
    else:
        match = fn.MATCH(columns)  # Single column / field.
    args = expr if modifier is None else NodeList((expr, SQL(modifier)))
    return NodeList((match, fn.AGAINST(args)))
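# Usage sketch (illustrative; peewee's playhouse.mysql_ext ships Match, but
# the Post model, database name, and search terms below are assumptions):
from peewee import Model, MySQLDatabase, TextField

mysql_db = MySQLDatabase('blog')

class Post(Model):
    content = TextField()

    class Meta:
        database = mysql_db

# Renders: ... WHERE MATCH(`content`) AGAINST('python orm' IN BOOLEAN MODE)
boolean_search = Post.select().where(
    Match(Post.content, 'python orm', 'IN BOOLEAN MODE'))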
@classmethod
def _fts_cmd_sql(cls, cmd, **extra_params):
    tbl = cls._meta.entity
    columns = [tbl]
    values = [cmd]
    for key, value in extra_params.items():
        columns.append(Entity(key))
        values.append(value)
    return NodeList((
        SQL('INSERT INTO'),
        cls._meta.entity,
        EnclosedNodeList(columns),
        SQL('VALUES'),
        EnclosedNodeList(values)))
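# Usage sketch (the DocumentIndex model is an assumption; requires an
# FTS5-enabled SQLite build). SQLite FTS "special commands" are issued by
# inserting the command name into a column named after the table itself,
# which is exactly the statement _fts_cmd_sql() assembles:
from playhouse.sqlite_ext import FTS5Model, SearchField, SqliteExtDatabase

fts_db = SqliteExtDatabase(':memory:')

class DocumentIndex(FTS5Model):
    content = SearchField()

    class Meta:
        database = fts_db

DocumentIndex.create_table()
# optimize() funnels through _fts_cmd_sql('optimize') and executes:
#   INSERT INTO "documentindex" ("documentindex") VALUES ('optimize')
DocumentIndex.optimize()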
def sql(self, column_name=None, is_null=None):
    if is_null is None:
        is_null = self.is_null
    if column_name is None:
        column_name = self.name
    parts = [Entity(column_name), SQL(self.definition)]
    if self.is_unique:
        parts.append(SQL('UNIQUE'))
    if is_null:
        parts.append(SQL('NULL'))
    else:
        parts.append(SQL('NOT NULL'))
    if self.is_pk:
        parts.append(SQL('PRIMARY KEY'))
    if self.extra:
        parts.append(SQL(self.extra))
    return NodeList(parts)
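# Worked example (a sketch; the attribute values are made up): for a column
# with name='username', definition='VARCHAR(255)', is_unique=True,
# is_null=False, is_pk=False, and no extra, the parts list joins with the
# default space glue to render the DDL fragment:
#
#   "username" VARCHAR(255) UNIQUE NOT NULL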
def test_model_indexes_complex_columns(self):
    class Taxonomy(TestModel):
        name = CharField()
        name_class = CharField()

        class Meta:
            database = self.database

    name = NodeList((fn.LOWER(Taxonomy.name), SQL('varchar_pattern_ops')))
    index = (Taxonomy
             .index(name, Taxonomy.name_class)
             .where(Taxonomy.name_class == 'scientific name'))
    Taxonomy.add_index(index)

    self.assertIndexes(Taxonomy, [
        ('CREATE INDEX "taxonomy_name_class" ON "taxonomy" ('
         'LOWER("name") varchar_pattern_ops, "name_class") '
         'WHERE ("name_class" = ?)', ['scientific name']),
    ])
def _create_virtual_table(self, safe=True, **options):
    options = self.model.clean_options(
        merge_dict(self.model._meta.options, options))

    # Structure:
    # CREATE VIRTUAL TABLE <model>
    # USING <extension_module>
    # ([prefix_arguments, ...] fields, ... [arguments, ...], [options...])
    ctx = self._create_context()
    ctx.literal('CREATE VIRTUAL TABLE ')
    if safe:
        ctx.literal('IF NOT EXISTS ')
    (ctx
     .sql(self.model)
     .literal(' USING '))

    ext_module = self.model._meta.extension_module
    if isinstance(ext_module, Node):
        return ctx.sql(ext_module)

    ctx.sql(SQL(ext_module)).literal(' ')
    arguments = []
    meta = self.model._meta

    if meta.prefix_arguments:
        arguments.extend([SQL(a) for a in meta.prefix_arguments])

    # Constraints, data-types, foreign and primary keys are all omitted.
    for field in meta.sorted_fields:
        if isinstance(field, RowIDField) or field._hidden:
            continue
        field_def = [Entity(field.column_name)]
        if field.unindexed:
            field_def.append(SQL('UNINDEXED'))
        arguments.append(NodeList(field_def))

    if meta.arguments:
        arguments.extend([SQL(a) for a in meta.arguments])

    if options:
        arguments.extend(self._create_table_option_sql(options))
    return ctx.sql(EnclosedNodeList(arguments))
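# Illustration (a sketch; the model and field names are assumptions): a
# virtual-table model using the fts5 extension module, with one UNINDEXED
# field, would emit DDL roughly like:
#
#   CREATE VIRTUAL TABLE IF NOT EXISTS "noteindex" USING fts5
#   ("title", "body", "source_url" UNINDEXED)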
def test_date_time_math_mysql(self):
    nl = NodeList((SQL('INTERVAL'), Schedule.interval, SQL('SECOND')))
    next_occurrence = fn.date_add(Task.last_run, nl)
    self._do_test_date_time_math(next_occurrence)
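# The NodeList above renders MySQL's interval syntax (a sketch of the
# generated expression, with column quoting abbreviated):
#
#   date_add(`last_run`, INTERVAL `interval` SECOND)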
def ddl(self, ctx):
    node_list = super(AutoIncrementField, self).ddl(ctx)
    return NodeList((node_list, SQL('AUTOINCREMENT')))
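# Illustration (a sketch): the superclass ddl() yields the usual
# primary-key column definition, to which SQLite's AUTOINCREMENT keyword
# is appended, e.g.:
#
#   "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT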
def _update_column(self, table, column_to_update, fn):
    columns = set(column.name.lower()
                  for column in self.database.get_columns(table))
    if column_to_update.lower() not in columns:
        raise ValueError('Column "%s" does not exist on "%s"' %
                         (column_to_update, table))

    # Get the SQL used to create the given table.
    table, create_table = self._get_create_table(table)

    # Get the indexes and SQL to re-create indexes.
    indexes = self.database.get_indexes(table)

    # Find any foreign keys we may need to remove.
    self.database.get_foreign_keys(table)

    # Make sure the create_table does not contain any newlines or tabs,
    # allowing the regex to work correctly.
    create_table = re.sub(r'\s+', ' ', create_table)

    # Parse out the `CREATE TABLE` and column list portions of the query.
    raw_create, raw_columns = self.column_re.search(create_table).groups()

    # Clean up the individual column definitions.
    split_columns = self.column_split_re.findall(raw_columns)
    column_defs = [col.strip() for col in split_columns]

    new_column_defs = []
    new_column_names = []
    original_column_names = []
    constraint_terms = ('foreign ', 'primary ', 'constraint ')

    for column_def in column_defs:
        column_name, = self.column_name_re.match(column_def).groups()

        if column_name == column_to_update:
            new_column_def = fn(column_name, column_def)
            if new_column_def:
                new_column_defs.append(new_column_def)
                original_column_names.append(column_name)
                column_name, = self.column_name_re.match(
                    new_column_def).groups()
                new_column_names.append(column_name)
        else:
            new_column_defs.append(column_def)
            # Avoid treating constraints as columns.
            if not column_def.lower().startswith(constraint_terms):
                new_column_names.append(column_name)
                original_column_names.append(column_name)

    # Create a mapping of original columns to new columns.
    original_to_new = dict(zip(original_column_names, new_column_names))
    new_column = original_to_new.get(column_to_update)

    fk_filter_fn = lambda column_def: column_def
    if not new_column:
        # Remove any foreign keys associated with this column.
        fk_filter_fn = lambda column_def: None
    elif new_column != column_to_update:
        # Update any foreign keys for this column.
        fk_filter_fn = lambda column_def: self.fk_re.sub(
            'FOREIGN KEY ("%s") ' % new_column,
            column_def)

    cleaned_columns = []
    for column_def in new_column_defs:
        match = self.fk_re.match(column_def)
        if match is not None and match.groups()[0] == column_to_update:
            column_def = fk_filter_fn(column_def)
        if column_def:
            cleaned_columns.append(column_def)

    # Update the name of the new CREATE TABLE query.
    temp_table = table + '__tmp__'
    rgx = re.compile('("?)%s("?)' % table, re.I)
    create = rgx.sub('\\1%s\\2' % temp_table, raw_create)

    # Create the new table.
    columns = ', '.join(cleaned_columns)
    queries = [
        NodeList([SQL('DROP TABLE IF EXISTS'), Entity(temp_table)]),
        SQL('%s (%s)' % (create.strip(), columns))]

    # Populate new table.
    populate_table = NodeList((
        SQL('INSERT INTO'),
        Entity(temp_table),
        EnclosedNodeList([Entity(col) for col in new_column_names]),
        SQL('SELECT'),
        CommaNodeList([Entity(col) for col in original_column_names]),
        SQL('FROM'),
        Entity(table)))
    drop_original = NodeList([SQL('DROP TABLE'), Entity(table)])

    # Drop existing table and rename temp table.
    queries += [
        populate_table,
        drop_original,
        self.rename_table(temp_table, table)]

    # Re-create user-defined indexes. User-defined indexes will have a
    # non-empty SQL attribute.
    for index in filter(lambda idx: idx.sql, indexes):
        if column_to_update not in index.columns:
            queries.append(SQL(index.sql))
        elif new_column:
            sql = self._fix_index(index.sql, column_to_update, new_column)
            if sql is not None:
                queries.append(SQL(sql))

    return queries
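# Usage sketch (the database file, table, and column names are assumptions).
# SQLite offers only limited ALTER TABLE support, so _update_column()
# rebuilds the table: copy the data into a "__tmp__" table, drop the
# original, rename the copy, then re-create the surviving indexes:
from peewee import SqliteDatabase
from playhouse.migrate import SqliteMigrator, migrate

sqlite_db = SqliteDatabase('app.db')
migrator = SqliteMigrator(sqlite_db)

# drop_column() is one of the operations that funnels through
# _update_column(); migrate() executes the queries it returns.
migrate(migrator.drop_column('user', 'legacy_flag'))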
def add_unique(self, table, *column_names):
    constraint_name = 'uniq_%s' % '_'.join(column_names)
    constraint = NodeList((
        SQL('UNIQUE'),
        EnclosedNodeList([Entity(column) for column in column_names])))
    return self.add_constraint(table, constraint_name, constraint)
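# Usage sketch (hypothetical table and column names): through the migrator
# API this renders an ALTER TABLE ... ADD CONSTRAINT statement, e.g.:
#
#   migrate(migrator.add_unique('person', 'first_name', 'last_name'))
#   # ALTER TABLE "person" ADD CONSTRAINT "uniq_first_name_last_name"
#   #   UNIQUE ("first_name", "last_name")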
def cast_jsonb(node):
    return NodeList((node, SQL('::jsonb')), glue='')
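# Usage sketch (the APIResponse model is an assumption): with glue='' the
# cast is attached directly to the node, with no intervening space:
#
#   cast_jsonb(APIResponse.data)  # renders: "data"::jsonb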
def ddl_datatype(self, ctx):
    data_type = self.__field.ddl_datatype(ctx)
    return NodeList((data_type, SQL('[]' * self.dimensions)), glue='')
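# Illustration (a sketch): for ArrayField(IntegerField, dimensions=2), the
# '[]' suffix is repeated once per dimension and glued to the base type
# with no space, producing the Postgres DDL datatype:
#
#   INTEGER[][]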