def _alter_field(self, model, old_field, new_field, old_type, new_type,
                 old_db_params, new_db_params, strict=False):
    """Alter a field, then reconcile the AutoField DEFAULT expression.

    Delegates the core alteration to the generic base editor (bypassing
    the PostgreSQL editor's LIKE-index handling), then adds or removes
    the ``DEFAULT unique_rowid()`` expression when the field's db type
    suffix changed.
    """
    # Skip to the base class to avoid trying to add or drop
    # PostgreSQL-specific LIKE indexes.
    BaseDatabaseSchemaEditor._alter_field(
        self, model, old_field, new_field, old_type, new_type,
        old_db_params, new_db_params, strict,
    )
    # Add or remove `DEFAULT unique_rowid()` for AutoField.
    previous_suffix = old_field.db_type_suffix(self.connection)
    current_suffix = new_field.db_type_suffix(self.connection)
    if previous_suffix == current_suffix:
        return
    if current_suffix:
        # The new field carries a suffix (e.g. a rowid default): install it.
        changes = 'ALTER COLUMN %(column)s SET %(expression)s' % {
            'column': self.quote_name(new_field.column),
            'expression': current_suffix,
        }
    else:
        # The new field has no suffix: drop the old default expression.
        changes = 'ALTER COLUMN %(column)s DROP DEFAULT' % {
            'column': self.quote_name(new_field.column),
        }
    self.execute(self.sql_alter_column % {
        'table': self.quote_name(model._meta.db_table),
        'changes': changes,
    })
def _alter_field(self, model, old_field, new_field, old_type, new_type,
                 old_db_params, new_db_params, strict=False):
    """Alter a field, enabling CockroachDB's experimental ALTER COLUMN TYPE
    session setting first when the type or collation is changing, and
    reconciling the AutoField DEFAULT expression afterwards.
    """
    # ALTER COLUMN TYPE is experimental.
    # https://github.com/cockroachdb/cockroach/issues/49329
    # NOTE(review): `and` binds tighter than `or`, so this condition reads
    # as (is_cockroachdb_21_1 AND type changed) OR (collation changed) --
    # a collation change triggers the SET regardless of the version flag.
    # Confirm this grouping is intentional.
    if (self.connection.features.is_cockroachdb_21_1 and
            old_type != new_type or
            getattr(old_field, 'db_collation', None) !=
            getattr(new_field, 'db_collation', None)):
        self.execute('SET enable_experimental_alter_column_type_general = true')
    # Skip to the base class to avoid trying to add or drop
    # PostgreSQL-specific LIKE indexes.
    BaseDatabaseSchemaEditor._alter_field(
        self, model, old_field, new_field, old_type, new_type,
        old_db_params, new_db_params, strict,
    )
    # Add or remove `DEFAULT unique_rowid()` for AutoField.
    old_suffix = old_field.db_type_suffix(self.connection)
    new_suffix = new_field.db_type_suffix(self.connection)
    if old_suffix != new_suffix:
        if new_suffix:
            # New field has a db-type suffix: install it as the default
            # expression on the column.
            self.execute(self.sql_alter_column % {
                'table': self.quote_name(model._meta.db_table),
                'changes': 'ALTER COLUMN %(column)s SET %(expression)s' % {
                    'column': self.quote_name(new_field.column),
                    'expression': new_suffix,
                }
            })
        else:
            # New field has no suffix: drop the previous default.
            self.execute(self.sql_alter_column % {
                'table': self.quote_name(model._meta.db_table),
                'changes': 'ALTER COLUMN %(column)s DROP DEFAULT' % {
                    'column': self.quote_name(new_field.column),
                }
            })
def install_model(custom_model):
    """Create the database table for ``custom_model``.

    Args:
        custom_model: A Django model class whose table should be created.

    Side effects:
        Issues CREATE TABLE DDL against the default database connection.
        An ``AttributeError`` from the editor is printed and swallowed
        (best effort), preserving the original behavior.
    """
    from django.db import connection
    from django.db.backends.base.schema import BaseDatabaseSchemaEditor

    # Bug fix: the editor was previously constructed with the undefined
    # name `connect`, which raised NameError; it must receive the
    # imported `connection` object.
    editor = BaseDatabaseSchemaEditor(connection)
    try:
        editor.create_model(model=custom_model)
    except AttributeError as aerror:
        print(aerror)
    # NOTE(review): deferred SQL collected by the editor (e.g. index
    # creation) is never executed because the editor is not used as a
    # context manager -- confirm whether `with editor:` is wanted here.
def _alter_field(self, model, old_field, new_field, old_type, new_type,
                 old_db_params, new_db_params, strict=False):
    """Alter the field via the generic base implementation.

    Calls ``BaseDatabaseSchemaEditor._alter_field`` directly so the
    PostgreSQL editor's LIKE-index add/drop logic is bypassed.
    """
    # Skip to the base class to avoid trying to add or drop
    # PostgreSQL-specific LIKE indexes.
    generic_alter_field = BaseDatabaseSchemaEditor._alter_field
    generic_alter_field(
        self, model, old_field, new_field, old_type, new_type,
        old_db_params, new_db_params, strict,
    )
def destroy(self, request, *args, **kwargs):
    """Delete the record and drop the database table it describes.

    Resolves the dynamic model from the object's name before the object
    is destroyed, then drops that model's table with a schema editor.
    """
    instance = self.get_object()
    # Build the dynamic model first: it is derived from the instance's
    # name, which is needed before the instance is deleted.
    dynamic_model = crear_modelo(instance.nombre)
    self.perform_destroy(instance)
    editor = BaseDatabaseSchemaEditor(connection)
    editor.delete_model(dynamic_model)
    return Response(status=204)
def database_forwards(self, app_label, schema_editor: BaseDatabaseSchemaEditor, from_state, to_state: ProjectState):
    """Install the field's default as a column DEFAULT on the table.

    Does nothing when the field declares no default (NOT_PROVIDED).
    """
    target_model = to_state.apps.get_model(app_label, self.model_name)
    options = target_model._meta
    field = options.get_field(self.name)
    if field.default == NOT_PROVIDED:
        # No default declared on the field: nothing to alter.
        return
    table = schema_editor.quote_name(options.db_table)
    column = schema_editor.quote_name(field.column)
    default_sql = schema_editor.quote_value(field.default)
    schema_editor.execute(
        "ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s"
        % (table, column, default_sql)
    )
def _alter_field(self, model, old_field, new_field, old_type, new_type,
                 old_db_params, new_db_params, strict=False):
    """Alter a field, first dropping any index backing the old column,
    then delegating the alteration to the generic base editor.
    """
    if old_field.db_index or old_field.unique:
        # Drop the index on the old column before altering it.
        # NOTE(review): assumes _create_index_name() reproduces exactly the
        # name under which the index was originally created, and that a
        # unique constraint can be removed as a plain index on this
        # backend -- confirm both against the target Django version.
        index_name = self._create_index_name(model._meta.db_table, [old_field.column])
        self.execute(self._delete_index_sql(model, index_name))
    # Delegate the remaining work to the backend-agnostic base
    # implementation.
    BaseDatabaseSchemaEditor._alter_field(
        self, model, old_field, new_field, old_type, new_type,
        old_db_params, new_db_params, strict,
    )
def database_forwards(
    self,
    app_label: str,
    schema_editor: BaseDatabaseSchemaEditor,
    from_state: ProjectState,
    to_state: ProjectState,
) -> None:
    """Run the SQL that removes audit logging for this operation's model."""
    target_model = to_state.apps.get_model(app_label, self.model)
    statements = utils.remove_audit_logging_sql(audit_logged_model=target_model)
    for statement in statements:
        schema_editor.execute(statement)
def desde_tabla(self, instancia):
    """Create the database table for ``instancia`` from its attributes.

    Builds a dynamic model whose fields mirror the instance's declared
    attributes (the "geom" attribute is mandatory), then creates the
    table with a schema editor.

    Raises:
        ValidationError: if the instance has no "geom" attribute.
    """
    field_map = {"__module__": "capas"}
    geom_attr = instancia.atributos.filter(nombre="geom").first()
    if geom_attr is None:
        # A geometry attribute is mandatory for a layer table.
        raise ValidationError({"capa": "falta atributo geom"})
    field_map["geom"] = self.get_tipo(geom_attr.tipo)
    for attribute in instancia.atributos.all():
        # "id" is implicit (Django primary key) and "geom" was handled above.
        if attribute.nombre.lower() in ("id", "geom"):
            continue
        field_map[attribute.nombre.lower()] = self.get_tipo(attribute.tipo)
    dynamic_model = type(instancia.nombre, (models.Model, ), field_map)
    editor = BaseDatabaseSchemaEditor(connection)
    editor.deferred_sql = []
    editor.create_model(dynamic_model)
def digest(connection, *args):
    """Compute the digest hash used for index/constraint name generation.

    Bridges multiple Django versions: on Django >= 1.8 the helper lives
    on ``BaseDatabaseSchemaEditor``; older versions expose it on the
    connection's creation object.

    Args:
        connection (object): The database connection.
        *args (tuple): The positional arguments used to build the digest
            hash out of.

    Returns:
        str: The resulting digest hash.
    """
    has_editor_digest = bool(
        BaseDatabaseSchemaEditor and
        hasattr(BaseDatabaseSchemaEditor, '_digest')
    )
    if not has_editor_digest:
        # Django < 1.8: the digest helper lives on the creation object.
        return connection.creation._digest(*args)
    # Django >= 1.8: _digest() is a classmethod common across all database
    # backends, so no per-instance SchemaEditor is required. If that ever
    # changes, a SchemaEditor instance will be needed here.
    return BaseDatabaseSchemaEditor._digest(*args)
def _alter_column_type_sql(self, model, old_field, new_field, new_type): self.sql_alter_column_type = 'ALTER COLUMN %(column)s TYPE %(type)s' # Cast when data type changed. if self._field_data_type(old_field) != self._field_data_type( new_field): self.sql_alter_column_type += ' USING %(column)s::%(type)s' # Make ALTER TYPE with SERIAL make sense. # table = strip_quotes(model._meta.db_table) serial_fields_map = { 'bigserial': 'bigint', 'serial': 'integer', 'smallserial': 'smallint' } if new_type.lower() in serial_fields_map: column = strip_quotes(new_field.column) return ( ( self.sql_alter_column_type % { "column": self.quote_name(column), "type": serial_fields_map[new_type.lower()], }, [], ), # The PostgreSQL backend manages the column sequence here but # this isn't applicable on CockroachDB because unique_rowid() # is used instead of sequences. [], ) else: return BaseDatabaseSchemaEditor._alter_column_type_sql( self, model, old_field, new_field, new_type)
def _sql_indexes_for_field(self, model, field):
    """Return the CREATE INDEX SQL statements for a single model field.

    Only plain-indexed fields produce a statement; unique fields are
    excluded (their index comes with the unique constraint) and
    non-indexed fields yield nothing.
    """
    max_name_length = 63

    def quote(identifier):
        # Quoting once is enough.
        if identifier.startswith('"') and identifier.endswith('"'):
            return identifier
        return '"%s"' % identifier

    if not field.db_index or field.unique:
        return []
    index_name = "%s_%s" % (
        model._meta.db_table,
        BaseDatabaseSchemaEditor._digest(field.column),
    )
    statement = "CREATE INDEX %s ON %s(%s)" % (
        quote(truncate_name(index_name, max_name_length)),
        quote(model._meta.db_table),
        quote(field.column),
    )
    return [statement]
def database_backwards(
    self,
    app_label: str,
    schema_editor: BaseDatabaseSchemaEditor,
    from_state: ProjectState,
    to_state: ProjectState,
) -> None:
    """Re-install audit logging for this model when rolling back."""
    historical_model = from_state.apps.get_model(app_label, self.model)
    statements = utils.add_audit_logging_sql(
        audit_logged_model=historical_model,
        context_model=utils.get_context_model(from_state.apps),
        log_entry_model=utils.get_log_entry_model(from_state.apps),
    )
    for statement in statements:
        schema_editor.execute(statement)
def importar_tabla(self):
    """Create the layer's table (with its structure) in the database.

    Validates the layer, builds a dynamic model whose columns mirror the
    layer's attributes (CharField for every attribute except the
    geometry), creates the table, then migrates the records and
    registers the structure.
    """
    self.validar_capa()
    attrs = self.capa.common_attributes
    field_map = {"__module__": "capas"}
    for attr_name in attrs:
        # "id" is supplied implicitly by Django; skip it.
        if attr_name.lower() == "id":
            continue
        field_map[attr_name.lower()] = models.CharField(max_length=255, null=True)
    # The geometry field type is derived from the layer's first feature.
    field_map["geom"] = self.get_tipo(self.capa[0].geometry.type)
    dynamic_model = type(self.nombre, (models.Model, ), field_map)
    editor = BaseDatabaseSchemaEditor(connection)
    editor.deferred_sql = []
    editor.create_model(dynamic_model)
    self.alterar_registros_nuevo(dynamic_model)
    self.registrar_estructura(attrs)
def test_effective_default_callable(self):
    """SchemaEditor.effective_default() shouldn't call callable defaults."""
    class MyCharField(models.CharField):
        def _get_default(self):
            return self.default

    class MyStr(str):
        def __call__(self):
            return self

    # The default is the callable class itself; _effective_default must
    # hand it back untouched rather than invoking it.
    field = MyCharField(max_length=1, default=MyStr)
    effective = BaseDatabaseSchemaEditor._effective_default(field)
    self.assertEqual(effective, MyStr)
def unseed(apps: Apps, schema_editor: BaseDatabaseSchemaEditor):
    """Remove the seeded playlist fixtures (songs, entries, playlists).

    Deletes child rows (entries) before their playlists so the order is
    safe for foreign keys; songs are cleared wholesale.
    """
    statements = (
        "delete from playlister_song",
        """
        delete from playlister_playlistentry
        where playlist_id in (
            select id from playlister_playlist
            where name like 'Seeded playlist %')
        """,
        """delete from playlister_playlist
        where name like 'Seeded playlist %'""",
    )
    for statement in statements:
        schema_editor.execute(statement)
def _index_columns(self, table, columns, col_suffixes, opclasses):
    """Build index column SQL via the generic base implementation.

    cockroachdb doesn't support PostgreSQL opclasses, so the
    PostgreSQL-specific override is bypassed.
    """
    generic_index_columns = BaseDatabaseSchemaEditor._index_columns
    return generic_index_columns(self, table, columns, col_suffixes, opclasses)
def __init__(self):
    """Build (or fetch from the app registry) the dynamic time-series model.

    Looks the model up in Django's app registry first; if absent, builds
    a new model class with a FK to the base model, a timestamp, copies of
    the selected base-model fields, and any configured function fields,
    then creates its table if the database doesn't have it yet.
    """
    self.TimeSeriesBaseModel = get_model(self.app_label, self.model_name)
    # first check to see if the model exists in Django's model cache
    from django.apps import registry
    try:
        if registry.apps.get_model(self.app_label, self.name):
            self.TimeSeriesModel = registry.apps.get_model(
                self.app_label, self.name)
            return
    except LookupError:
        pass
    # model did not exist in Django's model cache, so let's build it and load it

    class Meta:
        db_table = self.name  # TODO support overriding table name

    # NOTE(review): ForeignKey without on_delete only works on Django < 2.0;
    # confirm the Django version this targets.
    fields = {
        'original_model': models.ForeignKey(self.TimeSeriesBaseModel),
        'timestamp': models.DateTimeField(db_index=True),
        '__module__': self.app_label + '.models',
        'Meta': Meta
    }
    for f in self.time_series_properties:
        if f == 'timestamp':
            raise Exception('Cannot time series a field named "timestamp"')
        field_class = None
        max_digits = 0
        decimal_places = 0
        max_length = 0
        # Find the matching field on the base model and capture the
        # parameters needed to clone it.
        for model_field in self.TimeSeriesBaseModel._meta.fields:
            if model_field.name == f:
                field_class = model_field.__class__
                # TODO support all default django fields
                if not (issubclass(field_class, models.DecimalField)
                        or issubclass(field_class, models.IntegerField)
                        or issubclass(field_class, models.CharField)
                        or issubclass(field_class, models.TextField)
                        or issubclass(field_class, models.URLField)
                        or issubclass(field_class, models.DateTimeField)
                        or issubclass(field_class, models.DateField)
                        or issubclass(field_class, models.FloatField)):
                    raise Exception("Class {} not supported".format(
                        field_class.__name__))
                if field_class == models.DecimalField:
                    # must bring over params in the case of decimal field
                    max_digits = model_field.max_digits
                    decimal_places = model_field.decimal_places
                if field_class == models.CharField:
                    # must bring over max length in case of char field
                    max_length = model_field.max_length
                break
        if field_class is None:
            raise Exception(
                "Field {} could not be found in class {}".format(
                    f, self.TimeSeriesBaseModel.__name__))
        # Fresh field instance of the same class, with the captured
        # parameters applied after construction.
        fields[f] = field_class()
        if field_class == models.DecimalField:
            fields[f].max_digits = max_digits
            fields[f].decimal_places = decimal_places
        if field_class == models.CharField:
            fields[f].max_length = max_length
    for field_name, field_instance, function in self.time_series_functions:
        fields[field_name] = field_instance
    self.TimeSeriesModel = type(self.name, (models.Model, ), fields)
    # next make sure that the tables exist in the database. if not, build them.
    from django.db import connection
    try:
        cursor = connection.cursor()
        # Cheap probe: raises ProgrammingError when the table is missing.
        cursor.execute("SELECT * FROM {} LIMIT 1".format(self.name))
    except ProgrammingError:
        # Looks like the table doesn't exist! Let's build it!
        from django.db import connection  # NOTE(review): redundant re-import
        from django.db.backends.base.schema import BaseDatabaseSchemaEditor
        schema_editor = BaseDatabaseSchemaEditor(connection)
        with schema_editor:
            schema_editor.create_model(self.TimeSeriesModel)
def _field_indexes_sql(self, model, field):
    """Return index SQL for a field using the generic base implementation.

    Postgres needs an operator class defined for LIKE queries on text
    and varchar columns to work properly; calling the base class version
    directly avoids that PostgreSQL-specific handling.
    """
    generic_field_indexes = BaseDatabaseSchemaEditor._field_indexes_sql
    return generic_field_indexes(self, model, field)
def _model_indexes_sql(self, model):
    """Return model index SQL using the generic base implementation.

    Postgres customizes _model_indexes_sql with special-case options for
    string fields; going straight to the base class avoids them.
    """
    generic_model_indexes = BaseDatabaseSchemaEditor._model_indexes_sql
    return generic_model_indexes(self, model)
def __init__(self):
    """Build (or fetch from the app registry) the dynamic time-series model.

    Checks Django's app registry for an existing model of this name; if
    absent, synthesizes one with a FK to the base model, an indexed
    timestamp, copies of the selected base-model fields, and any
    configured function fields, then creates its table on demand.
    """
    self.TimeSeriesBaseModel = get_model(self.app_label, self.model_name)
    # first check to see if the model exists in Django's model cache
    from django.apps import registry
    try:
        if registry.apps.get_model(self.app_label, self.name):
            self.TimeSeriesModel = registry.apps.get_model(self.app_label, self.name)
            return
    except LookupError:
        pass
    # model did not exist in Django's model cache, so let's build it and load it

    class Meta:
        db_table = self.name  # TODO support overriding table name

    # NOTE(review): ForeignKey without on_delete requires Django < 2.0;
    # confirm the targeted Django version.
    fields = {
        'original_model': models.ForeignKey(self.TimeSeriesBaseModel),
        'timestamp': models.DateTimeField(db_index=True),
        '__module__': self.app_label + '.models',
        'Meta': Meta
    }
    for f in self.time_series_properties:
        if f == 'timestamp':
            raise Exception('Cannot time series a field named "timestamp"')
        field_class = None
        max_digits = 0
        decimal_places = 0
        max_length = 0
        # Locate the matching base-model field and record the parameters
        # needed to clone it onto the new model.
        for model_field in self.TimeSeriesBaseModel._meta.fields:
            if model_field.name == f:
                field_class = model_field.__class__
                # TODO support all default django fields
                if not (issubclass(field_class, models.DecimalField)
                        or issubclass(field_class, models.IntegerField)
                        or issubclass(field_class, models.CharField)
                        or issubclass(field_class, models.TextField)
                        or issubclass(field_class, models.URLField)
                        or issubclass(field_class, models.DateTimeField)
                        or issubclass(field_class, models.DateField)
                        or issubclass(field_class, models.FloatField)):
                    raise Exception("Class {} not supported".format(field_class.__name__))
                if field_class == models.DecimalField:
                    # must bring over params in the case of decimal field
                    max_digits = model_field.max_digits
                    decimal_places = model_field.decimal_places
                if field_class == models.CharField:
                    # must bring over max length in case of char field
                    max_length = model_field.max_length
                break
        if field_class is None:
            raise Exception("Field {} could not be found in class {}".format(f, self.TimeSeriesBaseModel.__name__))
        # Fresh field instance of the same class; parameters are applied
        # after construction.
        fields[f] = field_class()
        if field_class == models.DecimalField:
            fields[f].max_digits = max_digits
            fields[f].decimal_places = decimal_places
        if field_class == models.CharField:
            fields[f].max_length = max_length
    for field_name, field_instance, function in self.time_series_functions:
        fields[field_name] = field_instance
    self.TimeSeriesModel = type(self.name, (models.Model,), fields)
    # next make sure that the tables exist in the database. if not, build them.
    from django.db import connection
    try:
        cursor = connection.cursor()
        # Cheap probe: raises ProgrammingError when the table is missing.
        cursor.execute("SELECT * FROM {} LIMIT 1".format(self.name))
    except ProgrammingError:
        # Looks like the table doesn't exist! Let's build it!
        from django.db import connection  # NOTE(review): redundant re-import
        from django.db.backends.base.schema import BaseDatabaseSchemaEditor
        schema_editor = BaseDatabaseSchemaEditor(connection)
        with schema_editor:
            schema_editor.create_model(self.TimeSeriesModel)