def compiler(queryset, connection, using, **kwargs):
    """Return a mock SQL compiler whose execute_sql always raises.

    Used to guarantee that no real SQL is ever run for the queryset's
    model: any attempt fails loudly with NotSupportedError.
    """
    mock_compiler = MagicMock(name='mock_connection.ops.compiler()')
    # noinspection PyProtectedMember
    message = "Mock database tried to execute SQL for {} model.".format(
        queryset.model._meta.object_name
    )
    mock_compiler.execute_sql.side_effect = NotSupportedError(message)
    return mock_compiler
def as_sql(self, compiler, connection, **extra_context):
    """Compile to SQL, rejecting geodetic fields when the backend cannot
    compute Area on geodetic coordinate systems."""
    supports_geodetic = connection.features.supports_area_geodetic
    if not supports_geodetic and self.geo_field.geodetic(connection):
        raise NotSupportedError(
            "Area on geodetic coordinate systems not supported."
        )
    return super().as_sql(compiler, connection, **extra_context)
def as_postgresql(self, compiler, connection, **extra_context):
    """PostGIS compilation: reject unusable fields and switch to the 3D
    perimeter function when every source field has more than 2 dimensions."""
    if self.geo_field.geodetic(connection) and not self.source_is_geography():
        raise NotSupportedError("ST_Perimeter cannot use a non-projected non-geography field.")
    function = None
    # The smallest dimension across source fields decides 2D vs 3D.
    if min(f.dim for f in self.get_source_fields()) > 2:
        function = connection.ops.perimeter3d
    return super().as_sql(compiler, connection, function=function, **extra_context)
def __enter__(self):
    """Enter the schema editor context.

    Some SQLite schema alterations need foreign key constraints to be
    disabled, so enforce that they are off for the duration of the edit.
    """
    disabled = self.connection.disable_constraint_checking()
    if not disabled:
        raise NotSupportedError(
            'SQLite schema editor cannot be used while foreign key '
            'constraint checks are enabled. Make sure to disable them '
            'before entering a transaction.atomic() context because '
            'SQLite does not support disabling them in the middle of '
            'a multi-statement transaction.')
    return super().__enter__()
def alter_db_table(self, model, old_db_table, new_db_table, disable_constraints=True):
    """Rename a table, briefly re-enabling FK checks when other tables
    reference it, so SQLite fixes up the references during the rename."""
    referenced = (
        disable_constraints
        and self._is_referenced_by_fk_constraint(old_db_table)
    )
    if not referenced:
        super().alter_db_table(model, old_db_table, new_db_table)
        return
    if self.connection.in_atomic_block:
        raise NotSupportedError((
            'Renaming the %r table while in a transaction is not '
            'supported on SQLite because it would break referential '
            'integrity. Try adding `atomic = False` to the Migration class.'
        ) % old_db_table)
    # Constraint checking must be on while renaming a referenced table.
    self.connection.enable_constraint_checking()
    super().alter_db_table(model, old_db_table, new_db_table)
    self.connection.disable_constraint_checking()
def resolve_path(self, col): """ resolve the path of Alias to find the final model, with the final name of the attr :param Col col: the column to resolve :rtype: tuple[Alias], str """ # current = Alias = NamedTuple(model,parent,field,attrname,m2m) if isinstance(col, RawSQL): # special case with prefetch_related which get the id from the sql query. # this request can work on the dynamic rest backend, but the performance # will be degraded since the remote backend will return all id instead of the # one in the current filter. # to prevent this, a small snipet can be added to override the # dynamic_rest.filters.DynamicFilterBackend._build_requested_prefetches to filter the # remote request with the actual id. # see «special cases» in documentation matches = self.quote_rexep.findall(col.sql) if len(matches) == 2: table, field = matches current = self.aliases[table] self.query.is_prefetch_related = True else: raise NotSupportedError("Only Col in sql select is supported") elif isinstance(col, Col): current = self.aliases[col.alias] # type: Alias field = col.target.name else: raise NotSupportedError("Only Col in sql select is supported") if current.m2m is not None: final_att_name = current.m2m.name current = current.parent else: final_att_name = field return current, final_att_name
def handle(self, *app_labels, **options):
    """Refresh the Postgres materialized view named by the options.

    Reads ``app_label``, ``model_name`` and ``concurrently`` from options.

    Raises:
        OperationalError: if no model with the given name exists.
        NotSupportedError: if the model is not a PostgresMaterializedViewModel.
    """
    app_label = options.get("app_label")
    model_name = options.get("model_name")
    concurrently = options.get("concurrently")
    # apps.get_model() raises LookupError for an unknown app/model instead of
    # returning None, so the previous `if not model:` guard could never fire.
    try:
        model = apps.get_model(app_label, model_name)
    except LookupError:
        raise OperationalError(f"Cannot find a model named '{model_name}'")
    if not issubclass(model, PostgresMaterializedViewModel):
        raise NotSupportedError(
            f"Model {model.__name__} is not a `PostgresMaterializedViewModel`"
        )
    model.refresh(concurrently=concurrently)
def db_type(self, connection):
    """Return the database column type for this field on the given backend.

    Mirrors the column types Django itself uses for UUIDs on each backend.
    """
    # Engine path -> column type. A 16 byte BLOB might be better for SQLite...
    column_types = {
        'django.db.backends.sqlite3': 'char(32)',
        'django.db.backends.postgresql': 'uuid',
        'django_cockroachdb': 'uuid',
        'django.db.backends.mysql': 'binary(16)',
        'django.db.backends.oracle': 'VARCHAR2(32)',
    }
    engine = connection.settings_dict['ENGINE']
    if engine not in column_types:
        raise NotSupportedError(
            "You are trying to use a backend that is not currently supported by this field."
        )
    return column_types[engine]
def alter_field(self, model, old_field, new_field, strict=False):
    # Renaming a column referenced by other tables' FK constraints needs a
    # manual sqlite_master rewrite on SQLite versions without atomic
    # reference renames (< 3.26), and cannot run inside a transaction.
    old_field_name = old_field.name
    table_name = model._meta.db_table
    _, old_column_name = old_field.get_attname_column()
    if (new_field.name != old_field_name and
            not self.connection.features.supports_atomic_references_rename and
            self._is_referenced_by_fk_constraint(
                table_name, old_column_name, ignore_self=True)):
        if self.connection.in_atomic_block:
            raise NotSupportedError((
                "Renaming the %r.%r column while in a transaction is not "
                "supported on SQLite < 3.26 because it would break referential "
                "integrity. Try adding `atomic = False` to the Migration class."
            ) % (model._meta.db_table, old_field_name))
        with atomic(self.connection.alias):
            super().alter_field(model, old_field, new_field, strict=strict)
            # Follow SQLite's documented procedure for performing changes
            # that don't affect the on-disk content.
            # https://sqlite.org/lang_altertable.html#otheralter
            with self.connection.cursor() as cursor:
                schema_version = cursor.execute(
                    "PRAGMA schema_version").fetchone()[0]
                cursor.execute("PRAGMA writable_schema = 1")
                # Rewrite every stored DDL statement that references the old
                # column name in a REFERENCES clause against this table.
                references_template = ' REFERENCES "%s" ("%%s") ' % table_name
                new_column_name = new_field.get_attname_column()[1]
                search = references_template % old_column_name
                replacement = references_template % new_column_name
                cursor.execute(
                    "UPDATE sqlite_master SET sql = replace(sql, %s, %s)",
                    (search, replacement),
                )
                cursor.execute("PRAGMA schema_version = %d" % (schema_version + 1))
                cursor.execute("PRAGMA writable_schema = 0")
                # The integrity check will raise an exception and rollback
                # the transaction if the sqlite_master updates corrupt the
                # database.
                cursor.execute("PRAGMA integrity_check")
        # Perform a VACUUM to refresh the database representation from
        # the sqlite_master table.
        with self.connection.cursor() as cursor:
            cursor.execute("VACUUM")
    else:
        super().alter_field(model, old_field, new_field, strict=strict)
def join_results(row, resolved):
    """
    Generate every possible result list for the given row data and the
    resolved (alias, db_column) pairs.

    :param row: mapping of alias -> {db_column: raw value}
    :param resolved: iterable of (alias, db_column) pairs
    :return: generator of value lists
    """
    if not resolved:
        yield []
        return
    pending = list(resolved)
    values = []
    while pending:
        alias, db_column = pending.pop(0)
        try:
            raw_val = row[alias][db_column]
        except KeyError:
            values.append(None)
            continue
        # Look up the model field by its column, or fall back to the field
        # name when this is a reverse related field.
        field = None
        for candidate in alias.model._meta.concrete_fields:
            if candidate.column == db_column:
                field = candidate
                break
        if field is None:
            field = alias.model._meta.get_field(db_column)
        if hasattr(field, "to_python"):
            values.append(field.to_python(raw_val))
        elif isinstance(raw_val, list):
            # To-many value: emit one result per element, combined with every
            # combination produced by the remaining pairs.
            for val in raw_val:
                for subresult in join_results(row, pending[:]):
                    yield values + [val] + subresult
            return
        else:
            raise NotSupportedError(
                "the result from the api for %s.%s is not supported : %s"
                % (alias.model, db_column, raw_val))
    yield values
def save(self, *args, **kwargs):
    """Save the row, pinning it to the current tenant.

    Refuses any update that would change the tenant column; when the
    object already belongs to a different tenant, the current tenant is
    temporarily switched to the object's tenant for the save and always
    restored afterwards.
    """
    if hasattr(self, "_try_update_tenant"):
        raise NotSupportedError("Tenant column of a row cannot be updated.")
    previous_tenant = get_current_tenant()
    tenant_value = get_current_tenant_value()
    set_object_tenant(self, tenant_value)
    if self.tenant_value and tenant_value != self.tenant_value:
        set_current_tenant(get_object_tenant(self))
    try:
        saved = super(TenantModelMixin, self).save(*args, **kwargs)
    finally:
        # Restore whatever tenant was active before this save.
        set_current_tenant(previous_tenant)
    return saved
def alter_field(self, model, old_field, new_field, strict=False):
    # Renaming a column that other models reference requires rewriting the
    # stored DDL in sqlite_master, which cannot happen inside a transaction.
    old_field_name = old_field.name
    if (new_field.name != old_field_name and
            any(r.field_name == old_field.name for r in model._meta.related_objects)):
        if self.connection.in_atomic_block:
            raise NotSupportedError((
                'Renaming the %r.%r column while in a transaction is not '
                'supported on SQLite because it would break referential '
                'integrity. Try adding `atomic = False` to the Migration class.'
            ) % (model._meta.db_table, old_field_name))
        with atomic(self.connection.alias):
            super().alter_field(model, old_field, new_field, strict=strict)
            # Follow SQLite's documented procedure for performing changes
            # that don't affect the on-disk content.
            # https://sqlite.org/lang_altertable.html#otheralter
            with self.connection.cursor() as cursor:
                schema_version = cursor.execute(
                    'PRAGMA schema_version').fetchone()[0]
                cursor.execute('PRAGMA writable_schema = 1')
                # Rewrite every stored DDL statement whose REFERENCES clause
                # points at the old column name of this table.
                table_name = model._meta.db_table
                references_template = ' REFERENCES "%s" ("%%s") ' % table_name
                old_column_name = old_field.get_attname_column()[1]
                new_column_name = new_field.get_attname_column()[1]
                search = references_template % old_column_name
                replacement = references_template % new_column_name
                cursor.execute(
                    'UPDATE sqlite_master SET sql = replace(sql, %s, %s)',
                    (search, replacement))
                cursor.execute('PRAGMA schema_version = %d' % (schema_version + 1))
                cursor.execute('PRAGMA writable_schema = 0')
                # The integrity check will raise an exception and rollback
                # the transaction if the sqlite_master updates corrupt the
                # database.
                cursor.execute('PRAGMA integrity_check')
        # Perform a VACUUM to refresh the database representation from
        # the sqlite_master table.
        with self.connection.cursor() as cursor:
            cursor.execute('VACUUM')
    else:
        super().alter_field(model, old_field, new_field, strict=strict)
def join_results(row, resolved):
    """
    Generate every possible result list for the given row data and the
    resolved (alias, attrname) pairs.

    :param row: mapping of alias -> {attrname: raw value}
    :param resolved: iterable of (alias, attrname) pairs
    :return: generator of value lists
    """
    if not resolved:
        yield []
        return
    pending = list(resolved)
    values = []
    while pending:
        alias, attrname = pending.pop(0)
        try:
            raw_val = row[alias][attrname]
        except KeyError:
            values.append(None)
            continue
        field = alias.model._meta.get_field(attrname)
        if hasattr(field, "to_python"):
            values.append(field.to_python(raw_val))
        elif isinstance(raw_val, list):
            # To-many value: emit one result per element, combined with every
            # combination produced by the remaining pairs.
            for val in raw_val:
                for subresult in join_results(row, pending[:]):
                    yield values + [val] + subresult
            return
        else:
            raise NotSupportedError(
                "the result from the api for %s.%s is not supported : %s"
                % (alias.model, attrname, raw_val))
    yield values