def test_alter_constraints(self):
    """
    Tests that going from a PositiveIntegerField to an IntegerField drops
    the constraint on the database.
    """
    # Only applies to databases that support CHECK constraints
    if not db.has_check_constraints:
        return
    # Make the test table
    db.create_table("test_alterc", [
        ('num', models.PositiveIntegerField()),
    ])
    # Add in some test values
    db.execute("INSERT INTO test_alterc (num) VALUES (1)")
    db.execute("INSERT INTO test_alterc (num) VALUES (2)")
    # Ensure that adding a negative number is bad
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
    except:
        db.rollback_transaction()
    else:
        self.fail("Could insert a negative integer into a PositiveIntegerField.")
    # Alter it to a normal IntegerField
    db.alter_column("test_alterc", "num", models.IntegerField())
    # It should now work
    db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
    db.delete_table("test_alterc")
    # We need to match up for tearDown
    db.start_transaction()

def _class_prepared_handler(sender, **kwargs):
    """
    Signal handler for class_prepared. This will be run for every model,
    looking for the moment when all dependent models are prepared for the
    first time. It will then run the given function, only once.
    """
    sender_app = sender._meta.app_label.lower() + '.' + sender._meta.object_name
    already_prepared = set([sender_app])
    for app, models in app_cache.app_models.items():
        for model_name, model in models.items():
            already_prepared.add(app.lower() + '.' + model_name)

    if all([x in already_prepared for x in dependencies]):
        db.start_transaction()
        try:
            # We need to disconnect, otherwise each new dynamo model generation
            # will trigger it and cause a "maximum recursion error"
            class_prepared.disconnect(_class_prepared_handler, weak=False)
            fn()
        except DatabaseError, message:
            # If tables are missing altogether, not much we can do
            # until syncdb/migrate is run. "The code must go on" in this
            # case, without running our function completely. At least
            # database operations will be rolled back.
            db.rollback_transaction()
            # Better connect again
            if message != 'no such table: dynamo_metamodel':
                class_prepared.connect(_class_prepared_handler, weak=False)
            else:
                raise
        else:
            db.commit_transaction()

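# A minimal registration sketch (assumed, not part of the original snippet):
# the handler above closes over `fn` and `dependencies`, and it re-connects
# itself with weak=False after a DatabaseError, so the initial hookup would
# look the same.
from django.db.models.signals import class_prepared

class_prepared.connect(_class_prepared_handler, weak=False)
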
def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model
    """
    # Table is typically appname_modelname, but it could be different
    # always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .settings import FIELD_REGISTRY
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(
            _('%(dependency)s must be installed for this command to work') %
            {'dependency': 'South'})

    mdl = models.get_model(app_name, model_name)
    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
        print(
            _('Dropping ForeignKey %(field_name)s from %(model_name)s') % {
                'field_name': field_name,
                'model_name': model_name
            })
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError, e:
            db.rollback_transaction()
            raise e

def forwards(self, orm):
    # Adding field 'Post.is_private'
    db.start_transaction()
    safe_add_column(
        'askbot_post',
        'is_private',
        self.gf('django.db.models.fields.BooleanField')(default=False),
        keep_default=False)

    # Adding field 'ReplyAddress.reply_action'
    safe_add_column(
        'askbot_replyaddress',
        'reply_action',
        self.gf('django.db.models.fields.CharField')(
            default='auto_answer_or_comment', max_length=32),
        keep_default=False)

    # Changing field 'ReplyAddress.post'
    db.alter_column(
        'askbot_replyaddress',
        'post_id',
        self.gf('django.db.models.fields.related.ForeignKey')(
            null=True, to=orm['askbot.Post']))
    db.commit_transaction()

    try:
        db.start_transaction()
        # Adding field 'User.email_signature'
        safe_add_column(
            u'auth_user',
            'email_signature',
            self.gf('django.db.models.fields.TextField')(
                blank=True, default=''),
            keep_default=False)
        db.commit_transaction()
    except:
        db.rollback_transaction()

def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model
    """
    # Table is typically appname_modelname, but it could be different
    # always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .settings import FIELD_REGISTRY
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(_('%(dependency)s must be installed for this command to work')
                                   % {'dependency': 'South'})

    mdl = models.get_model(app_name, model_name)
    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
        print(_('Dropping ForeignKey %(field_name)s from %(model_name)s')
              % {'field_name': field_name, 'model_name': model_name})
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError, e:
            db.rollback_transaction()
            raise e

def test_dry_rename(self):
    """
    Test column renaming while --dry-run is turned on (should do nothing)
    See ticket #65
    """
    cursor = connection.cursor()
    db.create_table("test_drn", [('spam', models.BooleanField(default=False))])
    # Make sure we can select the column
    cursor.execute("SELECT spam FROM test_drn")
    # Rename it
    db.dry_run = True
    db.rename_column("test_drn", "spam", "eggs")
    db.dry_run = False
    cursor.execute("SELECT spam FROM test_drn")
    db.commit_transaction()
    db.start_transaction()
    try:
        cursor.execute("SELECT eggs FROM test_drn")
    except:
        pass
    else:
        self.fail("Dry-renamed new column could be selected!")
    db.rollback_transaction()
    db.delete_table("test_drn")
    db.start_transaction()

def forwards(self, orm):
    # Removing unique constraint on 'BadgeData', fields ['type', 'name']
    #db.delete_unique('askbot_badgedata', ['type', 'name'])

    # Deleting field 'BadgeData.multiple'
    db.delete_column('askbot_badgedata', 'multiple')

    # Deleting field 'BadgeData.description'
    db.delete_column('askbot_badgedata', 'description')

    # Deleting field 'BadgeData.type'
    db.delete_column('askbot_badgedata', 'type')

    # Deleting field 'BadgeData.name'
    db.delete_column('askbot_badgedata', 'name')

    # Changing field 'BadgeData.slug'
    db.alter_column(
        'askbot_badgedata',
        'slug',
        self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50))

    # Adding unique constraint on 'BadgeData', fields ['slug']
    try:  # work around the South 0.7.3 bug
        db.start_transaction()
        db.create_unique('askbot_badgedata', ['slug'])
        db.commit_transaction()
    except:
        db.rollback_transaction()

def forwards(self, orm):
    # move some models from maps to layers app
    # 0. add some missing fields (not for wfp)
    db.start_transaction()
    try:
        # Adding field 'Layer.bbox_top'
        db.add_column('maps_layer', 'bbox_top',
                      self.gf('django.db.models.fields.FloatField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_bottom'
        db.add_column('maps_layer', 'bbox_bottom',
                      self.gf('django.db.models.fields.FloatField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_left'
        db.add_column('maps_layer', 'bbox_left',
                      self.gf('django.db.models.fields.FloatField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_right'
        db.add_column('maps_layer', 'bbox_right',
                      self.gf('django.db.models.fields.FloatField')(null=True, blank=True),
                      keep_default=False)
        db.commit_transaction()
    except:
        print 'No need to create the fields, they are already there'
        db.rollback_transaction()

    # 1. layers_layer moved from maps_layer
    db.rename_table('maps_layer', 'layers_layer')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(
            app_label='maps', model='layer').update(app_label='layers')

    # 2. layers_contactrole moved from maps_contactrole
    db.rename_table('maps_contactrole', 'layers_contactrole')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(
            app_label='maps', model='contactrole').update(app_label='layers')

def test_alter(self):
    """
    Test altering columns/tables
    """
    db.create_table("test4", [
        ('spam', models.BooleanField(default=False)),
        ('eggs', models.IntegerField()),
    ])
    db.start_transaction()
    # Add a column
    db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)
    # Add a FK with keep_default=False (#69)
    User = db.mock_model(model_name='User', db_table='auth_user',
                         db_tablespace='', pk_field_name='id',
                         pk_field_type=models.AutoField, pk_field_args=[],
                         pk_field_kwargs={})
    db.add_column("test4", "user", models.ForeignKey(User, null=True), keep_default=False)
    db.delete_column("test4", "add1")
    db.rollback_transaction()
    db.delete_table("test4")

def test_alter_column_postgres_multiword(self):
    """
    Tests altering columns with multiple words in Postgres types (issue #125)
    e.g. 'datetime with time zone', look at
    django/db/backends/postgresql/creation.py
    """
    db.create_table("test_multiword", [
        ('col_datetime', models.DateTimeField(null=True)),
        ('col_integer', models.PositiveIntegerField(null=True)),
        ('col_smallint', models.PositiveSmallIntegerField(null=True)),
        ('col_float', models.FloatField(null=True)),
    ])

    # test if 'double precision' is preserved
    db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True))

    # test if 'CHECK ("%(column)s" >= 0)' is stripped
    db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True))
    db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True))

    # test if 'with timezone' is preserved
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')")
        db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True))
        assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0]
        db.rollback_transaction()

    db.delete_table("test_multiword")

def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations forwards, in order.
    """
    for migration in migrations:
        app_name = get_app_name(app)
        if not silent:
            print " > %s: %s" % (app_name, migration)
        klass = get_migration(app, migration)

        if fake:
            if not silent:
                print " (faked)"
        else:
            if db_dry_run:
                db.dry_run = True

            db.start_transaction()
            try:
                klass().forwards()
                db.execute_deferred_sql()
            except:
                db.rollback_transaction()
                raise
            else:
                db.commit_transaction()

        if not db_dry_run:
            # Record us as having done this
            record = MigrationHistory.for_migration(app_name, migration)
            record.applied = datetime.datetime.utcnow()
            record.save()

def forwards(self, orm):
    # Removing unique constraint on 'BadgeData', fields ['type', 'name']
    db.delete_unique('askbot_badgedata', ['type', 'name'])

    # Deleting field 'BadgeData.multiple'
    db.delete_column('askbot_badgedata', 'multiple')

    # Deleting field 'BadgeData.description'
    db.delete_column('askbot_badgedata', 'description')

    # Deleting field 'BadgeData.type'
    db.delete_column('askbot_badgedata', 'type')

    # Deleting field 'BadgeData.name'
    db.delete_column('askbot_badgedata', 'name')

    # Changing field 'BadgeData.slug'
    db.alter_column('askbot_badgedata', 'slug',
                    self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50))

    # Adding unique constraint on 'BadgeData', fields ['slug']
    return
    try:  # work around the South 0.7.3 bug
        db.start_transaction()
        db.create_unique('askbot_badgedata', ['slug'])
        db.commit_transaction()
    except:
        db.rollback_transaction()

def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations backwards, in order, skipping those
    migrations in 'ignore'.
    """
    for migration in migrations:
        if migration not in ignore:
            app_name = get_app_name(app)
            if not silent:
                print " < %s: %s" % (app_name, migration)
            klass = get_migration(app, migration)
            if fake:
                if not silent:
                    print " (faked)"
            else:
                if db_dry_run:
                    db.dry_run = True

                db.start_transaction()
                try:
                    klass().backwards()
                    db.execute_deferred_sql()
                except:
                    db.rollback_transaction()
                    raise
                else:
                    db.commit_transaction()

            if not db_dry_run:
                # Record us as having not done this
                record = MigrationHistory.for_migration(app_name, migration)
                record.delete()

def forwards(self, orm):
    # If there are duplicated document permissions, we'll have an error
    # when we try to create this index. So to protect against that, we
    # should delete those documents before we create the index.

    # We need to wrap the data migration and alter operation in separate
    # transactions for PostgreSQL.
    # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
    try:
        db.start_transaction()
        duplicated_records = DocumentPermission.objects \
            .values('doc_id', 'perms') \
            .annotate(id_count=models.Count('id')) \
            .filter(id_count__gt=1)

        # Delete all but the first document.
        for record in duplicated_records:
            docs = DocumentPermission.objects \
                .values_list('id', flat=True) \
                .filter(
                    doc_id=record['doc_id'],
                    perms=record['perms'],
                )[1:]
            docs = list(docs)

            logging.warn('Deleting permissions %s' % docs)

            DocumentPermission.objects.filter(id__in=docs).delete()
        db.commit_transaction()
    except Exception, e:
        db.rollback_transaction()
        raise e

def forwards(self, orm):
    # If there are duplicated documents, we'll have an error when we try to
    # create this index. So to protect against that, we should delete those
    # documents before we create the index.

    # We need to wrap the data migration and alter operation in separate
    # transactions for PostgreSQL.
    # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
    try:
        db.start_transaction()
        duplicated_records = Document.objects \
            .values('content_type_id', 'object_id') \
            .annotate(id_count=models.Count('id')) \
            .filter(id_count__gt=1)

        # Delete all but the first document.
        for record in duplicated_records:
            docs = Document.objects \
                .values_list('id', flat=True) \
                .filter(
                    content_type_id=record['content_type_id'],
                    object_id=record['object_id'],
                )[1:]
            docs = list(docs)

            logging.warn('Deleting documents %s' % docs)

            Document.objects.filter(id__in=docs).delete()
        db.commit_transaction()
    except Exception, e:
        db.rollback_transaction()
        raise e

def safe_add_column(table, column, column_data, keep_default=False):
    """When the user calls syncdb with askbot for the first time, the
    auth_user table is created together with the patched columns, so we need
    to add these columns here in separate transactions and roll back if they
    fail; if we wanted to, we could also record which columns clash.
    """
    if db.backend_name in ('mysql', 'postgres'):
        if len(
            db.execute(
                'select column_name from information_schema.columns where table_name=%s and column_name=%s',
                params=[table, column])) == 0:
            db.add_column(table, column, column_data, keep_default=keep_default)
    else:
        try:
            db.start_transaction()
            db.add_column(table, column, column_data, keep_default=keep_default)
            db.commit_transaction()
            return True
        except:
            db.rollback_transaction()
            return False

def backwards(self, orm):
    db.start_transaction()
    try:
        db.rename_column(u'dingos_factdatatype', 'namespace_id', 'name_space_id')
        db.commit_transaction()
    except:
        db.rollback_transaction()

def migrate_app(app, *args, **kwargs):
    """
    Migrate all models of this app registered
    """
    # pull the information from the registry
    if not isinstance(app, basestring):
        return

    fields = [fld for fld in field_registry.keys() if fld.startswith(app)]

    # call the south commands to add the fields/tables
    for fld in fields:
        app_name, model_name, field_name = fld.split('.')

        # Table is typically appname_modelname, but it could be different
        # always best to be sure.
        mdl = models.get_model(app_name, model_name)

        if isinstance(field_registry[fld], CategoryFKField):
            print "Adding ForeignKey %s to %s" % (field_name, model_name)
            try:
                db.start_transaction()
                table_name = mdl._meta.db_table
                field_registry[fld].default = -1
                db.add_column(table_name, field_name, field_registry[fld], keep_default=False)
                db.commit_transaction()
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    print "Already exists"
                else:
                    raise e
        elif isinstance(field_registry[fld], CategoryM2MField):
            print "Adding Many2Many table between %s and %s" % (model_name, 'category')
            table_name = "%s_%s" % (mdl._meta.db_table, 'categories')
            try:
                db.start_transaction()
                db.create_table(table_name, (
                    ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                    (model_name, models.ForeignKey(mdl, null=False)),
                    ('category', models.ForeignKey(Category, null=False)),
                ))
                db.create_unique(table_name, ['%s_id' % model_name, 'category_id'])
                db.commit_transaction()
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    print "Already exists"
                else:
                    raise e

def test_text_default(self):
    """
    MySQL cannot have blank defaults on TEXT columns.
    """
    db.start_transaction()
    try:
        db.create_table("test_textdef", [("textcol", models.TextField(blank=True))])
    finally:
        db.rollback_transaction()

def done_migrate(self, migration):
    db.start_transaction()
    try:
        # Record us as having done this
        self.record(migration)
    except:
        db.rollback_transaction()
        raise
    else:
        db.commit_transaction()

def test_text_default(self):
    """
    MySQL cannot have blank defaults on TEXT columns.
    """
    db.start_transaction()
    try:
        db.create_table("test_textdef", [
            ('textcol', models.TextField(blank=True)),
        ])
    finally:
        db.rollback_transaction()

def test_capitalised_constraints(self):
    """
    Under PostgreSQL at least, capitalised constraints must be quoted.
    """
    db.start_transaction()
    try:
        db.create_table("test_capconst", [("SOMECOL", models.PositiveIntegerField(primary_key=True))])
        # Alter it so it's not got the check constraint
        db.alter_column("test_capconst", "SOMECOL", models.IntegerField())
    finally:
        db.rollback_transaction()

def ignore_exists(fun, *args, **kwargs):
    try:
        fun(*args, **kwargs)
    except Exception, exc:
        if "exists" in str(exc):
            # don't panic, everything is ok: it's just a hack
            if db.has_ddl_transactions:
                db.rollback_transaction()
                db.start_transaction()
            return False
        raise

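# A minimal usage sketch (not part of the original snippet; the table and
# column names are made up): ignore_exists() swallows "already exists" errors
# from a schema operation and, on backends with transactional DDL, rolls back
# and reopens the transaction so later operations can still run.
ignore_exists(
    db.create_table,
    "example_table",
    [("id", models.AutoField(primary_key=True))],
)
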
def migrate_app(app, *args, **kwargs):
    """
    Migrate all models of this app registered
    """
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured("South must be installed for this command to work")

    # pull the information from the registry
    if not isinstance(app, basestring):
        return

    fields = [fld for fld in FIELD_REGISTRY.keys() if fld.startswith(app)]

    # call the south commands to add the fields/tables
    for fld in fields:
        app_name, model_name, field_name = fld.split('.')

        # Table is typically appname_modelname, but it could be different
        # always best to be sure.
        mdl = models.get_model(app_name, model_name)

        if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
            print "Adding ForeignKey %s to %s" % (field_name, model_name)
            try:
                db.start_transaction()
                table_name = mdl._meta.db_table
                FIELD_REGISTRY[fld].default = -1
                db.add_column(table_name, field_name, FIELD_REGISTRY[fld], keep_default=False)
                db.commit_transaction()
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    print "Already exists"
                else:
                    raise e
        elif isinstance(FIELD_REGISTRY[fld], CategoryM2MField):
            print "Adding Many2Many table between %s and %s" % (model_name, 'category')
            table_name = "%s_%s" % (mdl._meta.db_table, 'categories')
            try:
                db.start_transaction()
                db.create_table(table_name, (
                    ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                    (model_name, models.ForeignKey(mdl, null=False)),
                    ('category', models.ForeignKey(Category, null=False))
                ))
                db.create_unique(table_name, ['%s_id' % model_name, 'category_id'])
                db.commit_transaction()
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    print "Already exists"
                else:
                    raise e

def delete_db_table(model_class):
    table_name = model_class._meta.db_table
    db.start_transaction()
    try:
        db.delete_table(table_name)
        db.commit_transaction()
    except Exception, e:
        print e
        db.rollback_transaction()

def forwards(self, orm):
    # As opposed to Document1, we can't just delete Document2 documents if
    # there is a duplication because it actually holds data. So instead
    # we'll just find duplications and emit a better error message.

    # Note we reset the `order_by` to make sure to prevent error with mysql:
    # ORDER BY clause is not in GROUP BY clause and contains nonaggregated column.
    duplicated_records = Document2.objects \
        .values('uuid', 'version', 'is_history') \
        .annotate(id_count=models.Count('id')) \
        .filter(id_count__gt=1) \
        .order_by()
    duplicated_records = list(duplicated_records)

    duplicated_ids = []
    duplicated_ids_to_delete = []

    for record in duplicated_records:
        docs = Document2.objects \
            .values_list('id', flat=True) \
            .filter(
                uuid=record['uuid'],
                version=record['version'],
                is_history=record['is_history'],
            ) \
            .order_by('-last_modified')
        duplicated_ids.extend(docs)
        duplicated_ids_to_delete.extend(docs[1:])  # Keep the most recent duplicate

    if duplicated_ids:
        msg = 'Found duplicated Document2 records! %(duplicated_ids)s.\n' \
              'This will require manual merging or deletion of the Document2 records with duplicate ids and rerunning the migration.\n' \
              'For more information on manipulating Hue built in objects, have a look at our blog: http://gethue.com/hue-api-execute-some-builtin-commands/\n' \
              'For example, to delete Document2 records with the oldest duplicated ids execute the following:\n\n' \
              'from desktop.models import Document2\n' \
              'Document2.objects.filter(id__in=%(duplicated_ids_to_delete)s).delete()\n' % {
                  'duplicated_ids': duplicated_ids,
                  'duplicated_ids_to_delete': duplicated_ids_to_delete
              }
        logging.error(msg)
        raise RuntimeError(msg)

    try:
        db.start_transaction()
        # Adding unique constraint on 'Document2', fields ['uuid', 'version', 'is_history']
        db.create_unique(u'desktop_document2', ['uuid', 'version', 'is_history'])
        db.commit_transaction()
    except Exception as e:
        db.rollback_transaction()
        raise e

def test_capitalised_constraints(self):
    """
    Under PostgreSQL at least, capitalised constraints must be quoted.
    """
    db.start_transaction()
    try:
        db.create_table("test_capconst", [
            ('SOMECOL', models.PositiveIntegerField(primary_key=True)),
        ])
        # Alter it so it's not got the check constraint
        db.alter_column("test_capconst", "SOMECOL", models.IntegerField())
    finally:
        db.rollback_transaction()

def run_migration(self, migration):
    migration_function = self.direction(migration)
    db.start_transaction()
    try:
        migration_function()
        db.execute_deferred_sql()
    except:
        db.rollback_transaction()
        if not db.has_ddl_transactions:
            print self.run_migration_error(migration)
        raise
    else:
        db.commit_transaction()

def test_column_constraint(self):
    """
    Tests that the value constraint of PositiveIntegerField is enforced on
    the database level.
    """
    if not db.has_check_constraints:
        return

    db.create_table("test_column_constraint", [
        ('spam', models.PositiveIntegerField()),
    ])
    db.execute_deferred_sql()

    # Make sure we can't insert negative values
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_column_constraint VALUES (-42)")
    except:
        pass
    else:
        self.fail("Could insert a negative value into a PositiveIntegerField.")
    db.rollback_transaction()

    # remove constraint
    db.alter_column("test_column_constraint", "spam", models.IntegerField())
    db.execute_deferred_sql()
    # make sure the insertion works now
    db.execute('INSERT INTO test_column_constraint VALUES (-42)')
    db.execute('DELETE FROM test_column_constraint')

    # add it back again
    db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField())
    db.execute_deferred_sql()
    # it should fail again
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_column_constraint VALUES (-42)")
    except:
        pass
    else:
        self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.")
    db.rollback_transaction()

    db.delete_table("test_column_constraint")
    db.start_transaction()

def forwards(self, orm):
    # As opposed to Document1, we can't just delete Document2 documents if
    # there is a duplication because it actually holds data. So instead
    # we'll just find duplications and emit a better error message.

    # Note we reset the `order_by` to make sure to prevent error with mysql:
    # ORDER BY clause is not in GROUP BY clause and contains nonaggregated column.
    duplicated_records = Document2.objects \
        .values('uuid', 'version', 'is_history') \
        .annotate(id_count=models.Count('id')) \
        .filter(id_count__gt=1) \
        .order_by()
    duplicated_records = list(duplicated_records)

    duplicated_ids = []
    duplicated_ids_to_delete = []

    for record in duplicated_records:
        docs = Document2.objects \
            .values_list('id', flat=True) \
            .filter(
                uuid=record['uuid'],
                version=record['version'],
                is_history=record['is_history'],
            ) \
            .order_by('-last_modified')
        duplicated_ids.extend(docs)
        duplicated_ids_to_delete.extend(docs[1:])  # Keep the most recent duplicate

    if duplicated_ids:
        msg = 'Found duplicated Document2 records! %(duplicated_ids)s.\n' \
              'This will require manual merging or deletion of the Document2 records with duplicate ids and rerunning the migration.\n' \
              'For more information on manipulating Hue built in objects, have a look at our blog: http://gethue.com/hue-api-execute-some-builtin-commands/\n' \
              'For example, to delete Document2 records with the oldest duplicated ids execute the following:\n\n' \
              'from desktop.models import Document2\n' \
              'Document2.objects.filter(id__in=%(duplicated_ids_to_delete)s).delete()\n' % {
                  'duplicated_ids': duplicated_ids,
                  'duplicated_ids_to_delete': duplicated_ids_to_delete
              }
        logging.error(msg)
        raise RuntimeError(msg)

    try:
        db.start_transaction()
        # Adding unique constraint on 'Document2', fields ['uuid', 'version', 'is_history']
        db.create_unique(u'desktop_document2', ['uuid', 'version', 'is_history'])
        db.commit_transaction()
    except Exception, e:
        db.rollback_transaction()
        raise e

def test_datetime_default(self):
    """
    Test that defaults are correctly not created for datetime columns
    """
    end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1)

    try:
        from django.utils import timezone
    except ImportError:
        pass
    else:
        from django.conf import settings
        if getattr(settings, 'USE_TZ', False):
            end_of_world = end_of_world.replace(tzinfo=timezone.utc)

    db.create_table("test_datetime_def", [
        ('col0', models.IntegerField(null=True)),
        ('col1', models.DateTimeField(default=end_of_world)),
        ('col2', models.DateTimeField(null=True)),
    ])
    db.execute_deferred_sql()
    # insert a row
    db.execute("INSERT INTO test_datetime_def (col0, col1, col2) values (null,%s,null)", [end_of_world])
    db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world))
    db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world), keep_default=False)
    db.execute_deferred_sql()
    db.commit_transaction()
    # In the single existing row, we now expect col1=col2=col3=end_of_world...
    db.start_transaction()
    ends = db.execute("select col1,col2,col3 from test_datetime_def")[0]
    self.failUnlessEqual(len(ends), 3)
    for e in ends:
        self.failUnlessEqual(e, end_of_world)
    db.commit_transaction()
    # ...but there should not be a default in the database for col1 or col3
    for cols in ["col1,col2", "col2,col3"]:
        db.start_transaction()
        statement = "insert into test_datetime_def (col0,%s) values (null,%%s,%%s)" % cols
        self.assertRaises(IntegrityError, db.execute, statement, [end_of_world, end_of_world])
        db.rollback_transaction()

    db.start_transaction()  # To preserve the sanity and semantics of this test class

def test_add_unique_fk(self):
    """
    Test adding a ForeignKey with unique=True or a OneToOneField
    """
    db.create_table("test_add_unique_fk", [
        ('spam', models.BooleanField(default=False))
    ])
    db.start_transaction()

    db.add_column("test_add_unique_fk", "mock1",
                  models.ForeignKey(db.mock_model('Mock', 'mock'), null=True, unique=True))
    db.add_column("test_add_unique_fk", "mock2",
                  models.OneToOneField(db.mock_model('Mock', 'mock'), null=True))

    db.rollback_transaction()
    db.delete_table("test_add_unique_fk")

def forwards(self, orm):
    "Write your forwards methods here."
    try:
        db.start_transaction()
        entities = orm.Profile.objects.values('entity').all()
        print entities
        found = True
        db.commit_transaction()
    except:
        db.rollback_transaction()
        found = False
    if not found:
        db.add_column('profiles_profile', 'entity',
                      self.gf('django.db.models.fields.related.ForeignKey')(
                          to=orm['entities.Entity'], null=True, blank=True),
                      keep_default=False)

def backwards(self, orm):
    "Write your backwards methods here."
    try:
        db.start_transaction()
        entities = orm.Profile.objects.values('entity').all()
        print entities
        found = True
        db.commit_transaction()
    except:
        db.rollback_transaction()
        found = False
    if found:
        db.delete_column('profiles_profile', 'entity_id')

def test_add_unique_fk(self):
    """
    Test adding a ForeignKey with unique=True or a OneToOneField
    """
    db.create_table("test_add_unique_fk", [("spam", models.BooleanField(default=False))])
    db.start_transaction()

    db.add_column(
        "test_add_unique_fk", "mock1",
        models.ForeignKey(db.mock_model("Mock", "mock"), null=True, unique=True)
    )
    db.add_column("test_add_unique_fk", "mock2",
                  models.OneToOneField(db.mock_model("Mock", "mock"), null=True))

    db.rollback_transaction()
    db.delete_table("test_add_unique_fk")

def safe_add_column(table, column, column_data, keep_default=False):
    """When the user calls syncdb with askbot for the first time, the
    auth_user table is created together with the patched columns, so we need
    to add these columns here in separate transactions and roll back if they
    fail; if we wanted to, we could also record which columns clash.
    """
    try:
        db.start_transaction()
        db.add_column(table, column, column_data, keep_default=keep_default)
        db.commit_transaction()
        return True
    except:
        db.rollback_transaction()
        return False

def test_alter_unique(self):
    """
    Tests that unique constraints are properly created and deleted when
    altering columns.
    """
    db.create_table("test_alter_unique", [
        ('spam', models.IntegerField()),
        ('eggs', models.IntegerField(unique=True)),
    ])
    db.execute_deferred_sql()

    # Make sure the unique constraint is created
    db.execute('INSERT INTO test_alter_unique VALUES (0, 42)')
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Could insert the same integer twice into a field with unique=True.")
    db.rollback_transaction()

    # remove constraint
    db.alter_column("test_alter_unique", "eggs", models.IntegerField())
    # make sure the insertion works now
    db.execute('INSERT INTO test_alter_unique VALUES (1, 42)')

    # add it back again
    db.execute('DELETE FROM test_alter_unique WHERE spam=1')
    db.alter_column("test_alter_unique", "eggs", models.IntegerField(unique=True))
    # it should fail again
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Unique constraint not created during alter_column()")
    db.rollback_transaction()

    # Delete the unique index/constraint
    if db.backend_name != "sqlite3":
        db.delete_unique("test_alter_unique", ["eggs"])
    db.delete_table("test_alter_unique")

    db.start_transaction()

def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model
    """
    # Table is typically appname_modelname, but it could be different
    # always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .registration import registry
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(
            _("%(dependency)s must be installed for this command to work")
            % {"dependency": "South"}
        )

    mdl = models.get_model(app_name, model_name)
    fld = "%s.%s.%s" % (app_name, model_name, field_name)

    if isinstance(registry._field_registry[fld], CategoryFKField):
        print(
            _("Dropping ForeignKey %(field_name)s from %(model_name)s")
            % {"field_name": field_name, "model_name": model_name}
        )
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError as e:
            db.rollback_transaction()
            raise e
    elif isinstance(registry._field_registry[fld], CategoryM2MField):
        print(
            _("Dropping Many2Many table between %(model_name)s and %(category_table)s")
            % {"model_name": model_name, "category_table": "category"}
        )
        try:
            db.start_transaction()
            # The M2M table name follows the pattern used when it was created
            # in migrate_app(): "<model table>_categories".
            table_name = "%s_%s" % (mdl._meta.db_table, "categories")
            db.delete_table(table_name, cascade=False)
            db.commit_transaction()
        except DatabaseError as e:
            db.rollback_transaction()
            raise e

def forwards(self, orm):
    # move some models from maps to layers app
    # 0. add some missing fields (not for wfp)
    db.start_transaction()
    try:
        # Adding field 'Layer.bbox_top'
        db.add_column('maps_layer', 'bbox_top',
                      self.gf('django.db.models.fields.FloatField')(
                          null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_bottom'
        db.add_column('maps_layer', 'bbox_bottom',
                      self.gf('django.db.models.fields.FloatField')(
                          null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_left'
        db.add_column('maps_layer', 'bbox_left',
                      self.gf('django.db.models.fields.FloatField')(
                          null=True, blank=True),
                      keep_default=False)
        # Adding field 'Layer.bbox_right'
        db.add_column('maps_layer', 'bbox_right',
                      self.gf('django.db.models.fields.FloatField')(
                          null=True, blank=True),
                      keep_default=False)
        db.commit_transaction()
    except:
        print 'No need to create the fields, they are already there'
        db.rollback_transaction()

    # 1. layers_layer moved from maps_layer
    db.rename_table('maps_layer', 'layers_layer')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(
            app_label='maps', model='layer').update(app_label='layers')

    # 2. layers_contactrole moved from maps_contactrole
    db.rename_table('maps_contactrole', 'layers_contactrole')
    if not db.dry_run:
        orm['contenttypes.contenttype'].objects.filter(
            app_label='maps', model='contactrole').update(app_label='layers')

def test_foreign_keys(self):
    """
    Tests foreign key creation, especially uppercase (see #61)
    """
    Test = db.mock_model(model_name='Test', db_table='test5a',
                         db_tablespace='', pk_field_name='ID',
                         pk_field_type=models.AutoField, pk_field_args=[])
    cursor = connection.cursor()
    db.start_transaction()
    db.create_table("test5a", [('ID', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True))])
    db.create_table("test5b", [
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('UNIQUE', models.ForeignKey(Test)),
    ])
    db.execute_deferred_sql()
    db.rollback_transaction()

def get(self, *args, **kwargs):
    try:
        db.start_transaction()

        # Delete old RetrvScheme, clear data
        db.delete_table(RetrvSoftware._meta.db_table)
        RetrvSoftwareField.objects.all().delete()

        # Add builtin field
        for field in BuiltinRetrvField.objects.filter(scheme__current=True).order_by('sort'):
            RetrvSoftwareField(
                field_name=field.field_name,
                field_label=Software._meta.get_field(field.field_name).verbose_name,
                display=field.display,
                retrieve=field.retrieve,
                sort=field.sort).save()
        for field in CustomizedRetrvField.objects.filter(scheme__current=True).order_by('sort'):
            RetrvSoftwareField(
                field_name=field.field.field_name,
                field_label=field.field.field_label,
                display=field.display,
                retrieve=field.retrieve,
                sort=field.sort).save()

        # Recreate RetrvSoftware table
        fields = [(f.name, f) for f in RetrvSoftware._meta.local_fields]
        db.create_table(RetrvSoftware._meta.db_table, fields)
        db.execute_deferred_sql()

        # Export Softwares
        for scr in Software.objects.all():
            target = RetrvSoftware()
            for field in BuiltinRetrvField.objects.filter(scheme__current=True).order_by('sort'):
                setattr(target, field.field_name, getattr(scr, field.field_name))
            for extfield in SoftwareExtField.objects.filter(scr=scr):
                if hasattr(target, extfield.type.field_name):
                    setattr(target, extfield.type.field_name, extfield.value)
            print target
            target.save()

        # Commit transaction
        db.commit_transaction()
    except:
        db.rollback_transaction()
        return HttpResponse('Failed')
    return HttpResponse('OK')

def test_column_constraint(self):
    """
    Tests that the value constraint of PositiveIntegerField is enforced on
    the database level.
    """
    if not db.has_check_constraints:
        return

    db.create_table("test_column_constraint", [
        ('spam', models.PositiveIntegerField()),
    ])
    db.execute_deferred_sql()

    # Make sure we can't insert negative values
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_column_constraint VALUES (-42)")
    except:
        pass
    else:
        self.fail("Could insert a negative value into a PositiveIntegerField.")
    db.rollback_transaction()

    # remove constraint
    db.alter_column("test_column_constraint", "spam", models.IntegerField())
    db.execute_deferred_sql()
    # make sure the insertion works now
    db.execute('INSERT INTO test_column_constraint VALUES (-42)')
    db.execute('DELETE FROM test_column_constraint')

    # add it back again
    db.alter_column("test_column_constraint", "spam", models.PositiveIntegerField())
    db.execute_deferred_sql()
    # it should fail again
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_column_constraint VALUES (-42)")
    except:
        pass
    else:
        self.fail("Could insert a negative value after changing an IntegerField to a PositiveIntegerField.")
    db.rollback_transaction()

    db.delete_table("test_column_constraint")
    db.start_transaction()

def test_alter_unique(self):
    """
    Tests that unique constraints are properly created and deleted when
    altering columns.
    """
    db.create_table("test_alter_unique", [
        ('spam', models.IntegerField()),
        ('eggs', models.IntegerField(unique=True)),
    ])
    db.execute_deferred_sql()

    # Make sure the unique constraint is created
    db.execute('INSERT INTO test_alter_unique VALUES (0, 42)')
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Could insert the same integer twice into a field with unique=True.")
    db.rollback_transaction()

    # remove constraint
    db.alter_column("test_alter_unique", "eggs", models.IntegerField())
    # make sure the insertion works now
    db.execute('INSERT INTO test_alter_unique VALUES (1, 42)')

    # add it back again
    db.execute('DELETE FROM test_alter_unique WHERE spam=1')
    db.alter_column("test_alter_unique", "eggs", models.IntegerField(unique=True))
    # it should fail again
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Unique constraint not created during alter_column()")
    db.rollback_transaction()

    # Delete the unique index/constraint
    if db.backend_name != "sqlite3":
        db.delete_unique("test_alter_unique", ["eggs"])
    db.delete_table("test_alter_unique")

    db.start_transaction()

def test_alter(self):
    """
    Test altering columns/tables
    """
    db.create_table("test4", [
        ('spam', models.BooleanField(default=False)),
        ('eggs', models.IntegerField()),
    ])
    db.start_transaction()
    # Add a column
    db.add_column("test4", "add1", models.IntegerField(default=3), keep_default=False)
    # Add a FK with keep_default=False (#69)
    User = db.mock_model(model_name='User', db_table='auth_user',
                         db_tablespace='', pk_field_name='id',
                         pk_field_type=models.AutoField, pk_field_args=[],
                         pk_field_kwargs={})
    db.add_column("test4", "user", models.ForeignKey(User), keep_default=False)
    db.rollback_transaction()
    db.delete_table("test4")

def test_alter_unique(self):
    """
    Tests that unique constraints are not affected when altering columns
    (that's handled by create_/delete_unique)
    """
    db.create_table("test_alter_unique", [
        ('spam', models.IntegerField()),
        ('eggs', models.IntegerField(unique=True)),
    ])
    db.execute_deferred_sql()

    # Make sure the unique constraint is created
    db.execute('INSERT INTO test_alter_unique (spam, eggs) VALUES (0, 42)')
    db.commit_transaction()
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Could insert the same integer twice into a unique field.")
    db.rollback_transaction()

    # Alter without unique=True (should not affect anything)
    db.alter_column("test_alter_unique", "eggs", models.IntegerField())
    # Insertion should still fail
    db.start_transaction()
    try:
        db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
    except:
        pass
    else:
        self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.")
    db.rollback_transaction()

    # Delete the unique index/constraint
    if db.backend_name != "sqlite3":
        db.delete_unique("test_alter_unique", ["eggs"])
    db.delete_table("test_alter_unique")

    db.start_transaction()

def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model
    """
    # Table is typically appname_modelname, but it could be different
    # always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .registration import registry
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(
            _('%(dependency)s must be installed for this command to work') %
            {'dependency': 'South'})

    mdl = models.get_model(app_name, model_name)
    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(registry._field_registry[fld], CategoryFKField):
        print((_('Dropping ForeignKey %(field_name)s from %(model_name)s') % {
            'field_name': field_name,
            'model_name': model_name
        }))
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError as e:
            db.rollback_transaction()
            raise e
    elif isinstance(registry._field_registry[fld], CategoryM2MField):
        print((_(
            'Dropping Many2Many table between %(model_name)s and %(category_table)s'
        ) % {
            'model_name': model_name,
            'category_table': 'category'
        }))
        try:
            db.start_transaction()
            # The M2M table name follows the pattern used when it was created
            # in migrate_app(): "<model table>_categories".
            table_name = '%s_%s' % (mdl._meta.db_table, 'categories')
            db.delete_table(table_name, cascade=False)
            db.commit_transaction()
        except DatabaseError as e:
            db.rollback_transaction()
            raise e

def test_datetime_default(self):
    """
    Test that defaults are correctly not created for datetime columns
    """
    end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1)

    try:
        from django.utils import timezone
    except ImportError:
        pass
    else:
        from django.conf import settings
        if getattr(settings, 'USE_TZ', False):
            end_of_world = end_of_world.replace(tzinfo=timezone.utc)

    db.create_table("test_datetime_def", [
        ('col0', models.IntegerField(null=True)),
        ('col1', models.DateTimeField(default=end_of_world)),
        ('col2', models.DateTimeField(null=True)),
    ])
    db.execute_deferred_sql()
    # insert a row
    db.execute("INSERT INTO test_datetime_def (col0, col1, col2) values (null,%s,null)", [end_of_world])
    db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world))
    db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world))
    db.execute_deferred_sql()
    db.commit_transaction()
    # In the single existing row, we now expect col1=col2=col3=end_of_world...
    db.start_transaction()
    ends = db.execute("select col1,col2,col3 from test_datetime_def")[0]
    self.failUnlessEqual(len(ends), 3)
    for e in ends:
        self.failUnlessEqual(e, end_of_world)
    db.commit_transaction()
    # ...but there should not be a default in the database for col1 or col3
    for cols in ["col1,col2", "col2,col3"]:
        db.start_transaction()
        statement = "insert into test_datetime_def (col0,%s) values (null,%%s,%%s)" % cols
        self.assertRaises(
            IntegrityError, db.execute, statement, [end_of_world, end_of_world]
        )
        db.rollback_transaction()

    db.start_transaction()  # To preserve the sanity and semantics of this test class

def test_primary_key(self):
    """
    Test the primary key operations
    """
    db.create_table("test_pk", [
        ('id', models.IntegerField(primary_key=True)),
        ('new_pkey', models.IntegerField()),
        ('eggs', models.IntegerField(unique=True)),
    ])
    db.execute_deferred_sql()
    db.start_transaction()
    # Remove the default primary key, and make new_pkey it
    db.drop_primary_key("test_pk")
    db.create_primary_key("test_pk", "new_pkey")
    # Try inserting a now-valid row pair
    db.execute("INSERT INTO test_pk (id, new_pkey, eggs) VALUES (1, 2, 3), (1, 3, 4)")
    db.rollback_transaction()
    db.delete_table("test_pk")

def forwards(self, orm):
    """
    This migration has been customized to support upgrades from Cloudera
    Enterprise 3.5, as well as Hue 1.2
    """
    try:
        db.rename_table('userman_ldapgroup', 'useradmin_ldapgroup')
        db.delete_column('useradmin_ldapgroup', 'hidden')
    except Exception, e:
        db.rollback_transaction()
        db.start_transaction()

        # Adding model 'LdapGroup'
        db.create_table('useradmin_ldapgroup', (
            ('group', self.gf('django.db.models.fields.related.ForeignKey')(related_name='group', to=orm['auth.Group'])),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ))
        db.send_create_signal('useradmin', ['LdapGroup'])

def safe_add_column(table, column, column_data, keep_default=False):
    """When the user calls syncdb with askbot for the first time, the
    auth_user table is created together with the patched columns, so we need
    to add these columns here in separate transactions and roll back if they
    fail; if we wanted to, we could also record which columns clash.
    """
    if db.backend_name in ('mysql', 'postgres'):
        if len(db.execute('select column_name from information_schema.columns where table_name=%s and column_name=%s',
                          params=[table, column])) == 0:
            db.add_column(table, column, column_data, keep_default=keep_default)
    else:
        try:
            db.start_transaction()
            db.add_column(table, column, column_data, keep_default=keep_default)
            db.commit_transaction()
            return True
        except:
            db.rollback_transaction()
            return False

def test_rename(self):
    """
    Test column renaming
    """
    cursor = connection.cursor()
    db.create_table("test_rn", [('spam', models.BooleanField(default=False))])
    db.start_transaction()
    # Make sure we can select the column
    cursor.execute("SELECT spam FROM test_rn")
    # Rename it
    db.rename_column("test_rn", "spam", "eggs")
    cursor.execute("SELECT eggs FROM test_rn")
    try:
        cursor.execute("SELECT spam FROM test_rn")
        self.fail("Just-renamed column could be selected!")
    except:
        pass
    db.rollback_transaction()
    db.delete_table("test_rn")
