def test_check_constraints(self): """ Tests creating/deleting CHECK constraints """ # Create the tables with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the constraint exists constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): if details["columns"] == ["height"] and details["check"]: break else: self.fail("No check constraint for height found") # Alter the column to remove it new_field = IntegerField(null=True, blank=True) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field(Author, Author._meta.get_field_by_name("height")[0], new_field, strict=True) constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): if details["columns"] == ["height"] and details["check"]: self.fail("Check constraint for height found") # Alter the column to re-add it with connection.schema_editor() as editor: editor.alter_field(Author, new_field, Author._meta.get_field_by_name("height")[0], strict=True) constraints = connection.introspection.get_constraints(connection.cursor(), Author._meta.db_table) for name, details in constraints.items(): if details["columns"] == ["height"] and details["check"]: break else: self.fail("No check constraint for height found")
def test_create_model(self):
    """
    Tests the CreateModel operation.
    Most other tests use this operation as part of setup, so check failures here first.
    """
    operation = migrations.CreateModel(
        "Pony",
        [
            ("id", models.AutoField(primary_key=True)),
            ("pink", models.IntegerField(default=1)),
        ],
    )
    # Test the state alteration
    project_state = ProjectState()
    new_state = project_state.clone()
    operation.state_forwards("test_crmo", new_state)
    self.assertEqual(new_state.models["test_crmo", "pony"].name, "Pony")
    self.assertEqual(len(new_state.models["test_crmo", "pony"].fields), 2)
    # Test the database alteration
    self.assertTableNotExists("test_crmo_pony")
    with connection.schema_editor() as editor:
        operation.database_forwards("test_crmo", editor, project_state, new_state)
    self.assertTableExists("test_crmo_pony")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_crmo", editor, new_state, project_state)
    self.assertTableNotExists("test_crmo_pony")
    # And deconstruction: (name, args, kwargs) triple for the serializer
    definition = operation.deconstruct()
    self.assertEqual(definition[0], "CreateModel")
    self.assertEqual(len(definition[1]), 2)
    self.assertEqual(len(definition[2]), 0)
    self.assertEqual(definition[1][0], "Pony")
def test_alter_unique_together(self):
    """
    Tests the AlterUniqueTogether operation.
    """
    project_state = self.set_up_test_model("test_alunto")
    # Test the state alteration
    operation = migrations.AlterUniqueTogether("Pony", [("pink", "weight")])
    new_state = project_state.clone()
    operation.state_forwards("test_alunto", new_state)
    self.assertEqual(len(project_state.models["test_alunto", "pony"].options.get("unique_together", set())), 0)
    self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
    # Make sure we can insert duplicate rows (no constraint yet)
    cursor = connection.cursor()
    cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)")
    cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)")
    cursor.execute("DELETE FROM test_alunto_pony")
    # Test the database alteration
    with connection.schema_editor() as editor:
        operation.database_forwards("test_alunto", editor, project_state, new_state)
    cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)")
    # atomic() keeps the expected IntegrityError from poisoning the
    # surrounding transaction
    with self.assertRaises(IntegrityError):
        with atomic():
            cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)")
    cursor.execute("DELETE FROM test_alunto_pony")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_alunto", editor, new_state, project_state)
    cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (1, 1, 1)")
    cursor.execute("INSERT INTO test_alunto_pony (id, pink, weight) VALUES (2, 1, 1)")
    cursor.execute("DELETE FROM test_alunto_pony")
    # Test flat unique_together (a single tuple instead of a list of tuples)
    operation = migrations.AlterUniqueTogether("Pony", ("pink", "weight"))
    operation.state_forwards("test_alunto", new_state)
    self.assertEqual(len(new_state.models["test_alunto", "pony"].options.get("unique_together", set())), 1)
def test_unique_together(self):
    """
    Tests removing and adding unique_together constraints on a model.
    """
    # Create the table
    with connection.schema_editor() as editor:
        editor.create_model(UniqueTest)
    # Ensure the fields are unique to begin with
    UniqueTest.objects.create(year=2012, slug="foo")
    UniqueTest.objects.create(year=2011, slug="foo")
    UniqueTest.objects.create(year=2011, slug="bar")
    self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
    UniqueTest.objects.all().delete()
    # Alter the model to its non-unique-together companion
    with connection.schema_editor() as editor:
        editor.alter_unique_together(UniqueTest, UniqueTest._meta.unique_together, [])
    # Ensure the fields are no longer unique
    UniqueTest.objects.create(year=2012, slug="foo")
    UniqueTest.objects.create(year=2012, slug="foo")
    UniqueTest.objects.all().delete()
    # Alter it back
    # (the original built an unused `new_new_field = SlugField(unique=True)`
    # here; it was dead code and has been removed)
    with connection.schema_editor() as editor:
        editor.alter_unique_together(UniqueTest, [], UniqueTest._meta.unique_together)
    # Ensure the fields are unique again
    UniqueTest.objects.create(year=2012, slug="foo")
    self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
    UniqueTest.objects.all().delete()
def test_run_sql(self):
    """
    Tests the RunSQL operation.
    """
    # NOTE(review): another method named test_run_sql exists later in this
    # file; if both live in the same class, one definition shadows the
    # other — confirm they belong to different classes.
    project_state = self.set_up_test_model("test_runsql")
    # Create the operation
    operation = migrations.RunSQL(
        "CREATE TABLE i_love_ponies (id int, special_thing int)",
        "DROP TABLE i_love_ponies",
        state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
    )
    # Test the state alteration
    new_state = project_state.clone()
    operation.state_forwards("test_runsql", new_state)
    self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
    # Make sure there's no table
    self.assertTableNotExists("i_love_ponies")
    # Test the database alteration
    with connection.schema_editor() as editor:
        operation.database_forwards("test_runsql", editor, project_state, new_state)
    self.assertTableExists("i_love_ponies")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_runsql", editor, new_state, project_state)
    self.assertTableNotExists("i_love_ponies")
def test_add_field_m2m(self):
    """
    Tests the AddField operation with a ManyToManyField.
    """
    project_state = self.set_up_test_model("test_adflmm", second_model=True)
    # Test the state alteration
    operation = migrations.AddField("Pony", "stables", models.ManyToManyField("Stable", related_name="ponies"))
    new_state = project_state.clone()
    operation.state_forwards("test_adflmm", new_state)
    self.assertEqual(len(new_state.models["test_adflmm", "pony"].fields), 4)
    # Test the database alteration: an M2M adds a through table, not a column
    self.assertTableNotExists("test_adflmm_pony_stables")
    with connection.schema_editor() as editor:
        operation.database_forwards("test_adflmm", editor, project_state, new_state)
    self.assertTableExists("test_adflmm_pony_stables")
    self.assertColumnNotExists("test_adflmm_pony", "stables")
    # Make sure the M2M field actually works
    with atomic():
        new_apps = new_state.render()
        Pony = new_apps.get_model("test_adflmm", "Pony")
        p = Pony.objects.create(pink=False, weight=4.55)
        p.stables.create()
        self.assertEqual(p.stables.count(), 1)
        p.stables.all().delete()
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_adflmm", editor, new_state, project_state)
    self.assertTableNotExists("test_adflmm_pony_stables")
def test_alter_field_pk_fk(self):
    """
    Tests the AlterField operation on primary keys changes any FKs pointing to it.
    """
    project_state = self.set_up_test_model("test_alflpkfk", related_model=True)

    def column_type(table, column):
        # Introspected type code of `column` in `table`.
        return [
            c.type_code
            for c in connection.introspection.get_table_description(connection.cursor(), table)
            if c.name == column
        ][0]

    def assert_fk_matches_pk():
        # The rider's FK column must always share the pony PK's DB type.
        self.assertEqual(
            column_type("test_alflpkfk_pony", "id"),
            column_type("test_alflpkfk_rider", "pony_id"),
        )

    # Test the state alteration
    operation = migrations.AlterField("Pony", "id", models.FloatField(primary_key=True))
    new_state = project_state.clone()
    operation.state_forwards("test_alflpkfk", new_state)
    self.assertIsInstance(project_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.AutoField)
    self.assertIsInstance(new_state.models["test_alflpkfk", "pony"].get_field_by_name("id"), models.FloatField)
    # Test the database alteration: before, after forwards, and after reversal
    # (the original repeated the introspection expression six times inline)
    assert_fk_matches_pk()
    with connection.schema_editor() as editor:
        operation.database_forwards("test_alflpkfk", editor, project_state, new_state)
    assert_fk_matches_pk()
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_alflpkfk", editor, new_state, project_state)
    assert_fk_matches_pk()
def test_run_sql(self):
    """
    Tests the RunSQL operation.
    """
    project_state = self.set_up_test_model("test_runsql")
    # Create the operation
    operation = migrations.RunSQL(
        # Use a multi-line string with a comment to test splitting on SQLite and MySQL respectively
        "CREATE TABLE i_love_ponies (id int, special_thing int);\n"
        "INSERT INTO i_love_ponies (id, special_thing) VALUES (1, 42); -- this is magic!\n"
        "INSERT INTO i_love_ponies (id, special_thing) VALUES (2, 51);\n",
        "DROP TABLE i_love_ponies",
        state_operations=[migrations.CreateModel("SomethingElse", [("id", models.AutoField(primary_key=True))])],
    )
    # Test the state alteration
    new_state = project_state.clone()
    operation.state_forwards("test_runsql", new_state)
    self.assertEqual(len(new_state.models["test_runsql", "somethingelse"].fields), 1)
    # Make sure there's no table
    self.assertTableNotExists("i_love_ponies")
    # Test the database alteration
    with connection.schema_editor() as editor:
        operation.database_forwards("test_runsql", editor, project_state, new_state)
    self.assertTableExists("i_love_ponies")
    # Make sure all the SQL was processed (both INSERTs ran)
    with connection.cursor() as cursor:
        cursor.execute("SELECT COUNT(*) FROM i_love_ponies")
        self.assertEqual(cursor.fetchall()[0][0], 2)
    # And test reversal
    self.assertTrue(operation.reversible)
    with connection.schema_editor() as editor:
        operation.database_backwards("test_runsql", editor, new_state, project_state)
    self.assertTableNotExists("i_love_ponies")
def test_add_field(self):
    """
    Tests the AddField operation.
    """
    project_state = self.set_up_test_model("test_adfl")
    # Test the state alteration
    operation = migrations.AddField(
        "Pony",
        "height",
        models.FloatField(null=True, default=5),
    )
    new_state = project_state.clone()
    operation.state_forwards("test_adfl", new_state)
    self.assertEqual(len(new_state.models["test_adfl", "pony"].fields), 4)
    # state fields is a list of (name, field) pairs
    field = [
        f for n, f in new_state.models["test_adfl", "pony"].fields
        if n == "height"
    ][0]
    self.assertEqual(field.default, 5)
    # Test the database alteration
    self.assertColumnNotExists("test_adfl_pony", "height")
    with connection.schema_editor() as editor:
        operation.database_forwards("test_adfl", editor, project_state, new_state)
    self.assertColumnExists("test_adfl_pony", "height")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_adfl", editor, new_state, project_state)
    self.assertColumnNotExists("test_adfl_pony", "height")
def test_create_proxy_model(self):
    """
    Tests that CreateModel ignores proxy models.
    """
    project_state = self.set_up_test_model("test_crprmo")
    # Test the state alteration
    operation = migrations.CreateModel(
        "ProxyPony",
        [],
        options={"proxy": True},
        bases=("test_crprmo.Pony", ),
    )
    new_state = project_state.clone()
    operation.state_forwards("test_crprmo", new_state)
    self.assertIn(("test_crprmo", "proxypony"), new_state.models)
    # Test the database alteration: a proxy must not get its own table
    self.assertTableNotExists("test_crprmo_proxypony")
    self.assertTableExists("test_crprmo_pony")
    with connection.schema_editor() as editor:
        operation.database_forwards("test_crprmo", editor, project_state, new_state)
    self.assertTableNotExists("test_crprmo_proxypony")
    self.assertTableExists("test_crprmo_pony")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_crprmo", editor, new_state, project_state)
    self.assertTableNotExists("test_crprmo_proxypony")
    self.assertTableExists("test_crprmo_pony")
def test_rename_model(self):
    """
    Tests the RenameModel operation.
    """
    project_state = self.set_up_test_model("test_rnmo", related_model=True)
    # Test the state alteration
    operation = migrations.RenameModel("Pony", "Horse")
    new_state = project_state.clone()
    operation.state_forwards("test_rnmo", new_state)
    self.assertNotIn(("test_rnmo", "pony"), new_state.models)
    self.assertIn(("test_rnmo", "horse"), new_state.models)
    # Remember, RenameModel also repoints all incoming FKs and M2Ms
    self.assertEqual("test_rnmo.Horse", new_state.models["test_rnmo", "rider"].fields[1][1].rel.to)
    # Test the database alteration
    self.assertTableExists("test_rnmo_pony")
    self.assertTableNotExists("test_rnmo_horse")
    if connection.features.supports_foreign_keys:
        self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
        self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
    with connection.schema_editor() as editor:
        operation.database_forwards("test_rnmo", editor, project_state, new_state)
    self.assertTableNotExists("test_rnmo_pony")
    self.assertTableExists("test_rnmo_horse")
    if connection.features.supports_foreign_keys:
        self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
        self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_rnmo", editor, new_state, project_state)
    self.assertTableExists("test_rnmo_pony")
    self.assertTableNotExists("test_rnmo_horse")
    if connection.features.supports_foreign_keys:
        self.assertFKExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_pony", "id"))
        self.assertFKNotExists("test_rnmo_rider", ["pony_id"], ("test_rnmo_horse", "id"))
def test_no_index_for_foreignkey(self):
    """
    MySQL on InnoDB already creates indexes automatically for foreign keys. (#14180).
    An index should be created if db_constraint=False (#26171).
    """
    storage = connection.introspection.get_storage_engine(
        connection.cursor(), ArticleTranslation._meta.db_table
    )
    if storage != "InnoDB":
        # Fixed: unittest's skip API is skipTest(); the original called
        # self.skip(), which does not exist on TestCase and would raise
        # AttributeError instead of skipping.
        self.skipTest("This test only applies to the InnoDB storage engine")
    index_sql = connection.schema_editor()._model_indexes_sql(ArticleTranslation)
    self.assertEqual(index_sql, [
        'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
        'ON `indexes_articletranslation` (`article_no_constraint_id`)'
    ])
    # The index also shouldn't be created if the ForeignKey is added after
    # the model was created.
    with connection.schema_editor() as editor:
        new_field = ForeignKey(Article, CASCADE)
        new_field.set_attributes_from_name('new_foreign_key')
        editor.add_field(ArticleTranslation, new_field)
        # Only the FK constraint is deferred; no separate CREATE INDEX.
        self.assertEqual(editor.deferred_sql, [
            'ALTER TABLE `indexes_articletranslation` '
            'ADD CONSTRAINT `indexes_articletrans_new_foreign_key_id_d27a9146_fk_indexes_a` '
            'FOREIGN KEY (`new_foreign_key_id`) REFERENCES `indexes_article` (`id`)'
        ])
def test_create_model_inheritance(self):
    """
    Tests the CreateModel operation on a multi-table inheritance setup.
    """
    project_state = self.set_up_test_model("test_crmoih")
    # Test the state alteration
    operation = migrations.CreateModel(
        "ShetlandPony",
        [
            # Explicit parent link, as MTI would auto-create it
            ('pony_ptr', models.OneToOneField(
                auto_created=True,
                primary_key=True,
                to_field='id',
                serialize=False,
                to='test_crmoih.Pony',
            )),
            ("cuteness", models.IntegerField(default=1)),
        ],
    )
    new_state = project_state.clone()
    operation.state_forwards("test_crmoih", new_state)
    self.assertIn(("test_crmoih", "shetlandpony"), new_state.models)
    # Test the database alteration
    self.assertTableNotExists("test_crmoih_shetlandpony")
    with connection.schema_editor() as editor:
        operation.database_forwards("test_crmoih", editor, project_state, new_state)
    self.assertTableExists("test_crmoih_shetlandpony")
    # And test reversal
    with connection.schema_editor() as editor:
        operation.database_backwards("test_crmoih", editor, new_state, project_state)
    self.assertTableNotExists("test_crmoih_shetlandpony")
def test_rename(self): """ Tests simple altering of fields """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") self.assertNotIn("display_name", columns) # Alter the name field's name new_field = CharField(max_length=254) new_field.set_attributes_from_name("display_name") with connection.schema_editor() as editor: editor.alter_field( Author, Author._meta.get_field_by_name("name")[0], new_field, strict=True, ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['display_name'][0], "CharField") self.assertNotIn("name", columns)
def test_db_table(self): """ Tests renaming of the table """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the table is there to begin with columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table with connection.schema_editor() as editor: editor.alter_db_table( Author, "schema_author", "schema_otherauthor", ) # Ensure the table is there afterwards Author._meta.db_table = "schema_otherauthor" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField") # Alter the table again with connection.schema_editor() as editor: editor.alter_db_table( Author, "schema_otherauthor", "schema_author", ) # Ensure the table is still there Author._meta.db_table = "schema_author" columns = self.column_classes(Author) self.assertEqual(columns['name'][0], "CharField")
def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field( Author, new_field, ) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type = columns['awesome'][0] self.assertEqual(field_type, connection.features.introspected_boolean_field_type(new_field, created_separately=True))
def test_alter_null_to_not_null(self): """ #23609 - Tests handling of default values when altering from NULL to NOT NULL. """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure the field is right to begin with columns = self.column_classes(Author) self.assertTrue(columns['height'][1][6]) # Create some test data Author.objects.create(name='Not null author', height=12) Author.objects.create(name='Null author') # Verify null value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertIsNone(Author.objects.get(name='Null author').height) # Alter the height field to NOT NULL with default new_field = PositiveIntegerField(default=42) new_field.set_attributes_from_name("height") with connection.schema_editor() as editor: editor.alter_field( Author, Author._meta.get_field_by_name("height")[0], new_field ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertFalse(columns['height'][1][6]) # Verify default value self.assertEqual(Author.objects.get(name='Not null author').height, 12) self.assertEqual(Author.objects.get(name='Null author').height, 42)
def test_fk(self): "Tests that creating tables out of FK order, then repointing, works" # Create the table with connection.schema_editor() as editor: editor.create_model(Book) editor.create_model(Author) editor.create_model(Tag) # Check that initial tables are there list(Author.objects.all()) list(Book.objects.all()) # Make sure the FK constraint is present with self.assertRaises(IntegrityError): Book.objects.create( author_id=1, title="Much Ado About Foreign Keys", pub_date=datetime.datetime.now(), ) # Repoint the FK constraint new_field = ForeignKey(Tag) new_field.set_attributes_from_name("author") with connection.schema_editor() as editor: editor.alter_field( Book, Book._meta.get_field_by_name("author")[0], new_field, strict=True, ) # Make sure the new FK constraint is present constraints = self.get_constraints(Book._meta.db_table) for name, details in constraints.items(): if details['columns'] == ["author_id"] and details['foreign_key']: self.assertEqual(details['foreign_key'], ('schema_tag', 'id')) break else: self.fail("No FK constraint for author_id found")
def test_add_field_temp_default(self): """ Tests adding fields to models with a temporary default """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = CharField(max_length=30, default="Godwin") new_field.set_attributes_from_name("surname") with connection.schema_editor() as editor: editor.add_field( Author, new_field, ) # Ensure the field is right afterwards columns = self.column_classes(Author) self.assertEqual(columns['surname'][0], "CharField") self.assertEqual(columns['surname'][1][6], connection.features.interprets_empty_strings_as_nulls)
def test_m2m(self):
    """
    Tests adding/removing M2M fields on models
    """
    # Create the tables
    with connection.schema_editor() as editor:
        editor.create_model(AuthorWithM2M)
        editor.create_model(TagM2MTest)
    # Create an M2M field
    new_field = ManyToManyField("schema.TagM2MTest", related_name="authors")
    new_field.contribute_to_class(AuthorWithM2M, "tags")
    try:
        # Ensure there's no m2m table there
        self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
        # Add the field. Fixed: the field was contributed to AuthorWithM2M,
        # so that model must be passed here — the original passed Author,
        # an unrelated model.
        with connection.schema_editor() as editor:
            editor.add_field(
                AuthorWithM2M,
                new_field,
            )
        # Ensure there is now an m2m table there
        columns = self.column_classes(new_field.rel.through)
        self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
        # Remove the M2M table again (same model fix as above)
        with connection.schema_editor() as editor:
            editor.remove_field(
                AuthorWithM2M,
                new_field,
            )
        # Ensure there's no m2m table there
        self.assertRaises(DatabaseError, self.column_classes, new_field.rel.through)
    finally:
        # Cleanup model states
        AuthorWithM2M._meta.local_many_to_many.remove(new_field)
def test_primary_key(self): """ Tests altering of the primary key """ # Create the table with connection.schema_editor() as editor: editor.create_model(Tag) # Ensure the table is there and has the right PK self.assertTrue( self.get_indexes(Tag._meta.db_table)['id']['primary_key'], ) # Alter to change the PK new_field = SlugField(primary_key=True) new_field.set_attributes_from_name("slug") new_field.model = Tag with connection.schema_editor() as editor: editor.remove_field(Tag, Tag._meta.get_field_by_name("id")[0]) editor.alter_field( Tag, Tag._meta.get_field_by_name("slug")[0], new_field, ) # Ensure the PK changed self.assertNotIn( 'id', self.get_indexes(Tag._meta.db_table), ) self.assertTrue( self.get_indexes(Tag._meta.db_table)['slug']['primary_key'], )
def test_install_plugin(self):
    """
    Test we can load the example plugin that every version of MySQL ships
    with.
    """
    assert not plugin_exists("metadata_lock_info")
    state = ProjectState()
    operation = InstallPlugin("metadata_lock_info", "metadata_lock_info.so")
    assert (
        operation.describe()
        == "Installs plugin metadata_lock_info from metadata_lock_info.so"
    )

    # Forwards installs the plugin
    new_state = state.clone()
    with connection.schema_editor() as editor:
        operation.database_forwards("testapp", editor, state, new_state)
    assert plugin_exists("metadata_lock_info")

    # Backwards uninstalls it again
    new_state = state.clone()
    with connection.schema_editor() as editor:
        operation.database_backwards("testapp", editor, new_state, state)
    assert not plugin_exists("metadata_lock_info")
def test_0004_change_sequence_owners(self):
    """
    Smoke-test the forwards and noop callables of migration
    0004_change_sequence_owners against freshly created schemata.
    """
    Schema.objects.mass_create('a', 'b', 'c')
    module = import_module('boardinghouse.migrations.0004_change_sequence_owners')
    module.change_existing_sequence_owners(apps, connection.schema_editor())
    # How can I assert that this was executed, and did what it says on the box?
    module.noop(apps, connection.schema_editor())
def test_alter_unique_together(self):
    """
    AlterUniqueTogether must enforce (and un-enforce) the constraint in
    every schema, exercised via the @all_schemata helpers.
    """
    project_state = self.set_up_test_model()
    operation = migrations.AlterUniqueTogether('Pony', [('pink', 'weight')])
    new_state = project_state.clone()
    operation.state_forwards('tests', new_state)

    @all_schemata
    def insert(cursor, **kwargs):
        # Duplicate rows must be allowed when the constraint is absent.
        cursor.execute('INSERT INTO tests_pony (pink, weight) VALUES (1, 1)')
        cursor.execute('INSERT INTO tests_pony (pink, weight) VALUES (1, 1)')
        cursor.execute('DELETE FROM tests_pony')

    @all_schemata
    def insert_fail(cursor, **kwargs):
        # The second duplicate insert must violate the constraint.
        cursor.execute('INSERT INTO tests_pony (pink, weight) VALUES (1, 1)')
        with self.assertRaises(IntegrityError):
            with atomic():
                cursor.execute('INSERT INTO tests_pony (pink, weight) VALUES (1, 1)')
        cursor.execute('DELETE FROM tests_pony')

    with connection.cursor() as cursor:
        insert(cursor)
        with connection.schema_editor() as editor:
            operation.database_forwards('tests', editor, project_state, new_state)
        insert_fail(cursor)
        with connection.schema_editor() as editor:
            operation.database_backwards('tests', editor, new_state, project_state)
        insert(cursor)
def test_constraint_name_method(self):
    """
    Exercise SchemaEditor._constraint_names filtering by index/unique/
    primary_key/foreign_key/check flags against the test models.
    """
    from ..models import AwareModel, NaiveModel, SelfReferentialModel
    with connection.schema_editor() as editor:
        self.assertEqual(3, len(editor._constraint_names(AwareModel, index=True)))
        six.assertCountEqual(
            self,
            ['tests_awaremodel_pkey'],
            editor._constraint_names(AwareModel, primary_key=True)
        )
        six.assertCountEqual(self, [
            'tests_awaremodel_pkey', 'tests_awaremodel_name_key'
        ], editor._constraint_names(AwareModel, unique=True))
        six.assertCountEqual(self, [
            'tests_awaremodel_name_key'
        ], editor._constraint_names(AwareModel, unique=True, primary_key=False))
        six.assertCountEqual(self, ['tests_awaremodel_pkey'], editor._constraint_names(AwareModel, primary_key=True, unique=True))
        six.assertCountEqual(self, [], editor._constraint_names(AwareModel, foreign_key=True))
        six.assertCountEqual(self, [], editor._constraint_names(AwareModel, foreign_key=True, primary_key=True))
        six.assertCountEqual(self, ['tests_awaremodel_factor_check'], editor._constraint_names(AwareModel, check=True))
    with connection.schema_editor() as editor:
        six.assertCountEqual(self, ['tests_naivemodel_pkey'], editor._constraint_names(NaiveModel, primary_key=True))
        six.assertCountEqual(self, ['tests_naivemodel_name_key'], editor._constraint_names(NaiveModel, unique=True, primary_key=False))
    with connection.schema_editor() as editor:
        # These constraint names appear to change between different versions of django or python?
        self.assertEqual(1, len(editor._constraint_names(SelfReferentialModel, foreign_key=True)))
def test_add_field_temp_default_boolean(self): """ Tests adding fields to models with a temporary default where the default is False. (#21783) """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) # Ensure there's no age field columns = self.column_classes(Author) self.assertNotIn("age", columns) # Add some rows of data Author.objects.create(name="Andrew", height=30) Author.objects.create(name="Andrea") # Add a not-null field new_field = BooleanField(default=False) new_field.set_attributes_from_name("awesome") with connection.schema_editor() as editor: editor.add_field( Author, new_field, ) # Ensure the field is right afterwards columns = self.column_classes(Author) # BooleanField are stored as TINYINT(1) on MySQL. field_type, field_info = columns['awesome'] if connection.vendor == 'mysql': self.assertEqual(field_type, 'IntegerField') self.assertEqual(field_info.precision, 1) else: self.assertEqual(field_type, 'BooleanField')
def save(self, force_insert=False, force_update=False, using=None, **kwargs):
    """
    Persist this dynamic field definition and apply the corresponding
    schema change: ADD COLUMN for a brand-new row, ALTER COLUMN when an
    existing definition changed.

    Fixed: **kwargs (e.g. update_fields) was accepted but silently
    dropped; it is now forwarded to super().save().
    """
    if not self.verbose_name:
        self.verbose_name = self.name
    model_class = self.model.as_model()
    field = self.as_field()
    with connection.schema_editor() as schema_editor:
        # New if we have no PK yet, or our PK row was deleted out from
        # under us.
        create = self.pk is None or not self.__class__.objects.filter(pk=self.pk).exists()
        if create:
            schema_editor.add_field(model_class, field)
        else:
            # Hoisted: the original indexed `old[0]` repeatedly, issuing a
            # fresh DB query each time; fetch the stored row once.
            stored = self.__class__.objects.filter(pk=self.pk)[0]
            with connection.schema_editor() as schema_editor:
                app_models = apps.all_models[self.model.app.name]
                if str(stored.model.name) in app_models:
                    old_model = app_models[str(stored.model.name)]
                    # Find the concrete field object matching the stored name.
                    old_field, new_field = None, None
                    for f in old_model._meta.fields:
                        if f.name == stored.name:
                            old_field = f
                    new_field = field
                    schema_editor.alter_field(old_model, old_field, new_field)
                    unregister_from_rest_router(old_model)
    super(DynamicModelField, self).save(force_insert, force_update, using, **kwargs)
    reregister_dynamo(self.model.as_model())
def test_running_without_changes(self):
    """
    AlterStorageEngine must be a no-op (no ALTER TABLE) when the table is
    already on the target engine, but still alter on the way back.
    """
    project_state = self.set_up_test_model("test_arstd")
    operation = AlterStorageEngine("Pony", from_engine="MyISAM", to_engine="InnoDB")
    assert table_storage_engine("test_arstd_pony") == "InnoDB"

    # Forwards - shouldn't actually do an ALTER since it is already InnoDB
    new_state = project_state.clone()
    operation.state_forwards("test_arstd", new_state)
    capturer = CaptureQueriesContext(connection)
    with capturer, connection.schema_editor() as editor:
        operation.database_forwards("test_arstd", editor, project_state, new_state)
    queries = [q['sql'] for q in capturer.captured_queries]
    assert not any(q.startswith('ALTER TABLE ') for q in queries), (
        "One of the executed queries was an unexpected ALTER TABLE:\n{}"
        .format("\n".join(queries))
    )
    assert table_storage_engine("test_arstd_pony") == "InnoDB"

    # Backwards - will actually ALTER since it is going 'back' to MyISAM
    with connection.schema_editor() as editor:
        operation.database_backwards("test_arstd", editor, new_state, project_state)
    assert table_storage_engine("test_arstd_pony") == "MyISAM"
def test_indexes(self): """ Tests creation/altering of indexes """ # Create the table with connection.schema_editor() as editor: editor.create_model(Author) editor.create_model(Book) # Ensure the table is there and has the right index self.assertIn("title", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table)) # Alter to remove the index new_field = CharField(max_length=100, db_index=False) new_field.set_attributes_from_name("title") with connection.schema_editor() as editor: editor.alter_field(Book, Book._meta.get_field_by_name("title")[0], new_field, strict=True) # Ensure the table is there and has no index self.assertNotIn("title", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table)) # Alter to re-add the index with connection.schema_editor() as editor: editor.alter_field(Book, new_field, Book._meta.get_field_by_name("title")[0], strict=True) # Ensure the table is there and has the index again self.assertIn("title", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table)) # Add a unique column, verify that creates an implicit index with connection.schema_editor() as editor: editor.add_field(Book, BookWithSlug._meta.get_field_by_name("slug")[0]) self.assertIn("slug", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table)) # Remove the unique, check the index goes with it new_field2 = CharField(max_length=20, unique=False) new_field2.set_attributes_from_name("slug") with connection.schema_editor() as editor: editor.alter_field(BookWithSlug, BookWithSlug._meta.get_field_by_name("slug")[0], new_field2, strict=True) self.assertNotIn("slug", connection.introspection.get_indexes(connection.cursor(), Book._meta.db_table))
def test_no_index_for_foreignkey(self):
    """
    MySQL on InnoDB already creates indexes automatically for foreign keys.
    (#14180). An index should be created if db_constraint=False (#26171).
    """
    storage = connection.introspection.get_storage_engine(
        connection.cursor(), ArticleTranslation._meta.db_table
    )
    if storage != "InnoDB":
        # BUG FIX: unittest.TestCase has no skip() method — self.skip(...)
        # raised AttributeError instead of skipping; skipTest() is correct.
        self.skipTest("This test only applies to the InnoDB storage engine")
    index_sql = [str(statement) for statement in connection.schema_editor()._model_indexes_sql(ArticleTranslation)]
    self.assertEqual(index_sql, [
        'CREATE INDEX `indexes_articletranslation_article_no_constraint_id_d6c0806b` '
        'ON `indexes_articletranslation` (`article_no_constraint_id`)'
    ])
    # The index also shouldn't be created if the ForeignKey is added after
    # the model was created.
    field_created = False
    try:
        with connection.schema_editor() as editor:
            new_field = ForeignKey(Article, CASCADE)
            new_field.set_attributes_from_name('new_foreign_key')
            editor.add_field(ArticleTranslation, new_field)
            field_created = True
            # No deferred SQL. The FK constraint is included in the
            # statement to add the field.
            self.assertFalse(editor.deferred_sql)
    finally:
        # Clean up even if an assertion above failed.
        if field_created:
            with connection.schema_editor() as editor:
                editor.remove_field(ArticleTranslation, new_field)
def test_unique(self):
    """
    Tests removing and adding unique constraints to a single column.
    """
    with connection.schema_editor() as editor:
        editor.create_model(Tag)
    # The slug starts out unique: a duplicate raises IntegrityError.
    Tag.objects.create(title="foo", slug="foo")
    self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
    Tag.objects.all().delete()
    # Drop the uniqueness.
    non_unique_slug = SlugField(unique=False)
    non_unique_slug.set_attributes_from_name("slug")
    with connection.schema_editor() as editor:
        editor.alter_field(Tag, Tag._meta.get_field("slug"), non_unique_slug, strict=True)
    # Duplicates are now allowed.
    Tag.objects.create(title="foo", slug="foo")
    Tag.objects.create(title="bar", slug="foo")
    Tag.objects.all().delete()
    # Restore the uniqueness.
    unique_slug = SlugField(unique=True)
    unique_slug.set_attributes_from_name("slug")
    with connection.schema_editor() as editor:
        editor.alter_field(Tag, non_unique_slug, unique_slug, strict=True)
    # Duplicates are rejected again.
    Tag.objects.create(title="foo", slug="foo")
    self.assertRaises(IntegrityError, Tag.objects.create, title="bar", slug="foo")
    Tag.objects.all().delete()
    # Rename the column; the unique constraint must survive the rename.
    renamed_slug = SlugField(unique=False)
    renamed_slug.set_attributes_from_name("slug2")
    with connection.schema_editor() as editor:
        editor.alter_field(Tag, Tag._meta.get_field("slug"), TagUniqueRename._meta.get_field("slug2"), strict=True)
    TagUniqueRename.objects.create(title="foo", slug2="foo")
    self.assertRaises(IntegrityError, TagUniqueRename.objects.create, title="bar", slug2="foo")
    Tag.objects.all().delete()
def test_fk_db_constraint(self):
    "Tests that the db_constraint parameter is respected"
    with connection.schema_editor() as editor:
        editor.create_model(Tag)
        editor.create_model(Author)
        editor.create_model(BookWeak)
    # The freshly created tables are queryable.
    list(Author.objects.all())
    list(Tag.objects.all())
    list(BookWeak.objects.all())

    def find_fk(table, column):
        # Return the FK target for `column` on `table`, or None when no
        # foreign-key constraint covers that column.
        for details in self.get_constraints(table).values():
            if details['columns'] == [column] and details['foreign_key']:
                return details['foreign_key']
        return None

    # BookWeak was declared with db_constraint=False: no FK constraint.
    if find_fk(BookWeak._meta.db_table, "author_id"):
        self.fail("FK constraint for author_id found")
    # Adding a db_constraint=False FK must not create a constraint either.
    weak_tag = ForeignKey(Tag, db_constraint=False)
    weak_tag.set_attributes_from_name("tag")
    with connection.schema_editor() as editor:
        editor.add_field(Author, weak_tag)
    if find_fk(Author._meta.db_table, "tag_id"):
        self.fail("FK constraint for tag_id found")
    # Altering to a constrained FK adds the constraint.
    strong_tag = ForeignKey(Tag)
    strong_tag.set_attributes_from_name("tag")
    with connection.schema_editor() as editor:
        editor.alter_field(Author, weak_tag, strong_tag, strict=True)
    fk = find_fk(Author._meta.db_table, "tag_id")
    if fk is None:
        self.fail("No FK constraint for tag_id found")
    self.assertEqual(fk, ('schema_tag', 'id'))
    # Altering back to an unconstrained FK drops the constraint again.
    strong_tag = ForeignKey(Tag)
    strong_tag.set_attributes_from_name("tag")
    with connection.schema_editor() as editor:
        editor.alter_field(Author, strong_tag, weak_tag, strict=True)
    if find_fk(Author._meta.db_table, "tag_id"):
        self.fail("FK constraint for tag_id found")
def apply_operations(self, app_label, project_state, operations):
    """Wrap ``operations`` in a throwaway migration and apply it."""
    wrapper = Migration('name', app_label)
    wrapper.operations = operations
    with connection.schema_editor() as editor:
        return wrapper.apply(project_state, editor)
def _create_test_model(self, fields=None, model_name='TestModel', options=None):
    """
    Dynamically declare a model named ``model_name`` with the given
    ``fields`` and Meta ``options``, register it under this test module's
    app, create its table, and track it for cleanup.
    """
    if connection.vendor == 'mysql':
        self.skipTest(
            reason='MySQL doesn\'t support schema changes inside an '
            'atomic block.')
    # Obtain the app_config and app_label from the test's module path
    app_config = apps.get_containing_app_config(
        object_name=self.__class__.__module__)
    app_label = app_config.label

    class Meta:
        pass
    setattr(Meta, 'app_label', app_label)
    if options is not None:
        for key, value in options.items():
            setattr(Meta, key, value)

    def save(instance, *args, **kwargs):
        # Custom .save() method to use random primary key values.
        if instance.pk:
            # BUG FIX: was `models.Model.self(instance, ...)` — Model has
            # no attribute `self`, so saving an existing row raised
            # AttributeError. The intended call is Model.save().
            return models.Model.save(instance, *args, **kwargs)
        else:
            instance.pk = RandomPrimaryKeyModelMonkeyPatchMixin.get_unique_primary_key(
                model=instance._meta.model)
            instance.id = instance.pk
            return instance.save_base(force_insert=True)

    attrs = {
        '__module__': self.__class__.__module__,
        'save': save,
        'Meta': Meta
    }
    if fields:
        attrs.update(fields)
    # Clear previous model registration before re-registering it again to
    # avoid conflict with test models with the same name, in the same app
    # but from another test module.
    apps.all_models[app_label].pop(model_name.lower(), None)
    if PY3:
        TestModel = type(model_name, (models.Model, ), attrs)
    else:
        # Python 2 requires the class name to be bytes.
        TestModel = type(force_bytes(model_name), (models.Model, ), attrs)
    setattr(self, model_name, TestModel)
    self._test_models.append(TestModel)
    with connection.schema_editor() as schema_editor:
        schema_editor.create_model(model=TestModel)
    ContentType.objects.clear_cache()
def _test_range_overlaps(self, constraint):
    """
    Install ``constraint`` on HotelReservation and verify both DB-level
    enforcement (IntegrityError on overlap) and Python-level validation
    (constraint.validate raises ValidationError, honoring ``exclude``).
    """
    # Create exclusion constraint.
    self.assertNotIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
    with connection.schema_editor() as editor:
        editor.add_constraint(HotelReservation, constraint)
    self.assertIn(constraint.name, self.get_constraints(HotelReservation._meta.db_table))
    # Add initial reservations.
    room101 = Room.objects.create(number=101)
    room102 = Room.objects.create(number=102)
    datetimes = [
        timezone.datetime(2018, 6, 20),
        timezone.datetime(2018, 6, 24),
        timezone.datetime(2018, 6, 26),
        timezone.datetime(2018, 6, 28),
        timezone.datetime(2018, 6, 29),
    ]
    reservation = HotelReservation.objects.create(
        datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
        start=datetimes[0],
        end=datetimes[1],
        room=room102,
    )
    # Validating an already-saved row against itself must not raise.
    constraint.validate(HotelReservation, reservation)
    HotelReservation.objects.create(
        datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
        start=datetimes[1],
        end=datetimes[3],
        room=room102,
    )
    # Cancelled reservations are outside the constraint's condition.
    HotelReservation.objects.create(
        datespan=DateRange(datetimes[3].date(), datetimes[4].date()),
        start=datetimes[3],
        end=datetimes[4],
        room=room102,
        cancelled=True,
    )
    # Overlap dates.
    with self.assertRaises(IntegrityError), transaction.atomic():
        reservation = HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
        )
        msg = f"Constraint “{constraint.name}” is violated."
        with self.assertRaisesMessage(ValidationError, msg):
            constraint.validate(HotelReservation, reservation)
        reservation.save()
    # Valid range.
    other_valid_reservations = [
        # Other room.
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room101,
        ),
        # Cancelled reservation.
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[1].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
            cancelled=True,
        ),
        # Other adjacent dates.
        HotelReservation(
            datespan=(datetimes[3].date(), datetimes[4].date()),
            start=datetimes[3],
            end=datetimes[4],
            room=room102,
        ),
    ]
    for reservation in other_valid_reservations:
        constraint.validate(HotelReservation, reservation)
    HotelReservation.objects.bulk_create(other_valid_reservations)
    # Excluded fields.
    # With the conflicting field excluded, validation must pass even
    # though the row would violate the constraint at the DB level.
    constraint.validate(
        HotelReservation,
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
        ),
        exclude={"room"},
    )
    constraint.validate(
        HotelReservation,
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
        ),
        exclude={"datespan", "start", "end", "room"},
    )
def test_run_python(self):
    """
    Tests the RunPython operation
    """
    project_state = self.set_up_test_model("test_runpython", mti_model=True)

    # Create the operation
    def inner_method(models, schema_editor):
        Pony = models.get_model("test_runpython", "Pony")
        Pony.objects.create(pink=1, weight=3.55)
        Pony.objects.create(weight=5)

    def inner_method_reverse(models, schema_editor):
        Pony = models.get_model("test_runpython", "Pony")
        Pony.objects.filter(pink=1, weight=3.55).delete()
        Pony.objects.filter(weight=5).delete()
    operation = migrations.RunPython(inner_method, reverse_code=inner_method_reverse)
    # Test the state alteration does nothing
    new_state = project_state.clone()
    operation.state_forwards("test_runpython", new_state)
    self.assertEqual(new_state, project_state)
    # Test the database alteration
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 0)
    with connection.schema_editor() as editor:
        operation.database_forwards("test_runpython", editor, project_state, new_state)
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 2)
    # Now test reversal
    self.assertTrue(operation.reversible)
    with connection.schema_editor() as editor:
        operation.database_backwards("test_runpython", editor, project_state, new_state)
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 0)
    # Now test we can't use a string
    with self.assertRaises(ValueError):
        operation = migrations.RunPython("print 'ahahaha'")
    # Also test reversal fails, with an operation identical to above but without reverse_code set
    no_reverse_operation = migrations.RunPython(inner_method)
    self.assertFalse(no_reverse_operation.reversible)
    with connection.schema_editor() as editor:
        no_reverse_operation.database_forwards("test_runpython", editor, project_state, new_state)
        with self.assertRaises(NotImplementedError):
            no_reverse_operation.database_backwards(
                "test_runpython", editor, new_state, project_state)
    # The forwards run above left the two ponies in place.
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 2)

    # RunPython callables see usable models: created rows get real PKs.
    def create_ponies(models, schema_editor):
        Pony = models.get_model("test_runpython", "Pony")
        pony1 = Pony.objects.create(pink=1, weight=3.55)
        self.assertIsNot(pony1.pk, None)
        pony2 = Pony.objects.create(weight=5)
        self.assertIsNot(pony2.pk, None)
        self.assertNotEqual(pony1.pk, pony2.pk)
    operation = migrations.RunPython(create_ponies)
    with connection.schema_editor() as editor:
        operation.database_forwards("test_runpython", editor, project_state, new_state)
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 4)

    # Same check for the MTI child model.
    def create_shetlandponies(models, schema_editor):
        ShetlandPony = models.get_model("test_runpython", "ShetlandPony")
        pony1 = ShetlandPony.objects.create(weight=4.0)
        self.assertIsNot(pony1.pk, None)
        pony2 = ShetlandPony.objects.create(weight=5.0)
        self.assertIsNot(pony2.pk, None)
        self.assertNotEqual(pony1.pk, pony2.pk)
    operation = migrations.RunPython(create_shetlandponies)
    with connection.schema_editor() as editor:
        operation.database_forwards("test_runpython", editor, project_state, new_state)
    self.assertEqual(
        project_state.render().get_model("test_runpython", "Pony").objects.count(), 6)
    self.assertEqual(
        project_state.render().get_model("test_runpython", "ShetlandPony").objects.count(), 2)
def delete_table(cls):
    """Drop the database table backing this model class."""
    with connection.schema_editor() as schema:
        schema.delete_model(cls)
def test_index_together_single_list(self):
    # Test for using index_together with a single list (#22172)
    statements = connection.schema_editor()._model_indexes_sql(IndexTogetherSingleList)
    self.assertEqual(len(statements), 1)
def test_where_clauses(self):
    """where clauses generate correctly"""
    # Each (where, suffix) pair maps a lookup dict to the SQL fragment the
    # generated statement must end with; the loop replaces seven
    # near-identical copy-pasted stanzas with one data-driven check.
    cases = [
        ({'has_events': True}, 'WHERE "has_events" = true'),
        ({'has_events': False}, 'WHERE "has_events" = false'),
        ({'has_events': 42}, 'WHERE "has_events" = 42'),
        ({'has_events__lt': 42}, 'WHERE "has_events" < 42'),
        ({'has_events__gt': 42}, 'WHERE "has_events" > 42'),
        ({'has_events__lte': 42}, 'WHERE "has_events" <= 42'),
        ({'has_events__gte': 42}, 'WHERE "has_events" >= 42'),
    ]
    with connection.schema_editor() as editor:
        for where, expected_suffix in cases:
            sql = PgPartialIndex(
                fields=['has_events'],
                name='test_partial',
                where=where,
            ).create_sql(Thread, editor)
            self.assertTrue(sql.endswith(expected_suffix))
def test_indexes(self):
    """
    Tests creation/altering of indexes
    """
    # Set up both tables.
    with connection.schema_editor() as editor:
        editor.create_model(Author)
        editor.create_model(Book)
    # The initial title index exists.
    self.assertIn("title", self.get_indexes(Book._meta.db_table))
    # Dropping db_index removes the index.
    plain_title = CharField(max_length=100, db_index=False)
    plain_title.set_attributes_from_name("title")
    with connection.schema_editor() as editor:
        editor.alter_field(Book, Book._meta.get_field("title"), plain_title, strict=True)
    self.assertNotIn("title", self.get_indexes(Book._meta.db_table))
    # Restoring db_index re-creates the index.
    with connection.schema_editor() as editor:
        editor.alter_field(Book, plain_title, Book._meta.get_field("title"), strict=True)
    self.assertIn("title", self.get_indexes(Book._meta.db_table))
    # A unique column gets an implicit index.
    with connection.schema_editor() as editor:
        editor.add_field(Book, BookWithSlug._meta.get_field("slug"))
    self.assertIn("slug", self.get_indexes(Book._meta.db_table))
    # Removing the unique flag drops that implicit index.
    plain_slug = CharField(max_length=20, unique=False)
    plain_slug.set_attributes_from_name("slug")
    with connection.schema_editor() as editor:
        editor.alter_field(BookWithSlug, BookWithSlug._meta.get_field("slug"), plain_slug, strict=True)
    self.assertNotIn("slug", self.get_indexes(Book._meta.db_table))
def main():
    """Run the system-role-binding data migration."""
    # BUG FIX: schema_editor() must be entered as a context manager —
    # without `with`, deferred SQL is never executed and the editor's
    # atomic/transaction handling never runs.
    with connection.schema_editor() as schema_editor:
        migrate_system_role_binding(apps, schema_editor)
def test_virtual_relation_indexes(self):
    """Test indexes are not created for related objects"""
    statements = connection.schema_editor()._model_indexes_sql(Article)
    self.assertEqual(len(statements), 1)
def alter_column(self, model, new_field):
    """Migrate ``model`` from ``self.initial_field`` to ``new_field``."""
    with connection.schema_editor() as schema:
        schema.alter_field(model, self.initial_field, new_field)
def delete_meter_usage():
    """Drop the MeterUsage table from the database."""
    with connection.schema_editor() as editor:
        editor.delete_model(MeterUsage)
def drop_table(self, model):
    """Remove ``model``'s table from the database."""
    with connection.schema_editor() as schema:
        schema.delete_model(model)
def drop_column(self, model, field):
    """Drop ``field``'s column from ``model``'s table."""
    with connection.schema_editor() as schema:
        schema.remove_field(model, field)
def _cleanup(self):
    """Drop the test table."""
    with connection.schema_editor() as schema:
        # Remove the Author table created for the test.
        schema.delete_model(Author)
def add_column(self, model, field):
    """Add ``field``'s column to ``model``'s table."""
    with connection.schema_editor() as schema:
        schema.add_field(model, field)
def create_hit_table(date_obj):
    """Create the per-day hit-log table (hit_log_YYYYMMDD) for ``date_obj``."""
    suffix = date_obj.strftime('%Y%m%d')
    model_cls = get_hit_log_model(db_table='hit_log_{}'.format(suffix))
    with connection.schema_editor() as editor:
        editor.create_model(model_cls)
def alter_table(self, new_model):
    """Rename the initial model's table to ``new_model``'s table name."""
    source = self.initial_model._meta.db_table
    target = new_model._meta.db_table
    with connection.schema_editor() as editor:
        editor.alter_db_table(new_model, source, target)
def setUpClass(cls):
    """Prepare instance/database fixtures and create the Event table."""
    setup_instance()
    setup_database()
    # Build the table for the Event test model.
    with connection.schema_editor() as schema:
        schema.create_model(Event)
def tearDownClass(cls):
    """Drop the test model's table, then run the parent teardown."""
    with connection.schema_editor() as schema:
        schema.delete_model(cls.model)
    super().tearDownClass()
def migrate_json_fields_expensive(table, columns):
    """
    Migrate ``columns`` of ``table`` to jsonb in three phases, batching the
    data copy so the whole table is never locked at once:

    1. add nullable ``_<col>`` jsonb columns plus triggers that mirror
       writes into them;
    2. backfill the new columns in ``batchsize`` chunks;
    3. drop the triggers and old columns, then rename ``_<col>`` -> ``<col>``.
    """
    batchsize = 50000
    ct = ContentType.objects.get_by_natural_key(*table.split('_', 1))
    model = ct.model_class()
    # Phase 1: add the new columns, making them nullable to avoid populating them
    with connection.schema_editor() as schema_editor:
        # See: https://docs.djangoproject.com/en/3.1/ref/schema-editor/
        for colname in columns:
            f = model._meta.get_field(colname)
            _, _, args, kwargs = f.deconstruct()
            kwargs['null'] = True
            new_f = f.__class__(*args, **kwargs)
            new_f.set_attributes_from_name(f'_{colname}')
            schema_editor.add_field(model, new_f)
            # Create a trigger to make sure new data automatically gets put in both fields.
            with connection.cursor() as cursor:
                # It's a little annoying, I think this trigger will re-do
                # the same work as the update query in Phase 2.
                # BUG FIX: the assignment statement needs a terminating ';'
                # before 'return new', otherwise PostgreSQL rejects the
                # function body with a syntax error.
                cursor.execute(f"""
                    create or replace function update_{table}_{colname}()
                    returns trigger
                    as $body$
                    begin
                        new._{colname} = new.{colname}::jsonb;
                        return new;
                    end
                    $body$ language plpgsql;
                """)
                # BUG FIX: EXECUTE PROCEDURE requires parentheses after the
                # function name (`update_..._col()`), not a bare identifier.
                cursor.execute(f"""
                    create trigger {table}_{colname}_trigger
                    before insert or update
                    on {table}
                    for each row
                    execute procedure update_{table}_{colname}();
                """)
    # Phase 2: copy over the data
    with connection.cursor() as cursor:
        rows = 0
        for i in itertools.count(0, batchsize):
            # Stop once no rows remain at or beyond this offset.
            cursor.execute(f"select count(1) from {table} where id >= %s;", (i, ))
            if not cursor.fetchone()[0]:
                break
            column_expr = ', '.join(f"_{colname} = {colname}::jsonb" for colname in columns)
            cursor.execute(
                f"""
                update {table}
                set {column_expr}
                where id >= %s and id < %s;
                """,
                (i, i + batchsize),
            )
            rows += cursor.rowcount
            logger.debug(f"Batch {i} to {i + batchsize} copied on {table}.")
        logger.warning(f"Data copied for {rows} rows on {table}.")
    # Phase 3: drop the old column and rename the new one
    with connection.schema_editor() as schema_editor:
        # FIXME: Grab a lock explicitly here?
        for colname in columns:
            with connection.cursor() as cursor:
                # BUG FIX: DROP TRIGGER requires the owning table
                # (`... on {table}`); without it PostgreSQL errors out.
                cursor.execute(f"drop trigger {table}_{colname}_trigger on {table};")
                # BUG FIX: add '()' so the zero-argument function is named
                # unambiguously (required on PostgreSQL < 10).
                cursor.execute(f"drop function update_{table}_{colname}();")
            f = model._meta.get_field(colname)
            _, _, args, kwargs = f.deconstruct()
            kwargs['null'] = True
            new_f = f.__class__(*args, **kwargs)
            new_f.set_attributes_from_name(f'_{colname}')
            # Drop the old text column, then rename _<col> back to <col>.
            schema_editor.remove_field(model, f)
            _, _, args, kwargs = new_f.deconstruct()
            f = new_f.__class__(*args, **kwargs)
            f.set_attributes_from_name(colname)
            schema_editor.alter_field(model, new_f, f)
def tearDownClass(cls):
    """Drop the Event table, then tear down database/instance fixtures."""
    with connection.schema_editor() as schema:
        # delete the table
        schema.delete_model(Event)
    teardown_database()
    teardown_instance()
def tearDownModule():
    """Drop the tables of the dynamically created group and user models."""
    for model_cls in (GROUP_MODEL, USER_MODEL):
        with connection.schema_editor() as schema_editor:
            schema_editor.delete_model(model_cls)
def schema_editor(self):
    """Return a non-executing schema editor (SQL is only collected)."""
    # collect_sql=True -> do not actually execute.
    editor = connection.schema_editor(collect_sql=True)
    return editor
def test_create_relationship_table(self, mock_execute):
    """
    Creating RelationModel must emit exactly this DDL, in order: the model
    table, the M2M through table, their sequences, FK constraints, and
    indexes (with a version-dependent placement of the M2M UNIQUE rule).
    """
    expected_statements = [
        call(
            'CREATE COLUMN TABLE "TEST_DHP_RELATIONMODEL" '
            '("ID" INTEGER NOT NULL PRIMARY KEY, "TO_ONE_FIELD_ID" INTEGER NOT NULL)',
            None),
        # On Django >= 1.9 the uniqueness is added later as a separate
        # ALTER TABLE (inserted below); earlier versions inline it here.
        call(
            'CREATE COLUMN TABLE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" '
            '("ID" INTEGER NOT NULL PRIMARY KEY, '
            '"RELATIONMODEL_ID" INTEGER NOT NULL, '
            '"COMPLEXMODEL_ID" INTEGER NOT NULL)'
            if django.VERSION >= (1, 9) else
            'CREATE COLUMN TABLE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" '
            '("ID" INTEGER NOT NULL PRIMARY KEY, '
            '"RELATIONMODEL_ID" INTEGER NOT NULL, '
            '"COMPLEXMODEL_ID" INTEGER NOT NULL, '
            'UNIQUE ("RELATIONMODEL_ID", "COMPLEXMODEL_ID"))',
            None),
        call(
            'CREATE SEQUENCE "TEST_DHP_RELATIONMODEL_ID_SEQ" '
            'RESET BY SELECT IFNULL(MAX("ID"),0) + 1 FROM "TEST_DHP_RELATIONMODEL"',
            []),
        call(
            'ALTER TABLE "TEST_DHP_RELATIONMODEL" '
            'ADD CONSTRAINT "TEST_DHP_RELATIONMODEL_TO_ONE_FIELD_ID_B93780F9_FK_TEST_DHP_COMPLEXMODEL_ID" '
            'FOREIGN KEY ("TO_ONE_FIELD_ID") REFERENCES "TEST_DHP_COMPLEXMODEL" ("ID") ON DELETE CASCADE',
            []),
        call(
            'CREATE INDEX "TEST_DHP_RELATIONMODEL_2E33486B" ON "TEST_DHP_RELATIONMODEL" ("TO_ONE_FIELD_ID")',
            []),
        call(
            'CREATE SEQUENCE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_ID_SEQ" '
            'RESET BY SELECT IFNULL(MAX("ID"),0) + 1 FROM "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD"',
            []),
        call(
            'ALTER TABLE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" ADD CONSTRAINT '
            '"TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_RELATIONMODEL_ID_4E69A849_FK_TEST_DHP_RELATIONMODEL_ID" '
            'FOREIGN KEY ("RELATIONMODEL_ID") REFERENCES "TEST_DHP_RELATIONMODEL" ("ID") ON DELETE CASCADE',
            []),
        call(
            'ALTER TABLE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" ADD CONSTRAINT '
            '"TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_COMPLEXMODEL_ID_414707C4_FK_TEST_DHP_COMPLEXMODEL_ID" '
            'FOREIGN KEY ("COMPLEXMODEL_ID") REFERENCES "TEST_DHP_COMPLEXMODEL" ("ID") ON DELETE CASCADE',
            []),
        call(
            'CREATE INDEX "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_87DCF9A5" '
            'ON "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" ("RELATIONMODEL_ID")',
            []),
        call(
            'CREATE INDEX "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_370D6EEB" '
            'ON "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" ("COMPLEXMODEL_ID")',
            []),
    ]
    if django.VERSION >= (1, 9):
        # Django >= 1.9 emits the M2M uniqueness as its own constraint,
        # positioned after the FK constraints.
        expected_statements.insert(
            8,
            call(
                'ALTER TABLE "TEST_DHP_RELATIONMODEL_TO_MANY_FIELD" ADD CONSTRAINT '
                '"TEST_DHP_RELATIONMODEL_TO_MANY_FIELD_RELATIONMODEL_ID_7FEAA1CD_UNIQ" '
                'UNIQUE ("RELATIONMODEL_ID", "COMPLEXMODEL_ID")',
                []))
    with connection.schema_editor() as editor:
        editor.create_model(RelationModel)
    # Order matters: compare the full mocked execute() call sequence.
    self.assertSequenceEqual(mock_execute.call_args_list, expected_statements)
def tearDown(self):
    """Drop the dynamically created model's table after each test."""
    with connection.schema_editor() as schema:
        schema.delete_model(self.model_cls)
def tearDown(self):
    """Drop the HiveQuery table after each test."""
    with connection.schema_editor() as schema:
        schema.delete_model(HiveQuery)
def _test_range_overlaps(self, constraint):
    """Install ``constraint`` and verify overlapping reservations are rejected."""
    table = HotelReservation._meta.db_table
    # The exclusion constraint is not installed yet; add it.
    self.assertNotIn(constraint.name, self.get_constraints(table))
    with connection.schema_editor() as editor:
        editor.add_constraint(HotelReservation, constraint)
    self.assertIn(constraint.name, self.get_constraints(table))
    # Seed two rooms and a handful of reference timestamps.
    room101 = Room.objects.create(number=101)
    room102 = Room.objects.create(number=102)
    datetimes = [timezone.datetime(2018, 6, day) for day in (20, 24, 26, 28, 29)]
    HotelReservation.objects.create(
        datespan=DateRange(datetimes[0].date(), datetimes[1].date()),
        start=datetimes[0],
        end=datetimes[1],
        room=room102,
    )
    HotelReservation.objects.create(
        datespan=DateRange(datetimes[1].date(), datetimes[3].date()),
        start=datetimes[1],
        end=datetimes[3],
        room=room102,
    )
    # An overlapping stay in the same room must violate the constraint.
    with self.assertRaises(IntegrityError), transaction.atomic():
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
        ).save()
    # None of these conflict: other room, cancelled, or adjacent dates.
    HotelReservation.objects.bulk_create([
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[2].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room101,
        ),
        HotelReservation(
            datespan=(datetimes[1].date(), datetimes[1].date()),
            start=datetimes[1],
            end=datetimes[2],
            room=room102,
            cancelled=True,
        ),
        HotelReservation(
            datespan=(datetimes[3].date(), datetimes[4].date()),
            start=datetimes[3],
            end=datetimes[4],
            room=room102,
        ),
    ])