def test_rename_gh380(self):
    """Regression test (GH-380): rename a column and read data back."""
    charlie = User.create(id='charlie')
    huey = User.create(id='huey')
    Page.create(name='p1-1', user=charlie)
    Page.create(name='p2-1', user=charlie)
    Page.create(name='p3-2', user=huey)

    migrate(self.migrator.rename_column('page', 'name', 'title'))
    self.assertEqual(self.get_column_names('page'),
                     set(['id', 'title', 'user_id']))

    # Model reflecting the renamed column, bound to the same table.
    class NewPage(Model):
        title = TextField(unique=True, null=True)
        user = ForeignKeyField(User, null=True, related_name='newpages')
        class Meta:
            database = self.database
            db_table = Page._meta.db_table

    pages = (NewPage
             .select(NewPage.title, NewPage.user)
             .order_by(NewPage.title))
    self.assertEqual(
        [(page.title, page.user.id) for page in pages],
        [('p1-1', 'charlie'), ('p2-1', 'charlie'), ('p3-2', 'huey')])
def test_rename_table(self):
    """Rename the 'tag' table and confirm the rows move with it."""
    Tag.create(tag='t1')
    Tag.create(tag='t2')

    # Subclass bound to the destination table name.
    class Tag_asdf(Tag):
        pass
    self.assertEqual(Tag_asdf._meta.db_table, 'tag_asdf')

    # Drop the new table just to be safe.
    Tag_asdf.drop_table(True)

    # Rename the tag table.
    migrate(self.migrator.rename_table('tag', 'tag_asdf'))

    # Verify the data was moved.
    rows = Tag_asdf.select().order_by(Tag_asdf.tag)
    self.assertEqual([row.tag for row in rows], ['t1', 't2'])

    # Verify the old table is gone.
    with self.database.transaction():
        self.assertRaises(DatabaseError, Tag.create, tag='t3')
def test_add_foreign_key(self):
    """Add a FK column to 'tag' and verify the introspected constraint."""
    # Clear any reverse-rel left over from a previous run.
    if hasattr(Person, 'newtag_set'):
        delattr(Person, 'newtag_set')
        del Person._meta.reverse_rel['newtag_set']

    # Ensure no foreign keys are present at the beginning of the test.
    self.assertEqual(self.database.get_foreign_keys('tag'), [])

    field = ForeignKeyField(Person, null=True, to_field=Person.id)
    migrate(self.migrator.add_column('tag', 'person_id', field))

    class NewTag(Tag):
        person = field
        class Meta:
            db_table = 'tag'

    person = Person.create(first_name='First', last_name='Last')
    NewTag.create(tag='t1', person=person)
    NewTag.create(tag='t2')

    self.assertEqual(NewTag.get(NewTag.tag == 't1').person, person)
    self.assertEqual(NewTag.get(NewTag.tag == 't2').person, None)

    fks = self.database.get_foreign_keys('tag')
    self.assertEqual(len(fks), 1)
    fk = fks[0]
    self.assertEqual(fk.column, 'person_id')
    self.assertEqual(fk.dest_column, 'id')
    self.assertEqual(fk.dest_table, 'person')
def test_add_foreign_key(self):
    """Add a FK column to 'tag'; check values and introspected metadata."""
    # Remove a stale reverse-rel from any earlier run of this test.
    if hasattr(Person, "newtag_set"):
        delattr(Person, "newtag_set")
        del Person._meta.reverse_rel["newtag_set"]

    # Ensure no foreign keys are present at the beginning of the test.
    self.assertEqual(self.database.get_foreign_keys("tag"), [])

    field = ForeignKeyField(Person, null=True, to_field=Person.id)
    migrate(self.migrator.add_column("tag", "person_id", field))

    class NewTag(Tag):
        person = field
        class Meta:
            db_table = "tag"

    owner = Person.create(first_name="First", last_name="Last")
    NewTag.create(tag="t1", person=owner)
    NewTag.create(tag="t2")

    self.assertEqual(NewTag.get(NewTag.tag == "t1").person, owner)
    self.assertEqual(NewTag.get(NewTag.tag == "t2").person, None)

    fks = self.database.get_foreign_keys("tag")
    self.assertEqual(len(fks), 1)
    self.assertEqual(fks[0].column, "person_id")
    self.assertEqual(fks[0].dest_column, "id")
    self.assertEqual(fks[0].dest_table, "person")
def test_sqlite_column_name_regression(self):
    """Add/drop columns on a table with awkward names; constraints survive."""
    BadNames.create(primary_data='pd', foreign_data='fd', data='d')
    migrator = SchemaMigrator.from_database(self.database)
    new_data = TextField(default='foo')
    migrate(migrator.add_column('bad_names', 'new_data', new_data),
            migrator.drop_column('bad_names', 'data'))

    names = [col.name for col in self.database.get_columns('bad_names')]
    self.assertEqual(names,
                     ['id', 'primary_data', 'foreign_data', 'new_data'])

    BNT = Table('bad_names',
                ('id', 'primary_data', 'foreign_data', 'new_data'))
    BNT = BNT.bind(self.database)
    self.assertEqual([row for row in BNT.select()], [{
        'id': 1,
        'primary_data': 'pd',
        'foreign_data': 'fd',
        'new_data': 'foo'}])

    # Verify constraints were carried over.
    data = {'primary_data': 'pd', 'foreign_data': 'xx', 'new_data': 'd'}
    self.assertRaises(IntegrityError, BNT.insert(data).execute)
    data.update(primary_data='px', foreign_data='fd')
    self.assertRaises(IntegrityError, BNT.insert(data).execute)
    data.update(foreign_data='fx')
    self.assertTrue(BNT.insert(data).execute())
def test_drop_not_null(self):
    """After dropping NOT NULL, the name columns accept NULL values."""
    self._create_people()
    migrate(self.migrator.drop_not_null("person", "first_name"),
            self.migrator.drop_not_null("person", "last_name"))
    Person.create(first_name=None, last_name=None)
    nulls = Person.select().where(
        (Person.first_name >> None) & (Person.last_name >> None))
    self.assertEqual(nulls.count(), 1)
def test_modify_not_null_foreign_key(self):
    """Toggle NOT NULL on a FK column that starts with NULL rows."""
    user = User.create(id='charlie')
    Page.create(name='null user')
    Page.create(name='charlie', user=user)

    def add_not_null():
        # Apply the NOT NULL constraint inside a transaction.
        with self.database.transaction():
            migrate(self.migrator.add_not_null('page', 'user_id'))

    if self._exception_add_not_null:
        # Existing NULL rows violate the new constraint.
        self.assertRaises(IntegrityError, add_not_null)
    Page.update(user=user).where(Page.user.is_null()).execute()
    add_not_null()

    # And attempting to insert a null value results in an integrity error.
    with self.database.transaction():
        with self.assertRaisesCtx((OperationalError, IntegrityError)):
            Page.create(name='fails', user=None)

    # Now we will drop it.
    with self.database.transaction():
        migrate(self.migrator.drop_not_null('page', 'user_id'))

    self.assertEqual(Page.select().where(Page.user.is_null()).count(), 0)
    Page.create(name='succeeds', user=None)
    self.assertEqual(Page.select().where(Page.user.is_null()).count(), 1)
def alter_table():
    """Add a 'user_id' column (a CharField with default '') to User.

    NOTE(review): the field added under the column name 'user_id' is the
    local named ``title_field`` -- confirm this name/field pairing is
    intentional.
    """
    title_field = CharField(default='')
    migrator = MySQLMigrator(mysql_db)
    # The original also created an unused IntegerField local
    # (status_field); removed as dead code.
    migrate(
        migrator.add_column('User', 'user_id', title_field),
    )
def test_add_column_constraint(self):
    """Columns added with DEFAULT / CHECK constraints enforce them."""
    cf = CharField(null=True, constraints=[SQL('default \'foo\'')])
    ff = FloatField(default=0., constraints=[Check('val < 1.0')])

    Tag.create(tag='t1')
    migrate(self.migrator.add_column('tag', 'misc', cf),
            self.migrator.add_column('tag', 'val', ff))

    class NewTag(Model):
        tag = CharField()
        misc = CharField()
        val = FloatField()
        class Meta:
            database = self.database
            table_name = Tag._meta.table_name

    # The pre-existing row picked up the column defaults.
    t1_db = NewTag.get(NewTag.tag == 't1')
    self.assertEqual(t1_db.misc, 'foo')
    self.assertEqual(t1_db.val, 0.)

    # The CHECK constraint rejects val >= 1.0.
    with self.database.atomic():
        self.assertRaises(IntegrityError, NewTag.create,
                          tag='t2', misc='bar', val=2.)

    NewTag.create(tag='t3', misc='baz', val=0.9)
    t3_db = NewTag.get(NewTag.tag == 't3')
    self.assertEqual(t3_db.misc, 'baz')
    self.assertEqual(t3_db.val, 0.9)
def test_drop_foreign_key(self):
    """Dropping a FK column removes both the column and its constraint."""
    migrate(self.migrator.drop_column('page', 'user_id'))
    remaining = sorted(col.name
                       for col in self.database.get_columns('page'))
    self.assertEqual(remaining, ['id', 'name'])
    self.assertEqual(self.database.get_foreign_keys('page'), [])
def test_rename_column(self):
    """Rename two person columns; data stays readable via a new model."""
    self._create_people()
    migrate(
        self.migrator.rename_column('person', 'first_name', 'first'),
        self.migrator.rename_column('person', 'last_name', 'last'))
    self.assertEqual(self.get_column_names('person'),
                     set(['id', 'first', 'last', 'dob']))

    # Model reflecting the renamed schema, bound to the same table.
    class NewPerson(Model):
        first = CharField()
        last = CharField()
        dob = DateField()
        class Meta:
            database = self.database
            db_table = Person._meta.db_table

    rows = (NewPerson
            .select(NewPerson.first, NewPerson.last, NewPerson.dob)
            .order_by(NewPerson.first))
    self.assertEqual(list(rows.tuples()), self._person_data)
def test_add_index(self):
    """A unique composite index rejects duplicate (first, last) pairs."""
    # Create a unique index on first and last names.
    migrate(self.migrator.add_index(
        "person", ("first_name", "last_name"), True))
    Person.create(first_name="first", last_name="last")
    with self.database.transaction():
        self.assertRaises(IntegrityError, Person.create,
                          first_name="first", last_name="last")
def test_drop_column(self):
    """Dropped columns disappear from the table's column list."""
    self._create_people()
    migrate(
        self.migrator.drop_column('person', 'last_name'),
        self.migrator.drop_column('person', 'dob'))
    self.assertEqual(self.get_column_names('person'),
                     set(['id', 'first_name']))
def add_index(flag=False):
    """Create the unique DailyPrice index; optionally StockInfo's too.

    Args:
        flag: when truthy, also create the unique
            StockInfo(ticker, market_id) index.
    """
    migrate(
        migrator.add_index('DailyPrice', ('trading_date', 'ticker'), True),
    )
    # `if flag is True` would silently skip truthy non-bool values
    # (e.g. 1); test truthiness instead.
    if flag:
        migrate(
            migrator.add_index('StockInfo', ('ticker', 'market_id'), True),
        )
def test_add_column(self):
    """Add columns with assorted NULL/default configs; verify backfill."""
    # Create some fields with a variety of NULL / default values.
    df = DateTimeField(null=True)
    df_def = DateTimeField(default=datetime.datetime(2012, 1, 1))
    cf = CharField(max_length=200, default="")
    bf = BooleanField(default=True)
    ff = FloatField(default=0)

    # Two pre-existing rows exercise backfill of the non-null columns.
    t1 = Tag.create(tag="t1")
    t2 = Tag.create(tag="t2")

    def add_column(name, field):
        # Shorthand for an `add_column` migration on "tag".
        return self.migrator.add_column("tag", name, field)

    migrate(
        add_column("pub_date", df),
        add_column("modified_date", df_def),
        add_column("comment", cf),
        add_column("is_public", bf),
        add_column("popularity", ff),
    )

    # Model reflecting the post-migration schema.
    class NewTag(Model):
        tag = CharField()
        pub_date = df
        modified_date = df_def
        comment = cf
        is_public = bf
        popularity = ff
        class Meta:
            database = self.database
            db_table = Tag._meta.db_table

    rows = NewTag.select(
        NewTag.id,
        NewTag.tag,
        NewTag.pub_date,
        NewTag.modified_date,
        NewTag.comment,
        NewTag.is_public,
        NewTag.popularity,
    ).order_by(NewTag.tag.asc())

    # Verify the resulting rows are correct.
    self.assertEqual(
        list(rows.tuples()),
        [
            (t1.id, "t1", None, datetime.datetime(2012, 1, 1), "", True, 0.0),
            (t2.id, "t2", None, datetime.datetime(2012, 1, 1), "", True, 0.0),
        ],
    )
def test_index_preservation(self):
    """Renaming a column on SQLite rebuilds the table; verify the exact
    sequence of SQL statements, including re-creation of both indexes."""
    self.reset_sql_history()
    migrate(self.migrator.rename_column(
        'index_model', 'first_name', 'first'))
    queries = [x.msg for x in self.history]
    self.assertEqual(queries, [
        # Get all the columns.
        ('PRAGMA "main".table_info("index_model")', None),
        # Get the table definition.
        ('select name, sql from sqlite_master '
         'where type=? and LOWER(name)=?', ['table', 'index_model']),
        # Get the indexes and indexed columns for the table.
        ('SELECT name, sql FROM "main".sqlite_master '
         'WHERE tbl_name = ? AND type = ? ORDER BY name',
         ('index_model', 'index')),
        ('PRAGMA "main".index_list("index_model")', None),
        ('PRAGMA "main".index_info("index_model_data")', None),
        ('PRAGMA "main".index_info("index_model_first_name_last_name")',
         None),
        # Get foreign keys.
        ('PRAGMA "main".foreign_key_list("index_model")', None),
        # Drop any temporary table, if it exists.
        ('DROP TABLE IF EXISTS "index_model__tmp__"', []),
        # Create a temporary table with the renamed column.
        ('CREATE TABLE "index_model__tmp__" ('
         '"id" INTEGER NOT NULL PRIMARY KEY, '
         '"first" VARCHAR(255) NOT NULL, '
         '"last_name" VARCHAR(255) NOT NULL, '
         '"data" INTEGER NOT NULL)', []),
        # Copy data from original table into temporary table.
        ('INSERT INTO "index_model__tmp__" '
         '("id", "first", "last_name", "data") '
         'SELECT "id", "first_name", "last_name", "data" '
         'FROM "index_model"', []),
        # Drop the original table.
        ('DROP TABLE "index_model"', []),
        # Rename the temporary table, replacing the original.
        ('ALTER TABLE "index_model__tmp__" RENAME TO "index_model"', []),
        # Re-create the indexes.
        ('CREATE UNIQUE INDEX "index_model_data" '
         'ON "index_model" ("data")', []),
        ('CREATE UNIQUE INDEX "index_model_first_name_last_name" '
         'ON "index_model" ("first", "last_name")', [])
    ])
def test_index_preservation(self):
    """Renaming a column on SQLite rebuilds the table; verify the exact
    sequence of SQL statements captured by count_queries."""
    with count_queries() as qc:
        migrate(self.migrator.rename_column(
            'indexmodel', 'first_name', 'first'))
    queries = [log.msg for log in qc.get_queries()]
    self.assertEqual(queries, [
        # Get all the columns.
        ('PRAGMA table_info("indexmodel")', None),
        # Get the table definition.
        ('select name, sql from sqlite_master '
         'where type=? and LOWER(name)=?', ['table', 'indexmodel']),
        # Get the indexes and indexed columns for the table.
        ('SELECT name, sql FROM sqlite_master '
         'WHERE tbl_name = ? AND type = ? ORDER BY name',
         ('indexmodel', 'index')),
        ('PRAGMA index_list("indexmodel")', None),
        ('PRAGMA index_info("indexmodel_data")', None),
        ('PRAGMA index_info("indexmodel_first_name_last_name")', None),
        # Get foreign keys.
        ('PRAGMA foreign_key_list("indexmodel")', None),
        # Drop any temporary table, if it exists.
        ('DROP TABLE IF EXISTS indexmodel__tmp__', []),
        # Create a temporary table with the renamed column.
        ('CREATE TABLE indexmodel__tmp__ ('
         'id INTEGER NOT NULL PRIMARY KEY, '
         'first VARCHAR(255) NOT NULL, '
         'last_name VARCHAR(255) NOT NULL, '
         '"data" INTEGER NOT NULL)', []),
        # Copy data from original table into temporary table.
        ('INSERT INTO indexmodel__tmp__ '
         '(id, "first", last_name, "data") '
         'SELECT id, first_name, last_name, "data" '
         'FROM indexmodel', []),
        # Drop the original table.
        ('DROP TABLE indexmodel', []),
        # Rename the temporary table, replacing the original.
        ('ALTER TABLE indexmodel__tmp__ RENAME TO indexmodel', []),
        # Re-create the indexes.
        ('CREATE UNIQUE INDEX indexmodel_data '
         'ON indexmodel ("data")', []),
        ('CREATE UNIQUE INDEX indexmodel_first_last_name '
         'ON indexmodel (first, last_name)', [])
    ])
def test_add_column_with_index_type(self):
    """BinaryJSONField with index=True emits a Postgres GIN index."""
    self.reset_sql_history()
    field = BinaryJSONField(default=dict, index=True, null=True)
    migrate(self.migrator.add_column('tag', 'metadata', field))
    executed = [entry.msg for entry in self.history]
    self.assertEqual(executed, [
        ('ALTER TABLE "tag" ADD COLUMN "metadata" JSONB', []),
        ('CREATE INDEX "tag_metadata" ON "tag" '
         'USING GIN ("metadata")', []),
    ])
def test_add_and_remove(self):
    """Repeatedly adding and dropping a column nets out to no change."""
    field = CharField(default='foo')
    operations = []
    for _ in range(10):
        operations.append(self.migrator.add_column('tag', 'foo', field))
        operations.append(self.migrator.drop_column('tag', 'foo'))
    migrate(*operations)
    self.assertEqual(self.get_column_names('tag'), set(['id', 'tag']))
def test_drop_index(self):
    """Dropping the unique index allows duplicate rows again."""
    # Create a unique index.
    self.test_add_index()

    # Now drop the unique index.
    migrate(self.migrator.drop_index(
        "person", "person_first_name_last_name"))
    Person.create(first_name="first", last_name="last")
    dupes = Person.select().where((Person.first_name == "first") &
                                  (Person.last_name == "last"))
    self.assertEqual(dupes.count(), 2)
def test_add_and_remove(self):
    """Ten add/drop cycles of the same column leave the schema unchanged."""
    field = CharField(default="foo")
    operations = []
    for _ in range(10):
        operations.append(self.migrator.add_column("tag", "foo", field))
        operations.append(self.migrator.drop_column("tag", "foo"))
    migrate(*operations)
    self.assertEqual(self.get_column_names("tag"), set(["id", "tag"]))
def test_add_column_with_index_type(self):
    """BinaryJSONField with index=True emits a GIN index (pg-only import)."""
    from playhouse.postgres_ext import BinaryJSONField
    self.reset_sql_history()
    field = BinaryJSONField(default=dict, index=True, null=True)
    migrate(self.migrator.add_column('tag', 'metadata', field))
    executed = [entry.msg for entry in self.history]
    self.assertEqual(executed, [
        ('ALTER TABLE "tag" ADD COLUMN "metadata" JSONB', []),
        ('CREATE INDEX "tag_metadata" ON "tag" '
         'USING GIN ("metadata")', []),
    ])
def test_add_index(self):
    """A unique composite index rejects duplicate (first, last) pairs."""
    # Create a unique index on first and last names.
    migrate(self.migrator.add_index(
        'person', ('first_name', 'last_name'), True))
    Person.create(first_name='first', last_name='last')
    with self.database.transaction():
        self.assertRaises(IntegrityError, Person.create,
                          first_name='first', last_name='last')
def test_modify_fk_constraint(self):
    """Replace the page.user_id FK with one carrying ON DELETE CASCADE;
    verify the exact SQL sequence, then that the cascade actually fires."""
    self.reset_sql_history()
    new_fk = ForeignKeyField(User, User.id, null=True, on_delete='CASCADE')
    migrate(self.migrator.drop_column('page', 'user_id'),
            self.migrator.add_column('page', 'user_id', new_fk))
    queries = [x.msg for x in self.history]
    self.assertEqual(
        queries,
        [
            # Get all columns for table.
            ('PRAGMA "main".table_info("page")', None),
            # Get the SQL used to generate the table and indexes.
            ('select name, sql from sqlite_master '
             'where type=? and LOWER(name)=?', ['table', 'page']),
            ('SELECT name, sql FROM "main".sqlite_master '
             'WHERE tbl_name = ? AND type = ? ORDER BY name',
             ('page', 'index')),
            # Get the indexes and indexed columns for the table.
            ('PRAGMA "main".index_list("page")', None),
            ('PRAGMA "main".index_info("page_name")', None),
            ('PRAGMA "main".index_info("page_user_id")', None),
            ('PRAGMA "main".foreign_key_list("page")', None),
            # Clear out a temp table and create it w/o the user_id FK.
            ('DROP TABLE IF EXISTS "page__tmp__"', []),
            ('CREATE TABLE "page__tmp__" ('
             '"id" INTEGER NOT NULL PRIMARY KEY, "name" VARCHAR(100))',
             []),
            # Copy data into the temp table, drop the original and rename
            # the temp -> original. Recreate index(es).
            ('INSERT INTO "page__tmp__" ("id", "name") '
             'SELECT "id", "name" FROM "page"', []),
            ('DROP TABLE "page"', []),
            ('ALTER TABLE "page__tmp__" RENAME TO "page"', []),
            ('CREATE UNIQUE INDEX "page_name" ON "page" ("name")', []),
            # Add new foreign-key field with appropriate constraint.
            ('ALTER TABLE "page" ADD COLUMN "user_id" VARCHAR(20) '
             'REFERENCES "users" ("id") ON DELETE CASCADE', []),
            ('CREATE INDEX "page_user_id" ON "page" ("user_id")', []),
        ])

    # Enable FK enforcement so the CASCADE can be observed.
    self.database.pragma('foreign_keys', 1)
    huey = User.create(id='huey')
    huey_page = Page.create(user=huey, name='huey page')
    self.assertEqual(Page.select().count(), 1)

    # Deleting the user will cascade to the associated page.
    User.delete().where(User.id == 'huey').execute()
    self.assertEqual(Page.select().count(), 0)
def test_rename_foreign_key(self):
    """Renaming a FK column preserves the constraint under the new name."""
    migrate(self.migrator.rename_column("page", "user_id", "huey_id"))

    names = sorted(col.name
                   for col in self.database.get_columns("page"))
    self.assertEqual(names, ["huey_id", "id", "name"])

    fks = self.database.get_foreign_keys("page")
    self.assertEqual(len(fks), 1)
    self.assertEqual(fks[0].column, "huey_id")
    self.assertEqual(fks[0].dest_column, "id")
    self.assertEqual(fks[0].dest_table, "users")
def migrate_script():
    """Add the real-name-authentication column to the virtualcard table."""
    with my_db.transaction():
        # out_trade_no = CharField(default=None, null=True)
        real_name_authentication = CharField(
            default="未认证", choices=["已认证", "未认证"])
        migrate(
            # migrator.drop_column('consumerecord', 'out_trade_no')
            migrator.add_column('virtualcard', 'real_name_authentication',
                                real_name_authentication),
        )
def test_add_index(self):
    """Duplicate (first, last) pairs are rejected once uniquely indexed."""
    # Create a unique index on first and last names.
    names = ('first_name', 'last_name')
    migrate(self.migrator.add_index('person', names, True))
    Person.create(first_name='first', last_name='last')
    with self.database.transaction():
        self.assertRaises(
            IntegrityError,
            Person.create,
            first_name='first',
            last_name='last')
def test_add_column(self):
    """Add columns with various defaults; existing rows get backfilled."""
    # Create some fields with a variety of NULL / default values.
    df = DateTimeField(null=True)
    df_def = DateTimeField(default=datetime.datetime(2012, 1, 1))
    cf = CharField(max_length=200, default='')
    bf = BooleanField(default=True)
    ff = FloatField(default=0)

    # Two pre-existing rows to test backfill of non-null columns.
    t1 = Tag.create(tag='t1')
    t2 = Tag.create(tag='t2')

    # Shorthand for `add_column` migrations against 'tag'.
    add_column = partial(self.migrator.add_column, 'tag')

    migrate(
        add_column('pub_date', df),
        add_column('modified_date', df_def),
        add_column('comment', cf),
        add_column('is_public', bf),
        add_column('popularity', ff))

    # Model reflecting the post-migration schema.
    class NewTag(Model):
        tag = CharField()
        pub_date = df
        modified_date = df_def
        comment = cf
        is_public = bf
        popularity = ff
        class Meta:
            database = self.database
            db_table = Tag._meta.db_table

    rows = (NewTag
            .select(NewTag.id, NewTag.tag, NewTag.pub_date,
                    NewTag.modified_date, NewTag.comment,
                    NewTag.is_public, NewTag.popularity)
            .order_by(NewTag.tag.asc()))

    # Verify the resulting rows are correct.
    self.assertEqual(list(rows.tuples()), [
        (t1.id, 't1', None, datetime.datetime(2012, 1, 1), '', True, 0.0),
        (t2.id, 't2', None, datetime.datetime(2012, 1, 1), '', True, 0.0),
    ])
def migrate1(migrator):
    """Migration from version 5.0 to 6.0."""
    migrate(
        # Columns removed in 6.0.
        migrator.drop_column(BotStatus._meta.db_table, 'last_top'),
        migrator.drop_column(BotStatus._meta.db_table, 'mail_data'),
        migrator.drop_column(User._meta.db_table, 'do_not_disturb'),
        migrator.drop_column(User._meta.db_table, 'memory'),
        # Columns introduced in 6.0.
        migrator.add_column(User._meta.db_table, 'chatter_id',
                            peewee.TextField(null=True)),
        migrator.add_column(User._meta.db_table, 'status',
                            peewee.TextField(default="")),
        migrator.add_column(User._meta.db_table, 'status_locked_message',
                            peewee.TextField(null=True)),
    )
def test_rename_unique_foreign_key(self):
    """Renaming a unique FK column keeps its constraint intact."""
    migrate(self.migrator.rename_column('session', 'user_id', 'huey_id'))

    names = sorted(col.name
                   for col in self.database.get_columns('session'))
    self.assertEqual(names, ['huey_id', 'id', 'updated_at'])

    fks = self.database.get_foreign_keys('session')
    self.assertEqual(len(fks), 1)
    self.assertEqual(fks[0].column, 'huey_id')
    self.assertEqual(fks[0].dest_column, 'id')
    self.assertEqual(fks[0].dest_table, 'users')
def test_drop_index(self):
    """Once the unique index is dropped, duplicates insert cleanly."""
    # Create a unique index.
    self.test_add_index()

    # Now drop the unique index.
    migrate(self.migrator.drop_index(
        'person', 'person_first_name_last_name'))
    Person.create(first_name='first', last_name='last')
    dupes = Person.select().where((Person.first_name == 'first') &
                                  (Person.last_name == 'last'))
    self.assertEqual(dupes.count(), 2)
def test_add_column(self):
    """Add columns with assorted defaults; pre-existing rows backfill."""
    # Create some fields with a variety of NULL / default values.
    df = DateTimeField(null=True)
    df_def = DateTimeField(default=datetime.datetime(2012, 1, 1))
    cf = CharField(max_length=200, default='')
    bf = BooleanField(default=True)
    ff = FloatField(default=0)

    # Two pre-existing rows exercise the backfill of non-null columns.
    t1 = Tag.create(tag='t1')
    t2 = Tag.create(tag='t2')

    # Shorthand for `add_column` migrations against 'tag'.
    add_column = partial(self.migrator.add_column, 'tag')

    migrate(
        add_column('pub_date', df),
        add_column('modified_date', df_def),
        add_column('comment', cf),
        add_column('is_public', bf),
        add_column('popularity', ff))

    # Model reflecting the post-migration schema.
    class NewTag(Model):
        tag = CharField()
        pub_date = df
        modified_date = df_def
        comment = cf
        is_public = bf
        popularity = ff
        class Meta:
            database = self.database
            table_name = Tag._meta.table_name

    rows = (NewTag
            .select(NewTag.id, NewTag.tag, NewTag.pub_date,
                    NewTag.modified_date, NewTag.comment,
                    NewTag.is_public, NewTag.popularity)
            .order_by(NewTag.tag.asc()))

    # Verify the resulting rows are correct.
    self.assertEqual(list(rows.tuples()), [
        (t1.id, 't1', None, datetime.datetime(2012, 1, 1), '', True, 0.0),
        (t2.id, 't2', None, datetime.datetime(2012, 1, 1), '', True, 0.0),
    ])
def test_modify_fk_constraint(self):
    """Swap the page.user_id FK for one with ON DELETE CASCADE; check the
    emitted SQL statement-by-statement, then exercise the cascade."""
    self.reset_sql_history()
    new_fk = ForeignKeyField(User, User.id, null=True, on_delete='CASCADE')
    migrate(
        self.migrator.drop_column('page', 'user_id'),
        self.migrator.add_column('page', 'user_id', new_fk))
    queries = [x.msg for x in self.history]
    self.assertEqual(queries, [
        # Get all columns for table.
        ('PRAGMA "main".table_info("page")', None),
        # Get the SQL used to generate the table and indexes.
        ('select name, sql from sqlite_master '
         'where type=? and LOWER(name)=?', ['table', 'page']),
        ('SELECT name, sql FROM "main".sqlite_master '
         'WHERE tbl_name = ? AND type = ? ORDER BY name',
         ('page', 'index')),
        # Get the indexes and indexed columns for the table.
        ('PRAGMA "main".index_list("page")', None),
        ('PRAGMA "main".index_info("page_name")', None),
        ('PRAGMA "main".index_info("page_user_id")', None),
        ('PRAGMA "main".foreign_key_list("page")', None),
        # Clear out a temp table and create it w/o the user_id FK.
        ('DROP TABLE IF EXISTS "page__tmp__"', []),
        ('CREATE TABLE "page__tmp__" ('
         '"id" INTEGER NOT NULL PRIMARY KEY, "name" VARCHAR(100))', []),
        # Copy data into the temp table, drop the original and rename
        # the temp -> original. Recreate index(es).
        ('INSERT INTO "page__tmp__" ("id", "name") '
         'SELECT "id", "name" FROM "page"', []),
        ('DROP TABLE "page"', []),
        ('ALTER TABLE "page__tmp__" RENAME TO "page"', []),
        ('CREATE UNIQUE INDEX "page_name" ON "page" ("name")', []),
        # Add new foreign-key field with appropriate constraint.
        ('ALTER TABLE "page" ADD COLUMN "user_id" VARCHAR(20) '
         'REFERENCES "users" ("id") ON DELETE CASCADE', []),
        ('CREATE INDEX "page_user_id" ON "page" ("user_id")', []),
    ])

    # Enable FK enforcement so the CASCADE can be observed.
    self.database.pragma('foreign_keys', 1)
    huey = User.create(id='huey')
    huey_page = Page.create(user=huey, name='huey page')
    self.assertEqual(Page.select().count(), 1)

    # Deleting the user will cascade to the associated page.
    User.delete().where(User.id == 'huey').execute()
    self.assertEqual(Page.select().count(), 0)
def test_add_fk_with_constraints(self):
    """A self-referential FK column carries its ON DELETE clause."""
    self.reset_sql_history()
    field = ForeignKeyField(Category, Category.id, backref='children',
                            null=True, on_delete='SET NULL')
    migrate(self.migrator.add_column(
        Category._meta.table_name, 'parent_id', field))
    executed = [entry.msg for entry in self.history]
    self.assertEqual(executed, [
        ('ALTER TABLE "category" ADD COLUMN "parent_id" '
         'INTEGER REFERENCES "category" ("id") ON DELETE SET NULL', []),
    ])
def main():
    """Add nullable genres/language_codes columns to pimdbobject."""
    db = db_factory.get_instance()
    migrator = SqliteMigrator(db)
    genres = TextField(null=True)
    language_codes = TextField(null=True)
    with db:
        migrate(
            migrator.add_column('pimdbobject', 'genres', genres),
            migrator.add_column('pimdbobject', 'language_codes',
                                language_codes),
        )
def foo(login):
    """Add a nullable 'status' column to the GooglePhoto table.

    Args:
        login: user login passed through to init_user_database.
    """
    # `from x import *` inside a function is a SyntaxError on Python 3;
    # import the required names explicitly instead (playhouse.migrate
    # re-exports peewee's field classes).
    from playhouse.migrate import SqliteMigrator, migrate, IntegerField
    from google.models import db
    init_user_database(login)
    migrator = SqliteMigrator(db)
    status_field = IntegerField(null=True)
    with db.transaction():
        migrate(
            migrator.add_column('GooglePhoto', 'status', status_field),
        )
def test_rename_foreign_key(self):
    """Renaming a FK column keeps the constraint under the new name."""
    migrate(self.migrator.rename_column('page', 'user_id', 'huey_id'))

    names = sorted(col.name
                   for col in self.database.get_columns('page'))
    self.assertEqual(names, ['huey_id', 'id', 'name'])

    fks = self.database.get_foreign_keys('page')
    self.assertEqual(len(fks), 1)
    self.assertEqual(fks[0].column, 'huey_id')
    self.assertEqual(fks[0].dest_column, 'id')
    self.assertEqual(fks[0].dest_table, 'user')
def rollback(self):
    """Drop the xisu-checkin columns added by this migration."""
    with self._database.atomic():
        migrate(
            self._migrator.drop_column(
                table='buptuser',
                column_name='latest_xisu_checkin_data'),
            self._migrator.drop_column(
                table='buptuser',
                column_name='latest_xisu_checkin_response_data'),
            self._migrator.drop_column(
                table='buptuser',
                column_name='latest_xisu_checkin_response_time'),
        )
    print(f'{__file__} rolled back')
def test_drop_not_null(self):
    """After dropping NOT NULL, NULL names can be stored and queried."""
    self._create_people()
    migrate(
        self.migrator.drop_not_null('person', 'first_name'),
        self.migrator.drop_not_null('person', 'last_name'))
    Person.create(first_name=None, last_name=None)
    nulls = (Person
             .select()
             .where((Person.first_name >> None) &
                    (Person.last_name >> None)))
    self.assertEqual(nulls.count(), 1)
def test_multiple_operations(self):
    """Chain add/drop/rename column, rename table and add index at once."""
    self.database.execute_sql('drop table if exists person_baze;')
    self.database.execute_sql('drop table if exists person_nugg;')
    self._create_people()

    field_n = CharField(null=True)
    field_d = CharField(default='test')
    migrate(
        self.migrator.add_column('person', 'field_null', field_n),
        self.migrator.drop_column('person', 'first_name'),
        self.migrator.add_column('person', 'field_default', field_d),
        self.migrator.rename_table('person', 'person_baze'),
        self.migrator.rename_table('person_baze', 'person_nugg'),
        self.migrator.rename_column('person_nugg', 'last_name', 'last'),
        self.migrator.add_index('person_nugg', ('last',), True))

    # Model reflecting the fully-migrated schema.
    class PersonNugg(Model):
        field_null = field_n
        field_default = field_d
        last = CharField()
        dob = DateField(null=True)
        class Meta:
            database = self.database
            table_name = 'person_nugg'

    people = (PersonNugg
              .select(PersonNugg.field_null, PersonNugg.field_default,
                      PersonNugg.last, PersonNugg.dob)
              .order_by(PersonNugg.last)
              .tuples())
    self.assertEqual(list(people), [
        (None, 'test', 'Dog', datetime.date(2008, 6, 1)),
        (None, 'test', 'Kitty', datetime.date(2011, 5, 1)),
        (None, 'test', 'Leifer', None),
    ])

    # The unique index on 'last' rejects a duplicate.
    with self.database.transaction():
        self.assertRaises(
            IntegrityError,
            PersonNugg.create,
            last='Leifer',
            field_default='bazer')

    self.database.execute_sql('drop table person_nugg;')
def test_index_preservation(self):
    """Renaming a column on SQLite rebuilds the table; verify the exact
    SQL sequence, including re-creation of the table's indexes."""
    with count_queries() as qc:
        migrate(self.migrator.rename_column("indexmodel", "first_name", "first"))

    queries = [log.msg for log in qc.get_queries()]
    self.assertEqual(
        queries,
        [
            # Get all the columns.
            ('PRAGMA table_info("indexmodel")', None),
            # Get the table definition.
            ("select name, sql from sqlite_master "
             "where type=? and LOWER(name)=?", ["table", "indexmodel"]),
            # Get the indexes and indexed columns for the table.
            (
                "SELECT name, sql FROM sqlite_master "
                "WHERE tbl_name = ? AND type = ? ORDER BY name",
                ("indexmodel", "index"),
            ),
            ('PRAGMA index_list("indexmodel")', None),
            ('PRAGMA index_info("indexmodel_data")', None),
            ('PRAGMA index_info("indexmodel_first_name_last_name")', None),
            # Get foreign keys.
            ('PRAGMA foreign_key_list("indexmodel")', None),
            # Drop any temporary table, if it exists.
            ('DROP TABLE IF EXISTS "indexmodel__tmp__"', []),
            # Create a temporary table with the renamed column.
            (
                'CREATE TABLE "indexmodel__tmp__" ('
                '"id" INTEGER NOT NULL PRIMARY KEY, '
                '"first" VARCHAR(255) NOT NULL, '
                '"last_name" VARCHAR(255) NOT NULL, '
                '"data" INTEGER NOT NULL)',
                [],
            ),
            # Copy data from original table into temporary table.
            (
                'INSERT INTO "indexmodel__tmp__" '
                '("id", "first", "last_name", "data") '
                'SELECT "id", "first_name", "last_name", "data" '
                'FROM "indexmodel"',
                [],
            ),
            # Drop the original table.
            ('DROP TABLE "indexmodel"', []),
            # Rename the temporary table, replacing the original.
            ('ALTER TABLE "indexmodel__tmp__" RENAME TO "indexmodel"', []),
            # Re-create the indexes.
            ('CREATE UNIQUE INDEX "indexmodel_data" '
             'ON "indexmodel" ("data")', []),
            ('CREATE UNIQUE INDEX "indexmodel_first_last_name" '
             'ON "indexmodel" ("first", "last_name")', []),
        ],
    )
def test_index_preservation(self):
    """Renaming a column on SQLite rebuilds the table; verify the exact
    SQL sequence (this variant omits the table_info/foreign_key pragmas)."""
    with count_queries() as qc:
        migrate(
            self.migrator.rename_column('indexmodel', 'first_name',
                                        'first'))

    queries = [log.msg for log in qc.get_queries()]
    self.assertEqual(
        queries,
        [
            # Get the table definition.
            ('select name, sql from sqlite_master '
             'where type=? and LOWER(name)=?', ['table', 'indexmodel']),
            # Get the indexes and indexed columns for the table.
            ('SELECT name, sql FROM sqlite_master '
             'WHERE tbl_name = ? AND type = ? ORDER BY name',
             ('indexmodel', 'index')),
            ('PRAGMA index_list("indexmodel")', None),
            ('PRAGMA index_info("indexmodel_data")', None),
            ('PRAGMA index_info("indexmodel_first_name_last_name")', None),
            # Drop any temporary table, if it exists.
            ('DROP TABLE IF EXISTS "indexmodel__tmp__"', []),
            # Create a temporary table with the renamed column.
            ('CREATE TABLE "indexmodel__tmp__" ('
             '"id" INTEGER NOT NULL PRIMARY KEY, '
             '"first" VARCHAR(255) NOT NULL, '
             '"last_name" VARCHAR(255) NOT NULL, '
             '"data" INTEGER NOT NULL)', []),
            # Copy data from original table into temporary table.
            ('INSERT INTO "indexmodel__tmp__" '
             '("id", "first", "last_name", "data") '
             'SELECT "id", "first_name", "last_name", "data" '
             'FROM "indexmodel"', []),
            # Drop the original table.
            ('DROP TABLE "indexmodel"', []),
            # Rename the temporary table, replacing the original.
            ('ALTER TABLE "indexmodel__tmp__" RENAME TO "indexmodel"', []),
            # Re-create the indexes.
            ('CREATE UNIQUE INDEX "indexmodel_data" '
             'ON "indexmodel" ("data")', []),
            ('CREATE UNIQUE INDEX "indexmodel_first_last_name" '
             'ON "indexmodel" ("first", "last_name")', [])
        ])
def performMigration():
    """Replace rule.relation_id (FK) with a plain-text 'relation' column."""
    # The original also built three ForeignKeyField locals (field,
    # paternal, maternal) that were never passed to migrate(); removed
    # as dead code.
    relation = TextField()
    migrator = SqliteMigrator(db)
    migrate(migrator.drop_column('rule', "relation_id"),
            migrator.add_column('rule', "relation", relation))
def migrate_v1(db):
    """Schema migration v1: add trade-tracking columns to ExchangeTrades.

    Args:
        db: the peewee SQLite database to migrate.
    """
    migrator = SqliteMigrator(db)
    table_name = 'ExchangeTrades'
    # amount_src = DecimalField(max_digits=15, decimal_places=8)
    amount_trg = DecimalField(max_digits=15, decimal_places=8, null=True)
    extrema_time = IntegerField(null=True)
    min_sell_margin = DecimalField(max_digits=15, decimal_places=8,
                                   null=True)
    # The original also issued rename_column(table_name, 'amount_src',
    # 'amount_src') -- a no-op (old and new names are identical) that
    # only triggered a pointless table rebuild; removed.
    migrate(
        migrator.add_column(table_name, 'amount_trg', amount_trg),
        migrator.add_column(table_name, 'extrema_time', extrema_time),
        migrator.add_column(table_name, 'min_sell_margin',
                            min_sell_margin))
def test_add_column_indexed_table(self):
    """Columns can be added to a table that already carries indexes,
    and the existing indexes survive the migration intact."""
    new_column = CharField(default='')
    migrate(self.migrator.add_column('indexmodel', 'foo', new_column))

    database = self.migrator.database

    column_names = sorted(
        col.name for col in database.get_columns('indexmodel'))
    self.assertEqual(
        column_names,
        ['data', 'first_name', 'foo', 'id', 'last_name'])

    index_info = sorted(
        (idx.name, idx.columns)
        for idx in database.get_indexes('indexmodel'))
    self.assertEqual(index_info, [
        ('indexmodel_data', ['data']),
        ('indexmodel_first_name_last_name', ['first_name', 'last_name'])])
def initialize():
    """Connect to the database, create any missing tables, and close the
    connection.

    NOTE(review): the original body called ``migrate()`` with every
    operation commented out — a zero-argument no-op — so the dead call
    has been removed.  The historical operations are preserved here for
    reference:

        migrator.drop_not_null('post', 'last_modified')
        migrator.drop_not_null('comment', 'last_modified')
        migrator.add_column('user', 'email', User.email)
        migrator.add_column('user', 'password', User.password)
    """
    DATABASE.connect()
    # safe=True: CREATE TABLE IF NOT EXISTS, so re-runs are harmless.
    DATABASE.create_tables(
        [User, Post, Tag, Comment, PostVotes, CommentVotes, PostTags],
        safe=True)
    DATABASE.close()
def migrate_database_schema():
    '''
    Migrate database schema from previous versions (0.9.4 and up).

    Returns:
        True when at least one migration (column change or table drop)
        was collected, False otherwise.

    Raises:
        Propagates any OperationalError from migrate() to the caller.
    '''
    drop_table_migrations, column_migrations = [], []

    # Version 0.9.4 ----------------------------------------------------------

    # Change columns.
    # NOTE(review): drop_column is skipped for the sqlite engine —
    # presumably because SQLite historically lacked DROP COLUMN; confirm.
    if Feed.field_exists('icon_id') and engine != 'sqlite':
        column_migrations.append(migrator.drop_column('feeds', 'icon_id'))
    if not Feed.field_exists('icon'):
        column_migrations.append(
            migrator.add_column('feeds', 'icon', Feed.icon))
    if not Feed.field_exists('icon_last_updated_on'):
        column_migrations.append(
            migrator.add_column('feeds', 'icon_last_updated_on',
                                Feed.icon_last_updated_on))
    if not Entry.field_exists('content_type'):
        column_migrations.append(
            migrator.add_column('entries', 'content_type',
                                Entry.content_type))
    if not Entry.field_exists('fulltext'):
        column_migrations.append(
            migrator.add_column('entries', 'fulltext', Entry.fulltext))

    # Drop tables.
    if Icon.table_exists():
        drop_table_migrations.append(Icon.drop_table)

    # ----------------------------------------------------------------------------

    # Run all table and column migrations.
    if column_migrations:
        # Let the caller catch any OperationalError's.
        migrate(*column_migrations)
    for drop in drop_table_migrations:
        drop()

    # The docstring promises a boolean; the original returned the raw
    # list, which is truthiness-compatible but not actually True/False.
    return bool(drop_table_migrations or column_migrations)
class TableMovies(BaseModel):
    # Movie catalog table mirroring Radarr data; column_name maps the
    # snake_case attributes onto the camelCase columns of the existing
    # database schema.
    alternative_titles = TextField(column_name='alternativeTitles', null=True)
    audio_codec = TextField(null=True)
    audio_language = TextField(null=True)
    failed_attempts = TextField(column_name='failedAttempts', null=True)
    fanart = TextField(null=True)
    forced = TextField(null=True)
    format = TextField(null=True)
    hearing_impaired = TextField(null=True)
    imdb_id = TextField(column_name='imdbId', null=True)
    languages = TextField(null=True)
    missing_subtitles = TextField(null=True)
    monitored = TextField(null=True)
    overview = TextField(null=True)
    path = TextField(unique=True)
    poster = TextField(null=True)
    radarr_id = IntegerField(column_name='radarrId', null=False, unique=True)
    resolution = TextField(null=True)
    scene_name = TextField(column_name='sceneName', null=True)
    sort_title = TextField(column_name='sortTitle', null=True)
    subtitles = TextField(null=True)
    title = TextField(null=False)
    tmdb_id = TextField(column_name='tmdbId', primary_key=True, null=False)
    video_codec = TextField(null=True)
    year = TextField(null=True)
    movie_file_id = IntegerField(null=True)

    # NOTE(review): this migrate() sits in the CLASS BODY, so it executes
    # once at class-definition (module import) time, adding the new
    # movie_file_id column to the pre-existing table.  Presumably a
    # duplicate-column error on re-import is handled (or tolerated)
    # elsewhere — confirm against the surrounding module.
    migrate(
        migrator.add_column('table_movies', 'movie_file_id', movie_file_id),
    )

    class Meta:
        # Bind to the existing table rather than a name derived from the
        # class name.
        table_name = 'table_movies'
class TableShows(BaseModel):
    # Series catalog table mirroring Sonarr data; column_name maps the
    # snake_case attributes onto the camelCase columns of the existing
    # database schema.
    alternate_titles = TextField(column_name='alternateTitles', null=True)
    audio_language = TextField(null=True)
    fanart = TextField(null=True)
    # NOTE(review): DEFAULT "False" uses double quotes, which SQLite
    # treats as an identifier where possible — presumably it falls back
    # to the string literal 'False' here; confirm intended default.
    forced = TextField(null=True, constraints=[SQL('DEFAULT "False"')])
    hearing_impaired = TextField(null=True)
    languages = TextField(null=True)
    overview = TextField(null=True)
    path = TextField(null=False, unique=True)
    poster = TextField(null=True)
    sonarr_series_id = IntegerField(column_name='sonarrSeriesId', null=True,
                                    unique=True)
    sort_title = TextField(column_name='sortTitle', null=True)
    title = TextField(null=True)
    tvdb_id = IntegerField(column_name='tvdbId', null=True, unique=True,
                           primary_key=True)
    year = TextField(null=True)

    # NOTE(review): this migrate() executes at class-definition (module
    # import) time, adding the 'forced' column to the pre-existing
    # table; duplicate-column handling on re-import is presumably dealt
    # with elsewhere — confirm against the surrounding module.
    migrate(migrator.add_column('table_shows', 'forced', forced),
            )

    class Meta:
        # Bind to the existing table rather than a name derived from the
        # class name.
        table_name = 'table_shows'
def run_update():
    '''Rename the add_time column to create_time on several tables.

    peewee has no Django-ORM-style automatic migrations, so changes to
    already-created tables must go through playhouse's migrate helpers.
    Docs: http://docs.peewee-orm.com/en/latest/peewee/playhouse.html?highlight=migrate#example-usage
    '''
    sync_db.connect()
    migrator = MySQLMigrator(sync_db)

    # All renames run inside one transaction so a failure rolls back the
    # whole batch.
    tables = ('userprofile', 'verifyemailcode', 'category', 'post')
    with sync_db.atomic():
        migrate(*[
            migrator.rename_column(table, 'add_time', 'create_time')
            for table in tables
        ])

    sync_db.close()
class TableEpisodes(BaseModel):
    # Episode table mirroring Sonarr data; rows reference TableShows via
    # the sonarrSeriesId foreign key.
    audio_codec = TextField(null=True)
    episode = IntegerField(null=False)
    failed_attempts = TextField(column_name='failedAttempts', null=True)
    format = TextField(null=True)
    missing_subtitles = TextField(null=True)
    monitored = TextField(null=True)
    path = TextField(null=False)
    resolution = TextField(null=True)
    scene_name = TextField(null=True)
    season = IntegerField(null=False)
    sonarr_episode_id = IntegerField(column_name='sonarrEpisodeId',
                                     unique=True, null=False)
    sonarr_series_id = ForeignKeyField(TableShows, field='sonarr_series_id',
                                       column_name='sonarrSeriesId',
                                       null=False)
    subtitles = TextField(null=True)
    title = TextField(null=True)
    video_codec = TextField(null=True)
    episode_file_id = IntegerField(null=True)

    # NOTE(review): this migrate() executes at class-definition (module
    # import) time, adding episode_file_id to the pre-existing table;
    # duplicate-column handling on re-import is presumably dealt with
    # elsewhere — confirm against the surrounding module.
    migrate(
        migrator.add_column('table_episodes', 'episode_file_id',
                            episode_file_id),
    )

    class Meta:
        table_name = 'table_episodes'
        # The existing table has no rowid-style primary key column.
        primary_key = False
def test_add_unique(self):
    """add_unique() on a freshly added column must enforce uniqueness."""
    new_column = IntegerField(default=0)
    migrate(
        self.migrator.add_column('tag', 'alt_id', new_column),
        self.migrator.add_unique('tag', 'alt_id'))

    # Model bound to the migrated table so we can exercise the new
    # constraint.
    class Tag2(Model):
        tag = CharField()
        alt_id = IntegerField(default=0)

        class Meta:
            database = self.database
            table_name = Tag._meta.table_name

    Tag2.create(tag='t1', alt_id=1)
    # A second row reusing alt_id=1 must violate the unique constraint.
    with self.database.atomic():
        self.assertRaises(
            IntegrityError,
            Tag2.create, tag='t2', alt_id=1)
def migrate(self):
    """Add the three xisu-checkin columns to the buptuser table, all
    inside a single transaction."""
    new_columns = (
        ('latest_xisu_checkin_data',
         BUPTUser.latest_xisu_checkin_data),
        ('latest_xisu_checkin_response_data',
         BUPTUser.latest_xisu_checkin_response_data),
        ('latest_xisu_checkin_response_time',
         BUPTUser.latest_xisu_checkin_response_time),
    )
    with self._database.atomic():
        migrate(*[
            self._migrator.add_column(
                table='buptuser', column_name=name, field=field)
            for name, field in new_columns
        ])
    print(f'{__file__} migrated')
def _migrate(i):
    """
    Run migrations.

    Args:
        i: Migration ID

    Returns:
        False: when migration ID is not found
    """
    # Guard clause: any unknown migration ID is reported as not found.
    if i != 0:
        return False

    # Migrate 0: Added Time column in MsgLog table.
    # 2016JUN15
    migrator = SqliteMigrator(db)
    time_field = DateTimeField(default=datetime.datetime.now, null=True)
    migrate(migrator.add_column("msglog", "time", time_field))
def test_add_table_constraint(self):
    """add_constraint() must attach a working CHECK constraint to an
    existing table."""
    new_column = FloatField(default=0.)
    migrate(
        self.migrator.add_column('tag', 'price', new_column),
        self.migrator.add_constraint('tag', 'price_check',
                                     Check('price >= 0')))

    # Model bound to the migrated table so we can exercise the check.
    class Tag2(Model):
        tag = CharField()
        price = FloatField(default=0.)

        class Meta:
            database = self.database
            table_name = Tag._meta.table_name

    # Negative prices violate the CHECK constraint.
    with self.database.atomic():
        self.assertRaises(
            IntegrityError,
            Tag2.create, tag='t1', price=-1)

    # Non-negative prices are accepted and persisted.
    Tag2.create(tag='t1', price=1.0)
    stored = Tag2.get(Tag2.tag == 't1')
    self.assertEqual(stored.price, 1.0)
def check_config(project_db):
    """Bring the project_config table up to date and drop the legacy
    plants_plt table when present.

    The presence of the 'output_last_imported' column is used as the
    schema-version marker: if it is missing, the three newer columns are
    added in one migrate() batch.

    NOTE(review): `conn` from lib.open_db() is never closed here —
    possible connection leak; confirm whether `lib` manages connection
    lifetime itself.
    """
    conn = lib.open_db(project_db)
    if lib.exists_table(conn, 'project_config'):
        config_cols = lib.get_column_names(conn, 'project_config')
        col_names = [v['name'] for v in config_cols]
        # Missing marker column means the project predates this schema.
        if 'output_last_imported' not in col_names:
            migrator = SqliteMigrator(SqliteDatabase(project_db))
            migrate(
                migrator.add_column('project_config', 'output_last_imported', DateTimeField(null=True)),
                migrator.add_column('project_config', 'imported_gis', BooleanField(default=False)),
                migrator.add_column('project_config', 'is_lte', BooleanField(default=False)),
            )
    # The plants_plt table is obsolete; remove it when it still exists.
    if lib.exists_table(conn, 'plants_plt'):
        lib.delete_table(project_db, 'plants_plt')
def run_migration():
    """Add first_report_datetime to the report table, drop is_open, and
    backfill the new column from each report's existing datetime.

    Silently returns without migrating when the database backend is not
    one of the supported peewee database types.
    """
    # isinstance() instead of type(...) == : idiomatic, and also accepts
    # subclasses of the supported database classes (e.g. a project-local
    # SqliteDatabase subclass), which the original equality check
    # wrongly rejected.
    if isinstance(db, peewee.SqliteDatabase):
        migrator = SqliteMigrator(db)
    elif isinstance(db, peewee.MySQLDatabase):
        migrator = MySQLMigrator(db)
    elif isinstance(db, peewee.PostgresqlDatabase):
        migrator = PostgresqlMigrator(db)
    else:
        return

    migrate(
        migrator.add_column('report', 'first_report_datetime',
                            peewee.DateTimeField(default=UTC_now)),
        migrator.drop_column('report', 'is_open'),
    )

    # Backfill: existing rows get their report datetime copied into the
    # freshly added column (new rows use the UTC_now default).
    for report in Report.select():
        report.first_report_datetime = report.datetime
        report.save()
def test_drop_column(self):
    """drop_column() removes columns while preserving the remaining
    columns' data."""
    self._create_people()
    migrate(
        self.migrator.drop_column('person', 'last_name'),
        self.migrator.drop_column('person', 'dob'))
    self.assertEqual(
        self.get_column_names('person'),
        set(['id', 'first_name']))

    User.create(id='charlie', password='******')
    User.create(id='huey', password='******')

    migrate(self.migrator.drop_column('users', 'password'))
    self.assertEqual(self.get_column_names('users'), set(['id']))

    # Rows must survive the table rebuild with their ids intact.
    remaining = list(User.select(User.id).order_by(User.id).tuples())
    self.assertEqual(remaining, [
        ('charlie',),
        ('huey',),
    ])