def upgrade_5(session, metadata):
    """
    Version 5 upgrade.

    This upgrade adds support for multiple songbooks: each song may now appear
    in several songbooks, each with its own entry number, through the new
    ``songs_songbooks`` mapping table.

    :param session: The database session, used to build the upgrade operations object.
    :param metadata: The SQLAlchemy ``MetaData`` bound to the database being upgraded.
    """
    op = get_upgrade_op(session)
    # Reflect the real table definition. Without autoload=True the Table object
    # has no columns, so the guard below could never detect a previous run.
    songs_table = Table('songs', metadata, autoload=True)
    # If 'song_book_id' is already gone, this upgrade has run before; the
    # original check was inverted (it skipped when the column was still there).
    if 'song_book_id' not in [col.name for col in songs_table.c.values()]:
        log.warning('Skipping upgrade_5 step of upgrading the song db')
        return
    # Create the mapping table (songs <-> songbooks)
    op.create_table('songs_songbooks',
                    Column('songbook_id', types.Integer(), ForeignKey('song_books.id'), primary_key=True),
                    Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
                    Column('entry', types.Unicode(255), primary_key=True, nullable=False))
    # Migrate old data. Exclude song_book_id = 0 ("no songbook") rows so no
    # bogus mapping entries are created (mirrors the correction in upgrade_6).
    op.execute('INSERT INTO songs_songbooks SELECT song_book_id, id, song_number FROM songs '
               'WHERE song_book_id IS NOT NULL AND song_number IS NOT NULL AND song_book_id <> 0')
    # Drop the now-redundant columns. SQLite cannot drop columns in place, so a
    # helper rebuilds the table; other dialects must drop the FK constraint first.
    if metadata.bind.url.get_dialect().name == 'sqlite':
        drop_columns(op, 'songs', ['song_book_id', 'song_number'])
    else:
        op.drop_constraint('songs_ibfk_1', 'songs', 'foreignkey')
        op.drop_column('songs', 'song_book_id')
        op.drop_column('songs', 'song_number')
def upgrade_2(session, metadata):
    """
    Version 2 upgrade - Move file path from old db to JSON encoded path to new db. Added during 2.5 dev

    :param session: The database session, used to build the upgrade operations object.
    :param metadata: The SQLAlchemy ``MetaData`` bound to the database being upgraded.
    """
    log.debug('Starting upgrade_2 for file_path to JSON')
    old_table = Table('image_filenames', metadata, autoload=True)
    # Only run if the new column is not there yet (i.e. the upgrade never ran).
    if 'file_path' not in [col.name for col in old_table.c.values()]:
        op = get_upgrade_op(session)
        op.add_column('image_filenames', Column('file_path', PathType()))
        conn = op.get_bind()
        results = conn.execute('SELECT * FROM image_filenames')
        data_path = AppLocation.get_data_path()
        for row in results.fetchall():
            file_path_json = json.dumps(Path(row.filename), cls=OpenLPJsonEncoder, base_path=data_path)
            # Double any embedded single quotes so paths like "O'Brien" do not
            # break (or inject into) the statement built below.
            sql = 'UPDATE image_filenames SET file_path = \'{file_path_json}\' WHERE id = {id}'.format(
                file_path_json=file_path_json.replace("'", "''"), id=row.id)
            conn.execute(sql)
        # Drop old columns. SQLite cannot drop columns in place, so a helper
        # rebuilds the table instead.
        if metadata.bind.url.get_dialect().name == 'sqlite':
            drop_columns(op, 'image_filenames', [
                'filename',
            ])
        else:
            # NOTE(review): drop_constraint expects (constraint_name, table_name,
            # type_); 'image_filenames' is being passed as the constraint name —
            # confirm the real FK constraint name on non-SQLite deployments.
            op.drop_constraint('image_filenames', 'foreignkey')
            # Fixed typo: the column is 'filename' (singular), matching the
            # SQLite branch above; 'filenames' does not exist.
            op.drop_column('image_filenames', 'filename')
def upgrade_5(session, metadata):
    """
    Version 5 upgrade.

    This upgrade adds support for multiple songbooks: each song may now appear
    in several songbooks, each with its own entry number, through the new
    ``songs_songbooks`` mapping table.

    :param session: The database session, used to build the upgrade operations object.
    :param metadata: The SQLAlchemy ``MetaData`` bound to the database being upgraded.
    """
    op = get_upgrade_op(session)
    # Reflect the real table definition. Without autoload=True the Table object
    # has no columns, so the guard below could never detect a previous run.
    songs_table = Table('songs', metadata, autoload=True)
    # If 'song_book_id' is already gone, this upgrade has run before; the
    # original check was inverted (it skipped when the column was still there).
    if 'song_book_id' not in [col.name for col in songs_table.c.values()]:
        log.warning('Skipping upgrade_5 step of upgrading the song db')
        return
    # Create the mapping table (songs <-> songbooks)
    op.create_table('songs_songbooks',
                    Column('songbook_id', types.Integer(), ForeignKey('song_books.id'), primary_key=True),
                    Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
                    Column('entry', types.Unicode(255), primary_key=True, nullable=False))
    # Migrate old data. Exclude song_book_id = 0 ("no songbook") rows so no
    # bogus mapping entries are created (mirrors the correction in upgrade_6).
    op.execute('INSERT INTO songs_songbooks SELECT song_book_id, id, song_number FROM songs '
               'WHERE song_book_id IS NOT NULL AND song_number IS NOT NULL AND song_book_id <> 0')
    # Drop the now-redundant columns. SQLite cannot drop columns in place, so a
    # helper rebuilds the table; other dialects must drop the FK constraint first.
    if metadata.bind.url.get_dialect().name == 'sqlite':
        drop_columns(op, 'songs', ['song_book_id', 'song_number'])
    else:
        op.drop_constraint('songs_ibfk_1', 'songs', 'foreignkey')
        op.drop_column('songs', 'song_book_id')
        op.drop_column('songs', 'song_number')
def upgrade_6(session, metadata):
    """
    Version 6 upgrade

    This version corrects the errors in upgrades 4 and 5

    :param session: The database session, used to build the upgrade operations object.
    :param metadata: The SQLAlchemy ``MetaData`` bound to the database being upgraded.
    """
    op = get_upgrade_op(session)
    metadata.reflect()
    # Upgrade 4, relocated and corrected: the author_type column belongs on the
    # authors_songs association table, not on the songs table.
    association_table = Table('authors_songs', metadata, autoload=True)
    existing_columns = {col.name for col in association_table.c.values()}
    if 'author_type' not in existing_columns:
        # SQLite offers no way to alter a table's primary key, so build a
        # replacement table, copy the rows across and swap the names over.
        op.create_table(
            'authors_songs_tmp',
            Column('author_id', types.Integer(), ForeignKey('authors.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('author_type', types.Unicode(255), primary_key=True, nullable=False, server_default=text('""'))
        )
        op.execute('INSERT INTO authors_songs_tmp SELECT author_id, song_id, "" FROM authors_songs')
        op.drop_table('authors_songs')
        op.rename_table('authors_songs_tmp', 'authors_songs')
    # Upgrade 5, relocated and corrected: only build the songbook mapping table
    # if it is not already present.
    known_tables = {table.name for table in metadata.tables.values()}
    if 'songs_songbooks' not in known_tables:
        # Create the mapping table (songs <-> songbooks)
        op.create_table(
            'songs_songbooks',
            Column('songbook_id', types.Integer(), ForeignKey('song_books.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('entry', types.Unicode(255), primary_key=True, nullable=False)
        )
        # Copy the legacy one-songbook-per-song data into the mapping table,
        # skipping rows without a real songbook reference.
        op.execute('INSERT INTO songs_songbooks SELECT song_book_id, id, song_number FROM songs\
                WHERE song_book_id IS NOT NULL AND song_number IS NOT NULL AND song_book_id <> 0')
        # Remove the superseded columns; SQLite needs the table-rebuild helper.
        if metadata.bind.url.get_dialect().name == 'sqlite':
            drop_columns(op, 'songs', ['song_book_id', 'song_number'])
        else:
            op.drop_constraint('songs_ibfk_1', 'songs', 'foreignkey')
            op.drop_column('songs', 'song_book_id')
            op.drop_column('songs', 'song_number')
    # Finally, clean up our mess in people's databases
    op.execute('DELETE FROM songs_songbooks WHERE songbook_id = 0')
def test_delete_columns(self):
    """
    Test deleting multiple columns in a table
    """
    # GIVEN: A temporary song db
    # WHEN: Deleting a columns in a table
    drop_columns(self.op, 'songs', ['song_book_id', 'song_number'])

    # THEN: The columns should have been deleted
    meta = sqlalchemy.MetaData(bind=self.op.get_bind())
    meta.reflect()
    dropped = ('song_book_id', 'song_number')
    survivors = [col.name for col in meta.tables['songs'].columns if col.name in dropped]
    for name in survivors:
        self.fail("The column '%s' should have been deleted." % name)
def test_delete_columns(self):
    """
    Test deleting multiple columns in a table
    """
    # GIVEN: A temporary song db
    # WHEN: Deleting a columns in a table
    drop_columns(self.op, 'songs', ['song_book_id', 'song_number'])

    # THEN: The columns should have been deleted
    meta = sqlalchemy.MetaData(bind=self.op.get_bind())
    meta.reflect()
    dropped = ('song_book_id', 'song_number')
    survivors = [col.name for col in meta.tables['songs'].columns if col.name in dropped]
    for name in survivors:
        self.fail("The column '%s' should have been deleted." % name)