def upgrade_5(session, metadata):
    """
    Version 5 upgrade.

    This upgrade adds support for multiple songbooks by moving the songbook
    fields of ``songs`` into a new ``songs_songbooks`` mapping table.

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    op = get_upgrade_op(session)
    # Fix: without autoload=True the Table object has no columns, so the guard below
    # compared against an empty list and could never detect a previous run.
    songs_table = Table('songs', metadata, autoload=True)
    # Fix: this migration *drops* song_book_id, so its absence (not presence) means the
    # upgrade has already run and should be skipped.
    if 'song_book_id' not in [col.name for col in songs_table.c.values()]:
        log.warning('Skipping upgrade_5 step of upgrading the song db')
        return
    # Create the mapping table (songs <-> songbooks)
    op.create_table('songs_songbooks',
                    Column('songbook_id', types.Integer(), ForeignKey('song_books.id'), primary_key=True),
                    Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
                    Column('entry', types.Unicode(255), primary_key=True, nullable=False))
    # Migrate old data
    op.execute('INSERT INTO songs_songbooks SELECT song_book_id, id, song_number FROM songs\
        WHERE song_book_id IS NOT NULL AND song_number IS NOT NULL')
    # Drop old columns; SQLite cannot ALTER TABLE DROP COLUMN, so use the rebuild helper there
    if metadata.bind.url.get_dialect().name == 'sqlite':
        drop_columns(op, 'songs', ['song_book_id', 'song_number'])
    else:
        op.drop_constraint('songs_ibfk_1', 'songs', 'foreignkey')
        op.drop_column('songs', 'song_book_id')
        op.drop_column('songs', 'song_number')
def upgrade_1(session, metadata):
    """
    Version 1 upgrade.

    This upgrade removes the many-to-many relationship between songs and media_files and replaces it with a
    one-to-many, which is far more representative of the real relationship between the two entities.

    In order to facilitate this one-to-many relationship, a song_id column is added to the media_files table,
    and a weight column so that the media files can be ordered.

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    op = get_upgrade_op(session)
    # Fix: reflect ALL tables so the media_files_songs existence check below actually sees
    # them; reflecting only the songs table left metadata.tables incomplete.
    metadata.reflect()
    if 'media_files_songs' in [t.name for t in metadata.tables.values()]:
        op.drop_table('media_files_songs')
        op.add_column('media_files', Column('song_id', types.Integer(), server_default=null()))
        op.add_column('media_files', Column('weight', types.Integer(), server_default=text('0')))
        if metadata.bind.url.get_dialect().name != 'sqlite':
            # SQLite doesn't support ALTER TABLE ADD CONSTRAINT
            # Fix: alembic's create_foreign_key takes the local and remote columns as two
            # separate lists, not one combined list.
            op.create_foreign_key('fk_media_files_song_id', 'media_files', 'songs', ['song_id'], ['id'])
    else:
        log.warning('Skipping upgrade_1 step of upgrading the song db')
def test_get_upgrade_op(self):
    """
    Test that the ``get_upgrade_op`` function creates a MigrationContext and an Operations object
    """
    # GIVEN: Mocked out alembic classes and a mocked out SQLAlchemy session object
    with patch('openlp.core.lib.db.MigrationContext') as MockedMigrationContext, \
            patch('openlp.core.lib.db.Operations') as MockedOperations:
        fake_connection = MagicMock()
        fake_session = MagicMock()
        fake_session.bind.connect.return_value = fake_connection
        fake_context = MagicMock()
        MockedMigrationContext.configure.return_value = fake_context
        fake_op = MagicMock()
        MockedOperations.return_value = fake_op

        # WHEN: get_upgrade_op is executed with the mocked session object
        result = get_upgrade_op(fake_session)

        # THEN: The op object should be mocked_op, and the correction function calls should have been made
        self.assertIs(result, fake_op, 'The return value should be the mocked object')
        fake_session.bind.connect.assert_called_with()
        MockedMigrationContext.configure.assert_called_with(fake_connection)
        MockedOperations.assert_called_with(fake_context)
def upgrade_2(session, metadata):
    """
    Version 2 upgrade - Move file path from old db to JSON encoded path to new db. Added during 2.5 dev

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    log.debug('Starting upgrade_2 for file_path to JSON')
    old_table = Table('image_filenames', metadata, autoload=True)
    # Only run if the JSON column does not exist yet
    if 'file_path' not in [col.name for col in old_table.c.values()]:
        op = get_upgrade_op(session)
        op.add_column('image_filenames', Column('file_path', PathType()))
        conn = op.get_bind()
        results = conn.execute('SELECT * FROM image_filenames')
        data_path = AppLocation.get_data_path()
        # Re-encode every stored filename as a JSON path relative to the data directory
        for row in results.fetchall():
            file_path_json = json.dumps(Path(row.filename), cls=OpenLPJsonEncoder, base_path=data_path)
            # NOTE(review): string-built SQL; file_path_json could contain quote characters —
            # consider a parameterised query if this upgrade is ever revisited.
            sql = 'UPDATE image_filenames SET file_path = \'{file_path_json}\' WHERE id = {id}'.format(
                file_path_json=file_path_json, id=row.id)
            conn.execute(sql)
        # Drop old columns
        if metadata.bind.url.get_dialect().name == 'sqlite':
            drop_columns(op, 'image_filenames', ['filename', ])
        else:
            op.drop_constraint('image_filenames', 'foreignkey')
            # Fix: the column is named 'filename' (as the SQLite branch above shows), not 'filenames'
            op.drop_column('image_filenames', 'filename')
def upgrade_1(session, metadata):
    """
    Version 1 upgrade.

    This upgrade removes the many-to-many relationship between songs and media_files and replaces it with a
    one-to-many, which is far more representative of the real relationship between the two entities.

    In order to facilitate this one-to-many relationship, a song_id column is added to the media_files table,
    and a weight column so that the media files can be ordered.

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    op = get_upgrade_op(session)
    # Reflect all tables so the existence check below sees the current schema
    metadata.reflect()
    if 'media_files_songs' in [t.name for t in metadata.tables.values()]:
        op.drop_table('media_files_songs')
        op.add_column('media_files', Column('song_id', types.Integer(), server_default=null()))
        op.add_column('media_files', Column('weight', types.Integer(), server_default=text('0')))
        if metadata.bind.url.get_dialect().name != 'sqlite':
            # SQLite doesn't support ALTER TABLE ADD CONSTRAINT
            # Fix: local columns and referenced columns are two separate list arguments;
            # passing ['song_id', 'id'] as one list declared a bogus two-column FK.
            op.create_foreign_key('fk_media_files_song_id', 'media_files', 'songs', ['song_id'], ['id'])
    else:
        log.warning('Skipping upgrade_1 step of upgrading the song db')
def upgrade_4(session, metadata):
    """
    Version 4 upgrade.

    This upgrade adds a column for author type to the authors_songs table

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    # Due to an incorrect check, this step was always skipped. Moved this code into upgrade 6.
    op = get_upgrade_op(session)
    authors_songs = Table('authors_songs', metadata, autoload=True)
    if 'author_type' not in [col.name for col in authors_songs.c.values()]:
        # SQLite cannot change a table's primary key in place, so build a replacement
        # table, copy the rows across, and swap it in.
        # Fix: dropped the unused local binding of create_table's return value.
        op.create_table(
            'authors_songs_tmp',
            Column('author_id', types.Integer(), ForeignKey('authors.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('author_type', types.Unicode(255), primary_key=True, nullable=False, server_default=text('""')))
        op.execute(
            'INSERT INTO authors_songs_tmp SELECT author_id, song_id, "" FROM authors_songs'
        )
        op.drop_table('authors_songs')
        op.rename_table('authors_songs_tmp', 'authors_songs')
    else:
        log.warning('Skipping upgrade_4 step of upgrading the song db')
def upgrade_2(session, metadata):
    """
    Version 2 upgrade.

    Update Projector() table to include new data defined in PJLink version 2 changes

    mac_adx:      Column(String(18))
    serial_no:    Column(String(30))
    sw_version:   Column(String(30))
    model_filter: Column(String(30))
    model_lamp:   Column(String(30))

    :param session: DB session instance
    :param metadata: Metadata of current DB
    """
    log.debug('Checking projector DB upgrade to version 2')
    projector_table = Table('projector', metadata, autoload=True)
    # mac_adx is used as the sentinel: if it is missing, none of the v2 columns exist yet
    upgrade_db = 'mac_adx' not in [col.name for col in projector_table.c.values()]
    if upgrade_db:
        new_op = get_upgrade_op(session)
        new_columns = (('mac_adx', 18), ('serial_no', 30), ('sw_version', 30),
                       ('model_filter', 30), ('model_lamp', 30))
        for column_name, width in new_columns:
            new_op.add_column('projector', Column(column_name, types.String(width), server_default=null()))
    log.debug('{status} projector DB upgrade to version 2'.format(status='Updated' if upgrade_db else 'Skipping'))
def upgrade_4(session, metadata):
    """
    Version 4 upgrade.

    This upgrade adds a column for author type to the authors_songs table

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    # Since SQLite doesn't support changing the primary key of a table, we need to recreate the table
    # and copy the old values
    op = get_upgrade_op(session)
    # Fix: author_type lives on authors_songs, not songs, and the table must be reflected
    # (autoload=True) — otherwise the column list is empty and the guard below is meaningless.
    authors_songs = Table('authors_songs', metadata, autoload=True)
    if 'author_type' not in [col.name for col in authors_songs.c.values()]:
        op.create_table(
            'authors_songs_tmp',
            Column('author_id', types.Integer(), ForeignKey('authors.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('author_type', types.Unicode(255), primary_key=True, nullable=False, server_default=text('""')))
        op.execute(
            'INSERT INTO authors_songs_tmp SELECT author_id, song_id, "" FROM authors_songs'
        )
        op.drop_table('authors_songs')
        op.rename_table('authors_songs_tmp', 'authors_songs')
    else:
        log.warning('Skipping upgrade_4 step of upgrading the song db')
def upgrade_2(session, metadata):
    """
    Remove the individual proxy settings, after the implementation of central proxy settings.
    Added in 2.5 (3.0 development)

    If the bible carries its own proxy server and it differs from the central proxy
    settings, the user is asked (via a message box) whether to adopt it for http,
    https, or both; the per-bible proxy rows are then deleted from the metadata table.

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    settings = Registry().get('settings')
    op = get_upgrade_op(session)
    metadata_table = Table('metadata', metadata, autoload=True)
    # Read the per-bible proxy server; fall back to '' when the row is absent
    proxy, = session.execute(
        select([metadata_table.c.value], metadata_table.c.key == 'proxy_server')).first() or ('', )
    # Only prompt when a proxy is set and it differs from both central proxy settings
    if proxy and not \
            (proxy == settings.value('advanced/proxy http') or proxy == settings.value('advanced/proxy https')):
        http_proxy = ''
        https_proxy = ''
        name, = session.execute(
            select([metadata_table.c.value], metadata_table.c.key == 'name')).first()
        # Ask the user whether to adopt this proxy centrally, and for which scheme(s)
        msg_box = QtWidgets.QMessageBox()
        msg_box.setText(
            translate(
                'BiblesPlugin',
                f'The proxy server {proxy} was found in the bible {name}.<br>'
                f'Would you like to set it as the proxy for OpenLP?'))
        msg_box.setIcon(QtWidgets.QMessageBox.Question)
        msg_box.addButton(QtWidgets.QMessageBox.No)
        http_button = msg_box.addButton('http', QtWidgets.QMessageBox.ActionRole)
        both_button = msg_box.addButton(translate('BiblesPlugin', 'both'), QtWidgets.QMessageBox.ActionRole)
        https_button = msg_box.addButton('https', QtWidgets.QMessageBox.ActionRole)
        msg_box.setDefaultButton(both_button)
        msg_box.exec()
        clicked_button = msg_box.clickedButton()
        if clicked_button in [http_button, both_button]:
            http_proxy = proxy
            settings.setValue('advanced/proxy http', proxy)
        if clicked_button in [https_button, both_button]:
            https_proxy = proxy
            settings.setValue('advanced/proxy https', proxy)
        # If either scheme was adopted, migrate the credentials too and switch to manual mode
        if http_proxy or https_proxy:
            username, = session.execute(
                select([metadata_table.c.value], metadata_table.c.key == 'proxy_username')).first()
            # NOTE: 'proxy' is reused here to hold the password read from the metadata table
            proxy, = session.execute(
                select([metadata_table.c.value], metadata_table.c.key == 'proxy_password')).first()
            settings.setValue('advanced/proxy username', username)
            settings.setValue('advanced/proxy password', proxy)
            settings.setValue('advanced/proxy mode', ProxyMode.MANUAL_PROXY)
    # Always remove the per-bible proxy rows, whether or not they were adopted centrally
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_server'))
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_username'))
    op.execute(delete(metadata_table, metadata_table.c.key == 'proxy_password'))
def upgrade_1(session, metadata):
    """
    Version 1 upgrade.

    This upgrade adds two new fields to the songusage database
    """
    op = get_upgrade_op(session)
    # Record which plugin logged the usage and where the item came from
    for column_name, width in (('plugin_name', 20), ('source', 10)):
        op.add_column('songusage_data', Column(column_name, types.Unicode(width), server_default=''))
def setUp(self):
    """
    Create temp folder for keeping db file
    """
    self.tmp_folder = mkdtemp()
    # Copy the fixture database into the temp folder so the upgrade can modify it freely
    source_db_path = os.path.join(TEST_RESOURCES_PATH, 'songs', 'songs-1.9.7.sqlite')
    self.db_tmp_path = os.path.join(self.tmp_folder, 'songs-1.9.7.sqlite')
    shutil.copyfile(source_db_path, self.db_tmp_path)
    self.session, metadata = init_db('sqlite:///' + self.db_tmp_path)
    self.op = get_upgrade_op(self.session)
def upgrade_6(session, metadata):
    """
    Version 6 upgrade.

    This upgrade adds a chords field to the songs table
    """
    op = get_upgrade_op(session)
    songs_table = Table('songs', metadata, autoload=True)
    existing_columns = [col.name for col in songs_table.c.values()]
    if 'chords' in existing_columns:
        log.warning('Skipping upgrade_6 step of upgrading the song db')
    else:
        op.add_column('songs', Column('chords', types.UnicodeText))
def upgrade_1(session, metadata):
    """
    Version 1 upgrade.

    This upgrade adds two new fields to the songusage database

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    upgrade_op = get_upgrade_op(session)
    # plugin_name records which plugin logged the usage; source records its origin
    new_columns = [
        Column('plugin_name', types.Unicode(20), server_default=''),
        Column('source', types.Unicode(10), server_default=''),
    ]
    for column in new_columns:
        upgrade_op.add_column('songusage_data', column)
def upgrade_2(session, metadata):
    """
    Version 2 upgrade.

    This upgrade adds a create_date and last_modified date to the songs table
    """
    op = get_upgrade_op(session)
    songs_table = Table('songs', metadata, autoload=True)
    column_names = [col.name for col in songs_table.c.values()]
    if 'create_date' in column_names:
        log.warning('Skipping upgrade_2 step of upgrading the song db')
        return
    # Both timestamps default to "now" for pre-existing rows
    op.add_column('songs', Column('create_date', types.DateTime(), default=func.now()))
    op.add_column('songs', Column('last_modified', types.DateTime(), default=func.now()))
def upgrade_5(session, metadata):
    """
    Version 5 upgrade.

    This upgrade adds a song_key and transpose_by field to the songs table
    """
    op = get_upgrade_op(session)
    songs_table = Table('songs', metadata, autoload=True)
    column_names = [col.name for col in songs_table.c.values()]
    if 'song_key' in column_names:
        log.warning('Skipping upgrade_5 step of upgrading the song db')
        return
    # song_key holds the musical key (e.g. up to 3 chars); transpose_by is a semitone offset
    op.add_column('songs', Column('song_key', types.Unicode(3)))
    op.add_column('songs', Column('transpose_by', types.Integer(), default=0))
def upgrade_3(session, metadata):
    """
    Version 3 upgrade.

    This upgrade adds a temporary song flag to the songs table
    """
    op = get_upgrade_op(session)
    songs_table = Table('songs', metadata, autoload=True)
    if 'temporary' in [col.name for col in songs_table.c.values()]:
        log.warning('Skipping upgrade_3 step of upgrading the song db')
        return
    if metadata.bind.url.get_dialect().name == 'sqlite':
        # On SQLite, suppress the boolean CHECK constraint (cannot be added via ALTER TABLE)
        new_column = Column('temporary', types.Boolean(create_constraint=False), server_default=false())
    else:
        new_column = Column('temporary', types.Boolean(), server_default=false())
    op.add_column('songs', new_column)
def upgrade_6(session, metadata):
    """
    Version 6 upgrade

    This version corrects the errors in upgrades 4 and 5

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    op = get_upgrade_op(session)
    # Reflect all tables so the songs_songbooks existence check below is reliable
    metadata.reflect()
    # Move upgrade 4 to here and correct it (authors_songs table, not songs table)
    authors_songs = Table('authors_songs', metadata, autoload=True)
    if 'author_type' not in [col.name for col in authors_songs.c.values()]:
        # Since SQLite doesn't support changing the primary key of a table, we need to recreate the table
        # and copy the old values
        op.create_table(
            'authors_songs_tmp',
            Column('author_id', types.Integer(), ForeignKey('authors.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('author_type', types.Unicode(255), primary_key=True, nullable=False, server_default=text('""'))
        )
        op.execute('INSERT INTO authors_songs_tmp SELECT author_id, song_id, "" FROM authors_songs')
        op.drop_table('authors_songs')
        op.rename_table('authors_songs_tmp', 'authors_songs')
    # Move upgrade 5 here to correct it
    if 'songs_songbooks' not in [t.name for t in metadata.tables.values()]:
        # Create the mapping table (songs <-> songbooks)
        op.create_table(
            'songs_songbooks',
            Column('songbook_id', types.Integer(), ForeignKey('song_books.id'), primary_key=True),
            Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
            Column('entry', types.Unicode(255), primary_key=True, nullable=False)
        )
        # Migrate old data; song_book_id = 0 rows are excluded here but any that slip
        # through are removed by the final cleanup DELETE below
        op.execute('INSERT INTO songs_songbooks SELECT song_book_id, id, song_number FROM songs\
            WHERE song_book_id IS NOT NULL AND song_number IS NOT NULL AND song_book_id <> 0')
        # Drop old columns; SQLite cannot ALTER TABLE DROP COLUMN, so use the rebuild helper
        if metadata.bind.url.get_dialect().name == 'sqlite':
            drop_columns(op, 'songs', ['song_book_id', 'song_number'])
        else:
            op.drop_constraint('songs_ibfk_1', 'songs', 'foreignkey')
            op.drop_column('songs', 'song_book_id')
            op.drop_column('songs', 'song_number')
    # Finally, clean up our mess in people's databases
    op.execute('DELETE FROM songs_songbooks WHERE songbook_id = 0')
def upgrade_2(session, metadata):
    """
    Version 2 upgrade.

    This upgrade adds two new fields to the songusage database

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    op = get_upgrade_op(session)
    songusage_table = Table('songusage_data', metadata, autoload=True)
    existing_columns = [col.name for col in songusage_table.c.values()]
    # plugin_name is used as the sentinel: if it is missing, neither new column exists
    if 'plugin_name' not in existing_columns:
        op.add_column('songusage_data', Column('plugin_name', types.Unicode(20), server_default=''))
        op.add_column('songusage_data', Column('source', types.Unicode(10), server_default=''))
def upgrade_3(session, metadata):
    """
    Version 3 upgrade.

    Update Projector() table to include PJLink class as part of record.

    pjlink_version: Column(String(1))

    :param session: DB Session instance
    :param metadata: Metadata of current DB
    """
    log.debug('Checking projector DB upgrade to version 3')
    projector_table = Table('projector', metadata, autoload=True)
    existing_columns = [col.name for col in projector_table.c.values()]
    upgrade_db = 'pjlink_class' not in existing_columns
    if upgrade_db:
        new_op = get_upgrade_op(session)
        new_op.add_column('projector', Column('pjlink_class', types.String(5), server_default=null()))
    log.debug('{status} projector DB upgrade to version 3'.format(status='Updated' if upgrade_db else 'Skipping'))
def upgrade_4(session, metadata):
    """
    Version 4 upgrade.

    This upgrade adds a column for author type to the authors_songs table

    :param session: SQLAlchemy Session object
    :param metadata: SQLAlchemy MetaData object
    """
    # Since SQLite doesn't support changing the primary key of a table, we need to recreate the table
    # and copy the old values
    op = get_upgrade_op(session)
    # Fix: the guard must inspect the reflected authors_songs table — author_type is not a
    # songs column, and without autoload=True no columns are loaded at all, so the original
    # check compared against an empty list.
    authors_songs_table = Table('authors_songs', metadata, autoload=True)
    if 'author_type' not in [col.name for col in authors_songs_table.c.values()]:
        op.create_table('authors_songs_tmp',
                        Column('author_id', types.Integer(), ForeignKey('authors.id'), primary_key=True),
                        Column('song_id', types.Integer(), ForeignKey('songs.id'), primary_key=True),
                        Column('author_type', types.Unicode(255), primary_key=True,
                               nullable=False, server_default=text('""')))
        op.execute('INSERT INTO authors_songs_tmp SELECT author_id, song_id, "" FROM authors_songs')
        op.drop_table('authors_songs')
        op.rename_table('authors_songs_tmp', 'authors_songs')
    else:
        log.warning('Skipping upgrade_4 step of upgrading the song db')
def get_upgrade_op_test(self): """ Test that the ``get_upgrade_op`` function creates a MigrationContext and an Operations object """ # GIVEN: Mocked out alembic classes and a mocked out SQLAlchemy session object with patch('openlp.core.lib.db.MigrationContext') as MockedMigrationContext, \ patch('openlp.core.lib.db.Operations') as MockedOperations: mocked_context = MagicMock() mocked_op = MagicMock() mocked_connection = MagicMock() MockedMigrationContext.configure.return_value = mocked_context MockedOperations.return_value = mocked_op mocked_session = MagicMock() mocked_session.bind.connect.return_value = mocked_connection # WHEN: get_upgrade_op is executed with the mocked session object op = get_upgrade_op(mocked_session) # THEN: The op object should be mocked_op, and the correction function calls should have been made self.assertIs(op, mocked_op, 'The return value should be the mocked object') mocked_session.bind.connect.assert_called_with() MockedMigrationContext.configure.assert_called_with(mocked_connection) MockedOperations.assert_called_with(mocked_context)