def execute(self):
    """Add the default_ep_status column to the tv_shows table."""
    conn = self.connection
    utils.backup_database(conn.path, self.checkMajorDBVersion())
    log.info(u'Adding column default_ep_status to tv_shows')
    # '-1' marks shows that have no explicit default episode status yet.
    self.addColumn('tv_shows', 'default_ep_status', 'NUMERIC', '-1')
    self.incMajorDBVersion()
def execute(self):
    """Shift the stored quality values for shows, episodes and history."""
    utils.backup_database(self.connection.path, self.connection.version)
    # Each shift touches its own table; run them in the original order.
    for shift in (self.shift_tv_qualities,
                  self.shift_episode_qualities,
                  self.shift_history_qualities):
        shift()
    self.inc_minor_version()
def execute(self):
    """Introduce the db_minor_version column and switch to major.minor versioning."""
    conn = self.connection
    utils.backup_database(conn.path, self.checkMajorDBVersion())
    log.info(u'Add minor version numbers to database')
    self.addColumn('db_version', 'db_minor_version')
    self.inc_major_version()
    self.inc_minor_version()
def execute(self):
    """Add the airdate_offset column to the tv_shows table."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding new airdate_offset field in the tv_shows table')
    # Default of 0 means no manual shift of the air date/time.
    missing = not self.hasColumn('tv_shows', 'airdate_offset')
    if missing:
        self.addColumn('tv_shows', 'airdate_offset', 'NUMERIC', 0)
    self.inc_minor_version()
def execute(self):
    """Add the info_hash column to the history table."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info(u'Adding column info_hash in history')
    missing = not self.hasColumn('history', 'info_hash')
    if missing:
        self.addColumn('history', 'info_hash', 'TEXT', None)
    self.inc_minor_version()
def execute(self):
    """Add the size column to the history table."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info(u'Adding column size in history')
    # -1 marks rows recorded before the size was tracked.
    missing = not self.hasColumn('history', 'size')
    if missing:
        self.addColumn('history', 'size', 'NUMERIC', -1)
    self.inc_minor_version()
def execute(self):
    """Shift history qualities and normalise unknown statuses; bump major version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    self.shift_history_qualities()
    self.update_status_unknown()
    self.inc_major_version()
    log.info(u'Updated to: {}.{}', *conn.version)
def execute(self):
    """Add a dedicated quality column to tv_episodes and split composite statuses.

    Rebuilds tv_episodes with the new quality column (initialised to the
    invalid value -1), then decomposes the legacy composite status values in
    both tv_episodes and history into separate status/quality parts.
    """
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding new quality field in the tv_episodes table')
    # SQLite cannot insert a column with constraints in place, so rename the
    # old table and re-create it with the new layout.
    self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
    self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes;')
    self.connection.action(
        'CREATE TABLE IF NOT EXISTS tv_episodes '
        '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, '
        'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, '
        'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, '
        'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, '
        'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, '
        'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);'
    )
    # Re-insert old values, setting the new quality column to the invalid value of -1
    self.connection.action(
        'INSERT INTO tv_episodes '
        '(showid, indexerid, indexer, name, season, episode, description, airdate, hasnfo, '
        'hastbn, status, quality, location, file_size, release_name, subtitles, subtitles_searchcount, '
        'subtitles_lastsearch, is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, '
        'version, release_group, manually_searched) '
        'SELECT showid, indexerid, indexer, '
        'name, season, episode, description, airdate, hasnfo, '
        'hastbn, status, -1 AS quality, location, file_size, release_name, '
        'subtitles, subtitles_searchcount, subtitles_lastsearch, '
        'is_proper, scene_season, scene_episode, absolute_number, '
        'scene_absolute_number, version, release_group, manually_searched '
        'FROM tmp_tv_episodes;'
    )
    # We have all that we need, drop the old table
    # Drop the status-based indexes; they reference the old composite values.
    for index in ['idx_sta_epi_air', 'idx_sta_epi_sta_air', 'idx_status']:
        log.info(u'Dropping the index on {0}', index)
        self.connection.action('DROP INDEX IF EXISTS {index};'.format(index=index))
    self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
    log.info(u'Splitting the composite status into status and quality')
    # Iterate distinct composite values only (GROUP BY), updating all rows
    # that share each value in one statement.
    sql_results = self.connection.select('SELECT status from tv_episodes GROUP BY status;')
    for episode in sql_results:
        composite_status = episode['status']
        status, quality = utils.split_composite_status(composite_status)
        self.connection.action('UPDATE tv_episodes SET status = ?, quality = ? WHERE status = ?;',
                               [status, quality, composite_status])
    # Update `history` table: Remove the quality value from `action`
    log.info(u'Removing the quality from the action field, as this is a composite status')
    sql_results = self.connection.select('SELECT action FROM history GROUP BY action;')
    for item in sql_results:
        composite_action = item['action']
        status, quality = utils.split_composite_status(composite_action)
        # Only the status part is kept; quality is intentionally discarded here.
        self.connection.action('UPDATE history SET action = ? WHERE action = ?;',
                               [status, composite_action])
    self.inc_minor_version()
def execute(self):
    """Shift stored qualities in tv_shows, tv_episodes and history."""
    utils.backup_database(self.connection.path, self.connection.version)
    for shift in (self.shift_tv_qualities,
                  self.shift_episode_qualities,
                  self.shift_history_qualities):
        shift()
    self.inc_minor_version()
    log.info(u'Updated to: {}.{}', *self.connection.version)
def execute(self):
    """Add version and release_group bookkeeping columns (legacy schema path)."""
    utils.backup_database(self.connection.path, self.checkDBVersion())
    log.info(u'Adding column version to tv_episodes and history')
    for table, column, col_type, default in (
        ('tv_episodes', 'version', 'NUMERIC', '-1'),
        ('tv_episodes', 'release_group', 'TEXT', ''),
        ('history', 'version', 'NUMERIC', '-1'),
    ):
        self.addColumn(table, column, col_type, default)
    self.incDBVersion()
def execute(self): """ Updates the version until 44.1 """ utils.backup_database(self.connection.path, self.connection.version) log.info(u'Test major and minor version updates database') self.inc_major_version() self.inc_minor_version()
def execute(self):
    """Add version and release_group columns (major-version schema path)."""
    utils.backup_database(self.connection.path, self.checkMajorDBVersion())
    log.info(u'Adding column version to tv_episodes and history')
    for table, column, col_type, default in (
        ('tv_episodes', 'version', 'NUMERIC', '-1'),
        ('tv_episodes', 'release_group', 'TEXT', ''),
        ('history', 'version', 'NUMERIC', '-1'),
    ):
        self.addColumn(table, column, col_type, default)
    self.incMajorDBVersion()
def execute(self):
    """Add the info_hash column to the history table and log the new version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info(u'Adding column info_hash in history')
    missing = not self.hasColumn('history', 'info_hash')
    if missing:
        self.addColumn('history', 'info_hash', 'TEXT', None)
    self.inc_minor_version()
    log.info(u'Updated to: {}.{}', *conn.version)
def execute(self):
    """Add the size column to the history table and log the new version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info(u'Adding column size in history')
    # -1 marks rows recorded before the size was tracked.
    missing = not self.hasColumn('history', 'size')
    if missing:
        self.addColumn('history', 'size', 'NUMERIC', -1)
    self.inc_minor_version()
    log.info(u'Updated to: {}.{}', *conn.version)
def execute(self):
    """Add the db_minor_version column and switch to major.minor versioning."""
    utils.backup_database(self.connection.path, self.checkDBVersion())
    log.info(u'Add minor version numbers to database')
    # Pass native str column names, matching the sibling migration that adds
    # this same column. Bytes literals (b'...') interpolated into the SQL
    # under Python 3 would render as "b'db_version'" and corrupt the statement.
    self.addColumn('db_version', 'db_minor_version')
    self.inc_major_version()
    self.inc_minor_version()
    log.info(u'Updated to: {}.{}', *self.connection.version)
def execute(self):
    """Add release-word exclude-option flag columns to the tv_shows table."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding release ignore and require exclude option flags to the tv_shows table')
    # Both flags default to 0 (option disabled).
    for column in ('rls_require_exclude', 'rls_ignore_exclude'):
        if not self.hasColumn('tv_shows', column):
            self.addColumn('tv_shows', column, 'NUMERIC', 0)
    self.inc_minor_version()
def execute(self):
    """Add a plot column to both the imdb_info and tv_shows tables."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding column plot in imdb_info')
    if not self.hasColumn('imdb_info', 'plot'):
        self.addColumn('imdb_info', 'plot', 'TEXT', None)
    # Fixed log message: the table is named tv_shows, not tv_show.
    log.info(u'Adding column plot in tv_shows')
    if not self.hasColumn('tv_shows', 'plot'):
        self.addColumn('tv_shows', 'plot', 'TEXT', None)
    self.inc_minor_version()
def execute(self):
    """Create the scene_exceptions table in main.db."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info(
        'Creating a new table scene_exceptions in the main.db database.')
    # season -1 means the exception applies to every season of the series.
    conn.action(
        'CREATE TABLE scene_exceptions '
        '(exception_id INTEGER PRIMARY KEY, indexer INTEGER, series_id INTEGER, title TEXT, '
        'season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);')
    self.inc_minor_version()
def execute(self):
    """Create the custom_logs table in main.db."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    log.info('Creating a new table custom_logs in the main.db database.')
    conn.action(
        'CREATE TABLE custom_logs '
        '(log_id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, '
        'identifier TEXT NOT NULL, '
        'level INTEGER NOT NULL DEFAULT 0);')
    self.inc_minor_version()
def execute(self): """ Updates the version until 44.2 and adds proper_tags column """ utils.backup_database(self.connection.path, self.connection.version) if not self.hasColumn('history', 'proper_tags'): log.info(u'Adding column proper_tags to history') self.addColumn('history', 'proper_tags', 'TEXT', u'') # Call the update old propers once MainSanityCheck(self.connection).update_old_propers() self.inc_minor_version()
def execute(self):
    """Rebuild indexer_mapping with a composite primary key including mindexer."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding PK to mindexer column in indexer_mapping table')
    # SQLite cannot alter a primary key in place: build a replacement table,
    # copy the rows across, then swap it into place.
    statements = (
        'DROP TABLE IF EXISTS new_indexer_mapping;',
        'CREATE TABLE IF NOT EXISTS new_indexer_mapping'
        '(indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER,'
        'PRIMARY KEY (indexer_id, indexer, mindexer));',
        'INSERT INTO new_indexer_mapping SELECT * FROM indexer_mapping;',
        'DROP TABLE IF EXISTS indexer_mapping;',
        'ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;',
        'DROP TABLE IF EXISTS new_indexer_mapping;',
    )
    for statement in statements:
        self.connection.action(statement)
    self.inc_minor_version()
def execute(self):
    """Add the show_lists column to tv_shows and seed it from the anime flag."""
    utils.backup_database(self.connection.path, self.connection.version)
    # Fixed log typo: 'Addin' -> 'Adding'.
    log.info(u'Adding show_lists field to tv_shows.')
    if not self.hasColumn('tv_shows', 'show_lists'):
        self.addColumn('tv_shows', 'show_lists', 'text', 'series')
    # Shows that are not flagged as anime go in the series list
    self.connection.action("update tv_shows set show_lists = 'series' where anime = 0")
    # Shows that are flagged as anime go in the anime list
    self.connection.action("update tv_shows set show_lists = 'anime' where anime = 1")
    self.inc_minor_version()
def execute(self): """ Updates the version until 44.3 and adds manually_searched columns """ utils.backup_database(self.connection.path, self.connection.version) if not self.hasColumn('history', 'manually_searched'): log.info(u'Adding column manually_searched to history') self.addColumn('history', 'manually_searched', 'NUMERIC', 0) if not self.hasColumn('tv_episodes', 'manually_searched'): log.info(u'Adding column manually_searched to tv_episodes') self.addColumn('tv_episodes', 'manually_searched', 'NUMERIC', 0) MainSanityCheck(self.connection).update_old_propers() self.inc_minor_version()
def execute(self):
    """Re-create tv_shows so indexer and default_ep_status are declared NUMERIC."""
    utils.backup_database(self.connection.path, self.checkMajorDBVersion())
    log.info(u'Converting column indexer and default_ep_status field types to numeric')
    # SQLite cannot change a column's declared type in place, so swap the
    # table out via rename + re-create + copy.
    self.connection.action('DROP TABLE IF EXISTS tmp_tv_shows')
    self.connection.action('ALTER TABLE tv_shows RENAME TO tmp_tv_shows')
    self.connection.action('CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC,'
                           ' indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT,'
                           ' classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT,'
                           ' flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC,'
                           ' lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,'
                           ' last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC,'
                           ' rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC,'
                           ' scene NUMERIC, default_ep_status NUMERIC)')
    # Column order matches the old table, so a bare SELECT * copy is safe.
    self.connection.action('INSERT INTO tv_shows SELECT * FROM tmp_tv_shows')
    self.connection.action('DROP TABLE tmp_tv_shows')
    self.incMajorDBVersion()
def execute(self):
    """Rebuild indexer_mapping so the primary key includes mindexer."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding PK to mindexer column in indexer_mapping table')
    # SQLite cannot alter a primary key in place: build a replacement table,
    # copy the rows across, then swap it into place.
    for statement in (
        "DROP TABLE IF EXISTS new_indexer_mapping;",
        "CREATE TABLE IF NOT EXISTS new_indexer_mapping"
        "(indexer_id INTEGER, indexer INTEGER, mindexer_id INTEGER, mindexer INTEGER,"
        "PRIMARY KEY (indexer_id, indexer, mindexer));",
        "INSERT INTO new_indexer_mapping SELECT * FROM indexer_mapping;",
        "DROP TABLE IF EXISTS indexer_mapping;",
        "ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;",
        "DROP TABLE IF EXISTS new_indexer_mapping;",
    ):
        self.connection.action(statement)
    self.inc_minor_version()
    log.info(u'Updated to: {}.{}', *self.connection.version)
def execute(self):
    """Re-create tv_episodes so indexerid and indexer are declared INTEGER."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Make indexer and indexer_id as INTEGER in tv_episodes table')
    # Table swap: build the re-typed table, copy rows, drop the old table,
    # then rename the new one into place.
    self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;')
    self.connection.action(
        'CREATE TABLE new_tv_episodes '
        '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, '
        'season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, '
        'status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, '
        'subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, '
        'scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, '
        'version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);')
    self.connection.action('INSERT INTO new_tv_episodes SELECT * FROM tv_episodes;')
    self.connection.action('DROP TABLE IF EXISTS tv_episodes;')
    self.connection.action('ALTER TABLE new_tv_episodes RENAME TO tv_episodes;')
    # Fixed typo: the original dropped 'new_tv_episodoes', a table name that
    # never existed. After the rename this cleanup is a no-op either way, but
    # the intent is to remove any leftover working table.
    self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;')
    self.inc_minor_version()
def execute(self):
    """Add download-client tracking columns to the history table."""
    utils.backup_database(self.connection.path, self.connection.version)
    # (column name, SQL type, optional default for addColumn)
    new_columns = (
        # provider_type flags the history record as 'torrent' or 'nzb'
        ('provider_type', 'TEXT', ('',)),
        # client_status tries to keep track of the status on the nzb/torrent client.
        ('client_status', 'INTEGER', ()),
        # part_of_batch flags single snatch results as being part of a multi-ep result.
        ('part_of_batch', 'INTEGER', ()),
    )
    for name, sql_type, extra in new_columns:
        log.info(u'Adding column {0} to the history table'.format(name))
        if not self.hasColumn('history', name):
            self.addColumn('history', name, sql_type, *extra)
    self.inc_minor_version()
def execute(self):
    """Create the search_templates table and add a templates flag to tv_shows."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(
        u'Creating a new table search_templates in the main.db database.')
    self.connection.action("""CREATE TABLE "search_templates" ( `search_template_id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, `template` TEXT, `title` TEXT, `indexer` INTEGER, `series_id` INTEGER, `season` INTEGER, `enabled` INTEGER DEFAULT 1, `default` INTEGER DEFAULT 1, `season_search` INTEGER DEFAULT 0);""")
    log.info(u'Adding new templates field in the tv_shows table')
    # NOTE(review): 'templates' is NUMERIC with default 0 — presumably a
    # per-show on/off flag for custom search templates; confirm against callers.
    if not self.hasColumn('tv_shows', 'templates'):
        self.addColumn('tv_shows', 'templates', 'NUMERIC', 0)
    self.inc_minor_version()
def execute(self):
    """Add the watched column to tv_episodes via a table rebuild."""
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding new watched field in the tv_episodes table')
    # Rebuild the table: rename the old one, re-create with the extra column,
    # then copy the rows back.
    self.connection.action('DROP TABLE IF EXISTS tmp_tv_episodes;')
    self.connection.action('ALTER TABLE tv_episodes RENAME TO tmp_tv_episodes;')
    self.connection.action(
        'CREATE TABLE IF NOT EXISTS tv_episodes '
        '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, '
        'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, '
        'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, '
        'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, '
        'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, '
        'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, '
        'manually_searched NUMERIC, watched NUMERIC);'
    )
    # Re-insert old values, setting the new column 'watched' to the default value 0.
    self.connection.action(
        'INSERT INTO tv_episodes '
        '(showid, indexerid, indexer, name, season, episode, description, airdate, hasnfo, '
        'hastbn, status, quality, location, file_size, release_name, subtitles, subtitles_searchcount, '
        'subtitles_lastsearch, is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, '
        'version, release_group, manually_searched, watched) '
        'SELECT showid, indexerid, indexer, '
        'name, season, episode, description, airdate, hasnfo, '
        'hastbn, status, quality, location, file_size, release_name, '
        'subtitles, subtitles_searchcount, subtitles_lastsearch, '
        'is_proper, scene_season, scene_episode, absolute_number, '
        'scene_absolute_number, version, release_group, manually_searched, 0 AS watched '
        'FROM tmp_tv_episodes;'
    )
    self.connection.action('DROP TABLE tmp_tv_episodes;')
    self.inc_minor_version()
def execute(self):
    """Back up the database, then run the status translation."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    self.translate_status()
def execute(self):
    """Normalise series identification across main.db tables.

    Adds indexer/indexer_id columns where missing, rebuilds imdb_info and
    xem_refresh with surrogate integer primary keys, then back-fills the new
    indexer columns from the series known in tv_shows.
    """
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding column indexer_id in history')
    if not self.hasColumn('history', 'indexer_id'):
        self.addColumn('history', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer_id in blacklist')
    if not self.hasColumn('blacklist', 'indexer_id'):
        self.addColumn('blacklist', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer_id in whitelist')
    if not self.hasColumn('whitelist', 'indexer_id'):
        self.addColumn('whitelist', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer in imdb_info')
    if not self.hasColumn('imdb_info', 'indexer'):
        self.addColumn('imdb_info', 'indexer', 'NUMERIC', None)
    log.info(u'Dropping the unique index on idx_indexer_id')
    self.connection.action('DROP INDEX IF EXISTS idx_indexer_id')
    # Add the column imdb_info_id with PK
    self.connection.action('DROP TABLE IF EXISTS tmp_imdb_info')
    self.connection.action('ALTER TABLE imdb_info RENAME TO tmp_imdb_info')
    self.connection.action(
        'CREATE TABLE imdb_info(imdb_info_id INTEGER PRIMARY KEY, indexer NUMERIC, indexer_id INTEGER, imdb_id TEXT, '
        'title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, '
        'certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC, plot TEXT)'
    )
    self.connection.action(
        'INSERT INTO imdb_info (indexer, indexer_id, imdb_id, title, year, akas, runtimes, '
        'genres, countries, country_codes, certificates, rating, votes, last_update, plot) '
        'SELECT indexer, indexer_id, imdb_id, title, year, akas, runtimes, '
        'genres, countries, country_codes, certificates, rating, votes, last_update, plot FROM tmp_imdb_info'
    )
    self.connection.action('DROP TABLE tmp_imdb_info')
    # recreate the xem_refresh table, without the primary key on indexer_id. Add the column xem_refresh_id.
    log.info(u'Dropping the primary key on the table xem_refresh')
    self.connection.action('DROP TABLE IF EXISTS tmp_xem_refresh')
    self.connection.action(
        'ALTER TABLE xem_refresh RENAME TO tmp_xem_refresh')
    self.connection.action(
        'CREATE TABLE xem_refresh (xem_refresh_id INTEGER PRIMARY KEY, indexer INTEGER, indexer_id INTEGER, last_refreshed INTEGER)'
    )
    self.connection.action(
        'INSERT INTO xem_refresh (indexer, indexer_id, last_refreshed) '
        'SELECT CAST(indexer AS INTEGER), indexer_id, last_refreshed FROM tmp_xem_refresh'
    )
    self.connection.action('DROP TABLE tmp_xem_refresh')

    # Lazily-built map; see create_series_dict below.
    series_dict = {}

    def create_series_dict():
        """Populate series_dict as {indexer_id: indexer}, lazily and only once."""
        if not series_dict:
            # get all the shows. Might need them.
            all_series = self.connection.select(
                'SELECT indexer, indexer_id FROM tv_shows')
            # check for double
            for series in all_series:
                if series['indexer_id'] not in series_dict:
                    series_dict[series['indexer_id']] = series['indexer']
                else:
                    log.warning(
                        u'Found a duplicate series id for indexer_id: {0} and indexer: {1}',
                        series['indexer_id'], series['indexer'])

    # Check if it's required for the main.db tables.
    # Each tuple is (table, series-id column, column to back-fill).
    for migration_config in (('blacklist', 'show_id', 'indexer_id'),
                             ('whitelist', 'show_id', 'indexer_id'),
                             ('history', 'showid', 'indexer_id'),
                             ('imdb_info', 'indexer_id', 'indexer')):
        log.info(
            u'Updating indexer field on table {0}. Using the series id to match with field {1}',
            migration_config[0], migration_config[1])
        query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} IS NULL'.format(
            config=migration_config)
        results = self.connection.select(query)
        if not results:
            continue
        create_series_dict()

        # Updating all rows, using the series id.
        for series_id in series_dict:
            # Update the value in the db.
            # Get the indexer (tvdb, tmdb, tvmaze etc, for this series_id).
            # NOTE(review): the name 'indexer_id' here actually holds the
            # indexer value from series_dict.
            indexer_id = series_dict.get(series_id)
            if not indexer_id:
                continue
            self.connection.action(
                'UPDATE {config[0]} SET {config[2]} = ? WHERE {config[1]} = ?'
                .format(config=migration_config), [indexer_id, series_id])

    self.inc_minor_version()

    # Flag the image migration.
    from medusa import app
    app.MIGRATE_IMAGES = True
def execute(self):
    """Clear the provider tables, then bump the major version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    self.clear_provider_tables()
    self.inc_major_version()
def execute(self):
    """Shift history qualities and reset unknown statuses; bump major version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    self.shift_history_qualities()
    self.update_status_unknown()
    self.inc_major_version()
def execute(self):
    """Translate composite statuses, then bump the major version."""
    conn = self.connection
    utils.backup_database(conn.path, conn.version)
    self.translate_status()
    self.inc_major_version()
def execute(self):
    """Normalise series identification across main.db tables.

    Adds indexer/indexer_id columns where missing, rebuilds imdb_info and
    xem_refresh with surrogate integer primary keys, then back-fills the new
    indexer columns from the series known in tv_shows.
    """
    utils.backup_database(self.connection.path, self.connection.version)
    log.info(u'Adding column indexer_id in history')
    if not self.hasColumn('history', 'indexer_id'):
        self.addColumn('history', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer_id in blacklist')
    if not self.hasColumn('blacklist', 'indexer_id'):
        self.addColumn('blacklist', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer_id in whitelist')
    if not self.hasColumn('whitelist', 'indexer_id'):
        self.addColumn('whitelist', 'indexer_id', 'NUMERIC', None)
    log.info(u'Adding column indexer in imdb_info')
    if not self.hasColumn('imdb_info', 'indexer'):
        self.addColumn('imdb_info', 'indexer', 'NUMERIC', None)
    log.info(u'Dropping the unique index on idx_indexer_id')
    self.connection.action('DROP INDEX IF EXISTS idx_indexer_id')
    # Add the column imdb_info_id with PK
    self.connection.action('DROP TABLE IF EXISTS tmp_imdb_info')
    self.connection.action('ALTER TABLE imdb_info RENAME TO tmp_imdb_info')
    self.connection.action(
        'CREATE TABLE imdb_info(imdb_info_id INTEGER PRIMARY KEY, indexer NUMERIC, indexer_id INTEGER, imdb_id TEXT, '
        'title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, '
        'certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC, plot TEXT)'
    )
    self.connection.action('INSERT INTO imdb_info (indexer, indexer_id, imdb_id, title, year, akas, runtimes, '
                           'genres, countries, country_codes, certificates, rating, votes, last_update, plot) '
                           'SELECT indexer, indexer_id, imdb_id, title, year, akas, runtimes, '
                           'genres, countries, country_codes, certificates, rating, votes, last_update, plot FROM tmp_imdb_info')
    self.connection.action('DROP TABLE tmp_imdb_info')
    # recreate the xem_refresh table, without the primary key on indexer_id. Add the column xem_refresh_id.
    log.info(u'Dropping the primary key on the table xem_refresh')
    self.connection.action('DROP TABLE IF EXISTS tmp_xem_refresh')
    self.connection.action('ALTER TABLE xem_refresh RENAME TO tmp_xem_refresh')
    self.connection.action(
        'CREATE TABLE xem_refresh (xem_refresh_id INTEGER PRIMARY KEY, indexer INTEGER, indexer_id INTEGER, last_refreshed INTEGER)'
    )
    self.connection.action('INSERT INTO xem_refresh (indexer, indexer_id, last_refreshed) '
                           'SELECT CAST(indexer AS INTEGER), indexer_id, last_refreshed FROM tmp_xem_refresh')
    self.connection.action('DROP TABLE tmp_xem_refresh')

    # Lazily-built map; see create_series_dict below.
    series_dict = {}

    def create_series_dict():
        """Populate series_dict as {indexer_id: indexer}, lazily and only once."""
        if not series_dict:
            # get all the shows. Might need them.
            all_series = self.connection.select('SELECT indexer, indexer_id FROM tv_shows')
            # check for double
            for series in all_series:
                if series['indexer_id'] not in series_dict:
                    series_dict[series['indexer_id']] = series['indexer']
                else:
                    log.warning(u'Found a duplicate series id for indexer_id: {0} and indexer: {1}',
                                series['indexer_id'], series['indexer'])

    # Check if it's required for the main.db tables.
    # Each tuple is (table, series-id column, column to back-fill).
    for migration_config in (('blacklist', 'show_id', 'indexer_id'),
                             ('whitelist', 'show_id', 'indexer_id'),
                             ('history', 'showid', 'indexer_id'),
                             ('imdb_info', 'indexer_id', 'indexer')):
        log.info(
            u'Updating indexer field on table {0}. Using the series id to match with field {1}',
            migration_config[0], migration_config[1]
        )
        query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} IS NULL'.format(config=migration_config)
        results = self.connection.select(query)
        if not results:
            continue
        create_series_dict()

        # Updating all rows, using the series id.
        for series_id in series_dict:
            # Update the value in the db.
            # Get the indexer (tvdb, tmdb, tvmaze etc, for this series_id).
            # NOTE(review): the name 'indexer_id' here actually holds the
            # indexer value from series_dict.
            indexer_id = series_dict.get(series_id)
            if not indexer_id:
                continue
            self.connection.action(
                'UPDATE {config[0]} SET {config[2]} = ? WHERE {config[1]} = ?'.format(config=migration_config),
                [indexer_id, series_id])

    self.inc_minor_version()

    # Flag the image migration.
    from medusa import app
    app.MIGRATE_IMAGES = True