Example #1
def upgrade(ver, session):
    if ver == 0:
        table_names = [
            'rottentomatoes_actors',
            'rottentomatoes_alternate_ids',
            'rottentomatoes_directors',
            'rottentomatoes_genres',
            'rottentomatoes_links',
            'rottentomatoes_movie_actors',
            'rottentomatoes_movie_directors',
            'rottentomatoes_movie_genres',
            'rottentomatoes_movies',
            'rottentomatoes_posters',
            'rottentomatoes_releasedates',
            'rottentomatoes_search_results',
        ]
        tables = [table_schema(name, session) for name in table_names]
        for table in tables:
            session.execute(table.delete())
        table_add_column('rottentomatoes_actors', 'rt_id', String, session)
        ver = 1
    if ver == 1:
        table = table_schema('rottentomatoes_search_results', session)
        session.execute(sql.delete(table, table.c.movie_id == None))
        ver = 2
    return ver
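Every example on this page follows the same chained-version idiom: each `if` block migrates the schema one version forward and bumps `ver`, so a database at any older version falls through the remaining blocks in order and always lands on the latest version. A minimal sketch of that skeleton, with the per-step bodies elided:

def upgrade(ver, session):
    # `ver` is the stored schema version for this plugin, or None for a
    # database that predates versioning.
    if ver is None:
        ver = 0
    if ver == 0:
        # ... perform the 0 -> 1 migration against `session` here ...
        ver = 1
    if ver == 1:
        # ... perform the 1 -> 2 migration here ...
        ver = 2
    return ver  # the caller persists this as the new schema version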
Example #2
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 0
    if ver == 0:
        # Remove any values that are not loadable.
        table = table_schema('simple_persistence', session)
        for row in session.execute(select([table.c.id, table.c.plugin, table.c.key, table.c.value])):
            try:
                pickle.loads(row['value'])
            except Exception as e:
                log.warning('Couldn\'t load %s:%s, removing from db: %s' % (row['plugin'], row['key'], e))
                session.execute(table.delete().where(table.c.id == row['id']))
        ver = 1
    if ver == 1:
        log.info('Creating index on simple_persistence table.')
        create_index('simple_persistence', session, 'feed', 'plugin', 'key')
        ver = 2
    if ver == 2 or ver == 3:
        table = table_schema('simple_persistence', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('simple_persistence', session)
        for row in session.execute(select([table.c.id, table.c.value])):
            p = pickle.loads(row['value'])
            session.execute(table.update().where(table.c.id == row['id']).values(
                json=json.dumps(p, encode_datetime=True)))
        ver = 4
    return ver
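Example #2 also shows the recurring pickle-to-JSON conversion: add the `json` column, re-read the table schema so the new column is visible, then rewrite every row. A hedged sketch of that conversion loop, using the same legacy SQLAlchemy `select([...])` call style as these examples (the stdlib `json` module stands in here for FlexGet's own json helper, which takes `encode_datetime`):

import json
import pickle
from sqlalchemy import select

def convert_pickle_column(table, session):
    # Assumes a reflected SQLAlchemy `table` with `id`, a pickled `value`
    # column, and a freshly added `json` column.
    for row in session.execute(select([table.c.id, table.c.value])):
        try:
            data = pickle.loads(row['value'])  # old pickled payload
        except Exception:
            continue  # skip rows that no longer unpickle
        session.execute(
            table.update()
            .where(table.c.id == row['id'])
            .values(json=json.dumps(data, default=str))  # stdlib fallback
        )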
Example #3
def upgrade(ver, session):
    if ver == 0:
        table_names = [
            "rottentomatoes_actors",
            "rottentomatoes_alternate_ids",
            "rottentomatoes_directors",
            "rottentomatoes_genres",
            "rottentomatoes_links",
            "rottentomatoes_movie_actors",
            "rottentomatoes_movie_directors",
            "rottentomatoes_movie_genres",
            "rottentomatoes_movies",
            "rottentomatoes_posters",
            "rottentomatoes_releasedates",
            "rottentomatoes_search_results",
        ]
        tables = [table_schema(name, session) for name in table_names]
        for table in tables:
            session.execute(table.delete())
        table_add_column("rottentomatoes_actors", "rt_id", String, session)
        ver = 1
    if ver == 1:
        table = table_schema("rottentomatoes_search_results", session)
        session.execute(sql.delete(table, table.c.movie_id == None))
        ver = 2
    return ver
Example #4
def upgrade(ver, session):
    if ver is None:
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        log.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id', field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info('Adding local column to seen_entry table')
        table_add_column('seen_entry', 'local', Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info('Repairing seen table')
        entry_table = table_schema('seen_entry', session)
        session.execute(update(entry_table, entry_table.c.local == None, {'local': False}))
        ver = 4

    return ver
Example #5
def upgrade(ver, session):
    if ver is None:
        # Make sure there is no data we can't load in the backlog table
        backlog_table = table_schema('backlog', session)
        try:
            for item in session.query('entry').select_from(backlog_table).all():
                pickle.loads(item.entry)
        except (ImportError, TypeError):
            # If there were problems, we can drop the data.
            log.info('Backlog table contains unloadable data, clearing old data.')
            session.execute(backlog_table.delete())
        ver = 0
    if ver == 0:
        backlog_table = table_schema('backlog', session)
        log.info('Creating index on backlog table.')
        Index('ix_backlog_feed_expire', backlog_table.c.feed, backlog_table.c.expire).create(bind=session.bind)
        ver = 1
    if ver == 1:
        table = table_schema('backlog', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('backlog', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(table.update().where(table.c.id == row['id']).values(
                    json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error('Error upgrading backlog pickle object due to %s' % str(e))

        ver = 2
    return ver
Example #6
def upgrade(ver, session):
    if ver is None:
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        log.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id', field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info('Adding local column to seen_entry table')
        table_add_column('seen_entry', 'local', Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info('Repairing seen table')
        entry_table = table_schema('seen_entry', session)
        session.execute(update(entry_table, entry_table.c.local == None, {'local': False}))
        ver = 4

    return ver
Example #7
def upgrade(ver, session):
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                logger.error(
                    'Error upgrading input_cache pickle object due to {}',
                    str(e))
        ver = 1
    if ver == 1:
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 2
    return ver
Example #8
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 0
    if ver == 0:
        # Remove any values that are not loadable.
        table = table_schema('simple_persistence', session)
        for row in session.execute(select([table.c.id, table.c.plugin, table.c.key, table.c.value])):
            try:
                pickle.loads(row['value'])
            except Exception as e:
                log.warning('Couldn\'t load %s:%s, removing from db: %s' % (row['plugin'], row['key'], e))
                session.execute(table.delete().where(table.c.id == row['id']))
        ver = 1
    if ver == 1:
        log.info('Creating index on simple_persistence table.')
        create_index('simple_persistence', session, 'feed', 'plugin', 'key')
        ver = 2
    if ver == 2 or ver == 3:
        table = table_schema('simple_persistence', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('simple_persistence', session)
        for row in session.execute(select([table.c.id, table.c.value])):
            try:
                p = pickle.loads(row['value'])
                session.execute(table.update().where(table.c.id == row['id']).values(
                    json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error('Error upgrading simple_persistence pickle object due to %s' % str(e))

        ver = 4
    return ver
Example #9
def upgrade(ver, session):
    if ver is None:
        log.info("Converting seen imdb_url to imdb_id for seen movies.")
        field_table = table_schema("seen_field", session)
        for row in session.execute(select([field_table.c.id, field_table.c.value], field_table.c.field == "imdb_url")):
            new_values = {"field": "imdb_id", "value": extract_id(row["value"])}
            session.execute(update(field_table, field_table.c.id == row["id"], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema("seen_field", session)
        log.info("Adding index to seen_field table.")
        Index("ix_seen_field_seen_entry_id", field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info("Adding local column to seen_entry table")
        table_add_column("seen_entry", "local", Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info("Repairing seen table")
        entry_table = table_schema("seen_entry", session)
        session.execute(update(entry_table, entry_table.c.local == None, {"local": False}))
        ver = 4

    return ver
Example #10
def upgrade(ver: int, session: DBSession) -> int:
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as ex:
                logger.error(
                    f'Error upgrading input_cache pickle object due to {ex}'
                )
        ver = 1
    if ver == 1:
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            # Had a weird case of an entry without a title: https://github.com/Flexget/Flexget/issues/2636
            title = data.pop('title', None)
            entry = partial(Entry, **data)
            e = entry(title=str(title)) if title else entry()
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 2
    return ver
Example #11
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        failures = 0
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except (KeyError, ImportError):
                failures += 1
        if failures > 0:
            log.error(
                'Error upgrading %s pickle objects. Some delay information has been lost.'
                % failures)
        ver = 2

    return ver
Example #12
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        failures = 0
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update()
                    .where(table.c.id == row['id'])
                    .values(json=json.dumps(p, encode_datetime=True))
                )
            except (KeyError, ImportError):
                failures += 1
        if failures > 0:
            log.error(
                'Error upgrading %s pickle objects. Some delay information has been lost.'
                % failures
            )
        ver = 2

    return ver
Example #13
def upgrade(ver, session):
    if ver is None:
        columns = table_columns("make_rss", session)
        if not "rsslink" in columns:
            log.info("Adding rsslink column to table make_rss.")
            table_add_column("make_rss", "rsslink", String, session)
        ver = 0
    return ver
Example #14
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('make_rss', session)
        if 'rsslink' not in columns:
            log.info('Adding rsslink column to table make_rss.')
            table_add_column('make_rss', 'rsslink', String, session)
        ver = 0
    return ver
Example #15
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('make_rss', session)
        if 'rsslink' not in columns:
            log.info('Adding rsslink column to table make_rss.')
            table_add_column('make_rss', 'rsslink', String, session)
        ver = 0
    return ver
Example #16
def upgrade(ver, session):
    if False:  # ver == 0: disable this, since we don't have a remove column function
        table_add_column('queue', 'last_emit', DateTime, session)
        ver = 1
    if ver < 2:
        # We don't have a remove column for 'last_emit', do nothing
        ver = 2
    return ver
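The comment in Example #16 points at a real limitation: SQLite before 3.35 has no ALTER TABLE ... DROP COLUMN, which is why the dead migration is skipped rather than reversed. When a column really must go, the usual workaround is to rebuild the table; a sketch with illustrative table and column names, not FlexGet code:

import sqlite3

def drop_column_by_rebuild(conn: sqlite3.Connection) -> None:
    # Remove `last_emit` from `queue` by copying into a replacement table.
    cur = conn.cursor()
    cur.execute("CREATE TABLE queue_new (id INTEGER PRIMARY KEY, title TEXT)")
    cur.execute("INSERT INTO queue_new (id, title) SELECT id, title FROM queue")
    cur.execute("DROP TABLE queue")
    cur.execute("ALTER TABLE queue_new RENAME TO queue")
    conn.commit()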
Example #17
def upgrade(ver, session):
    if False:  # ver == 0: disable this, since we don't have a remove column function
        table_add_column('queue', 'last_emit', DateTime, session)
        ver = 1
    if ver < 2:
        # We don't have a remove column for 'last_emit', do nothing
        ver = 2
    return ver
Example #18
def upgrade(ver, session):
    if ver is None or ver < 1:
        raise db_schema.UpgradeImpossible
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
        ver = 2
    if ver == 2:
        table_add_column('failed', 'retry_time', DateTime, session)
        ver = 3
    return ver
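Example #18 raises `db_schema.UpgradeImpossible` for versions it no longer knows how to migrate; in FlexGet this tells the framework to discard the plugin's tables and recreate them from the current model instead of upgrading in place. An illustrative sketch of that contract (the handler below is an assumption, not FlexGet's actual dispatcher):

class UpgradeImpossible(Exception):
    """Schema version too old to migrate in place."""

def run_upgrade(upgrade, ver, session, metadata, latest_ver):
    try:
        return upgrade(ver, session)
    except UpgradeImpossible:
        metadata.drop_all(bind=session.bind)    # throw away the old tables
        metadata.create_all(bind=session.bind)  # rebuild from current models
        return latest_ver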
Example #19
def upgrade(ver, session):
    if ver is None:
        if 'last_updated' in persist:
            del persist['last_updated']
        ver = 0
    if ver == 0:
        table_add_column('tvdb_episodes', 'gueststars', Unicode, session)
        ver = 1

    return ver
Example #20
def upgrade(ver, session):
    if ver is None or ver < 1:
        raise db_schema.UpgradeImpossible
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
        ver = 2
    if ver == 2:
        table_add_column('failed', 'retry_time', DateTime, session)
        ver = 3
    return ver
Example #21
def upgrade(ver, session):
    if ver == 0:
        # Translate old qualities into new quality requirements
        movie_table = table_schema('movie_queue', session)
        for row in session.execute(
                select([movie_table.c.id, movie_table.c.quality])):
            # Webdl quality no longer has dash
            new_qual = row['quality'].replace('web-dl', 'webdl')
            if new_qual.lower() != 'any':
                # Old behavior was to get specified quality or greater, approximate that with new system
                new_qual = ' '.join(qual + '+' for qual in new_qual.split(' '))
            session.execute(
                update(movie_table, movie_table.c.id == row['id'],
                       {'quality': new_qual}))
        ver = 1
    if ver == 1:
        # Bad upgrade left some qualities as 'ANY+'
        movie_table = table_schema('movie_queue', session)
        for row in session.execute(
                select([movie_table.c.id, movie_table.c.quality])):
            if row['quality'].lower() == 'any+':
                session.execute(
                    update(movie_table, movie_table.c.id == row['id'],
                           {'quality': 'ANY'}))
        ver = 2
    if ver == 2:
        from flexget.utils.imdb import ImdbParser
        # Corrupted movie titles may be in the queue due to imdb layout changes. GitHub #729
        movie_table = table_schema('movie_queue', session)
        queue_base_table = table_schema('queue', session)
        query = select([
            movie_table.c.id, movie_table.c.imdb_id, queue_base_table.c.title
        ])
        query = query.where(movie_table.c.id == queue_base_table.c.id)
        for row in session.execute(query):
            if row['imdb_id'] and (not row['title'] or row['title'] == 'None'
                                   or '\n' in row['title']):
                log.info('Fixing movie_queue title for %s' % row['imdb_id'])
                parser = ImdbParser()
                parser.parse(row['imdb_id'])
                if parser.name:
                    session.execute(
                        update(queue_base_table,
                               queue_base_table.c.id == row['id'],
                               {'title': parser.name}))
        ver = 3
    if ver == 3:
        # adding queue_name column to movie_queue table and setting initial value to default
        table_add_column('movie_queue',
                         'queue_name',
                         Unicode,
                         session,
                         default='default')
        ver = 4
    return ver
Example #22
def upgrade(ver, session):
    if ver is None:
        # Make sure there is no data we can't load in the backlog table
        backlog_table = table_schema('backlog', session)
        try:
            for item in session.query('entry').select_from(
                    backlog_table).all():
                pickle.loads(item.entry)
        except (ImportError, TypeError):
            # If there were problems, we can drop the data.
            logger.info(
                'Backlog table contains unloadable data, clearing old data.')
            session.execute(backlog_table.delete())
        ver = 0
    if ver == 0:
        backlog_table = table_schema('backlog', session)
        logger.info('Creating index on backlog table.')
        Index('ix_backlog_feed_expire', backlog_table.c.feed,
              backlog_table.c.expire).create(bind=session.bind)
        ver = 1
    if ver == 1:
        table = table_schema('backlog', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('backlog', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                logger.error(
                    'Error upgrading backlog pickle object due to {}',
                    str(e))

        ver = 2
    if ver == 2:
        table = table_schema('backlog', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(table.update().where(
                table.c.id == row['id']).values(json=serialization.dumps(e)))

        ver = 3
    return ver
Example #23
def upgrade(ver, session):
    if ver is None:
        if 'last_updated' in persist:
            del persist['last_updated']
        ver = 0
    if ver == 0:
        table_add_column('tvdb_episodes', 'gueststars', Unicode, session)
        ver = 1
    if ver == 1:
        table_add_column('tvdb_episodes', 'absolute_number', Integer, session)
        ver = 2

    return ver
Example #24
def upgrade(ver, session):
    if ver == 0:
        table_names = [
            'rottentomatoes_actors',
            'rottentomatoes_alternate_ids',
            'rottentomatoes_directors',
            'rottentomatoes_genres',
            'rottentomatoes_links',
            'rottentomatoes_movie_actors',
            'rottentomatoes_movie_directors',
            'rottentomatoes_movie_genres',
            'rottentomatoes_movies',
            'rottentomatoes_posters',
            'rottentomatoes_releasedates',
            'rottentomatoes_search_results',
        ]
        tables = [table_schema(name, session) for name in table_names]
        for table in tables:
            session.execute(table.delete())
        table_add_column('rottentomatoes_actors', 'rt_id', String, session)
        ver = 1
    return ver
Example #25
def upgrade(ver, session):
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        meta = MetaData(bind=session.connection(), reflect=True)
        failed = meta.tables['failed']
        Index('failed_title_url', failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    return ver
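Example #25 reflects the live table with MetaData before defining the index, since the declarative model may already describe a newer schema than the database holds. A sketch of the same idea in the current SQLAlchemy (1.4+) reflection style, with an existence check so the migration stays re-runnable; names are illustrative:

from sqlalchemy import Index, MetaData, Table, inspect

def ensure_index(engine, table_name, index_name, *column_names):
    # Skip creation if an earlier run already built the index.
    existing = {ix['name'] for ix in inspect(engine).get_indexes(table_name)}
    if index_name in existing:
        return
    table = Table(table_name, MetaData(), autoload_with=engine)  # reflect live schema
    Index(index_name, *(table.c[name] for name in column_names)).create(bind=engine)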
Example #26
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('imdb_movies', session)
        if 'photo' not in columns:
            log.info('Adding photo column to imdb_movies table.')
            table_add_column('imdb_movies', 'photo', String, session)
        if 'updated' not in columns:
            log.info('Adding updated column to imdb_movies table.')
            table_add_column('imdb_movies', 'updated', DateTime, session)
        if 'mpaa_rating' not in columns:
            log.info('Adding mpaa_rating column to imdb_movies table.')
            table_add_column('imdb_movies', 'mpaa_rating', String, session)
        ver = 0
    if ver == 0:
        # create indexes retrospectively (~r2563)
        log.info('Adding imdb indexes delivering up to 20x speed increase \\o/ ...')
        indexes = [get_index_by_name(actors_table, 'ix_imdb_movie_actors'),
                   get_index_by_name(genres_table, 'ix_imdb_movie_genres'),
                   get_index_by_name(directors_table, 'ix_imdb_movie_directors')]
        for index in indexes:
            if index is None:
                log.critical('Index adding failure!')
                continue
            log.info('Creating index %s ...' % index.name)
            index.create(bind=session.connection())
        ver = 1
    if ver == 1:
        # http://flexget.com/ticket/1399
        log.info('Adding prominence column to imdb_movie_languages table.')
        table_add_column('imdb_movie_languages', 'prominence', Integer, session)
        ver = 2
    return ver
Example #27
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 0
    if ver == 0:
        try:
            # Remove any values that are not loadable.
            table = table_schema('simple_persistence', session)
            for row in session.execute(
                    select([
                        table.c.id, table.c.plugin, table.c.key, table.c.value
                    ])):
                try:
                    pickle.loads(row['value'])
                except Exception as e:
                    logger.warning("Couldn't load {}:{} removing from db: {}",
                                   row['plugin'], row['key'], e)
                    session.execute(
                        table.delete().where(table.c.id == row['id']))
        except Exception as e:
            logger.warning(
                "Couldn't upgrade the simple_persistence table. Commencing nuke. Error: {}",
                e)
            raise db_schema.UpgradeImpossible
        ver = 1
    if ver == 1:
        logger.info('Creating index on simple_persistence table.')
        create_index('simple_persistence', session, 'feed', 'plugin', 'key')
        ver = 2
    if ver == 2 or ver == 3:
        table = table_schema('simple_persistence', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('simple_persistence', session)
        failures = 0
        for row in session.execute(select([table.c.id, table.c.value])):
            try:
                p = pickle.loads(row['value'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except Exception:
                failures += 1
        if failures > 0:
            logger.error(
                'Error upgrading {} simple_persistence pickle objects. Some information has been lost.',
                failures,
            )
        ver = 4
    return ver
Example #28
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('imdb_movies', session)
        if 'photo' not in columns:
            log.info('Adding photo column to imdb_movies table.')
            table_add_column('imdb_movies', 'photo', String, session)
        if 'updated' not in columns:
            log.info('Adding updated column to imdb_movies table.')
            table_add_column('imdb_movies', 'updated', DateTime, session)
        if 'mpaa_rating' not in columns:
            log.info('Adding mpaa_rating column to imdb_movies table.')
            table_add_column('imdb_movies', 'mpaa_rating', String, session)
        ver = 0
    if ver == 0:
        # create indexes retrospectively (~r2563)
        log.info('Adding imdb indexes delivering up to 20x speed increase \\o/ ...')
        indexes = [get_index_by_name(actors_table, 'ix_imdb_movie_actors'),
                   get_index_by_name(genres_table, 'ix_imdb_movie_genres'),
                   get_index_by_name(directors_table, 'ix_imdb_movie_directors')]
        for index in indexes:
            if index is None:
                log.critical('Index adding failure!')
                continue
            log.info('Creating index %s ...' % index.name)
            index.create(bind=session.connection())
        ver = 1
    if ver == 1:
        # http://flexget.com/ticket/1399
        log.info('Adding prominence column to imdb_movie_languages table.')
        table_add_column('imdb_movie_languages', 'prominence', Integer, session)
        ver = 2
    return ver
Example #29
def upgrade(ver, session):
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        failed = table_schema('failed', session)
        Index('failed_title_url', failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
        ver = 2
    return ver
Example #30
def upgrade(ver, session):
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(table.update().where(table.c.id == row['id']).values(
                    json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error('Error upgrading input_cache pickle object due to %s' % str(e))
        ver = 1
    return ver
Example #31
def upgrade(ver, session):
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        failed = table_schema('failed', session)
        Index('failed_title_url', failed.c.title, failed.c.url,
              failed.c.count).create()
        ver = 1
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
        ver = 2
    return ver
Example #32
def upgrade(ver, session):
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        meta = MetaData(bind=session.connection(), reflect=True)
        failed = meta.tables['failed']
        Index('failed_title_url', failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
        ver = 2
    return ver
Example #33
def upgrade(ver, session):
    if ver == 0:
        table = table_schema('input_cache_entry', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('input_cache_entry', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error(
                    'Error upgrading input_cache pickle object due to %s'
                    % str(e))
        ver = 1
    return ver
Example #34
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        failures = 0
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update()
                    .where(table.c.id == row['id'])
                    .values(json=json.dumps(p, encode_datetime=True))
                )
            except (KeyError, ImportError):
                failures += 1
        if failures > 0:
            logger.error(
                'Error upgrading {} pickle objects. Some delay information has been lost.',
                failures,
            )
        ver = 2
    if ver == 2:
        table = table_schema('delay', session)
        for row in session.execute(select([table.c.id, table.c.json])):
            if not row['json']:
                # Seems there could be invalid data somehow. See #2590
                continue
            data = json.loads(row['json'], decode_datetime=True)
            # If title looked like a date, make sure it's a string
            title = str(data.pop('title'))
            e = Entry(title=title, **data)
            session.execute(
                table.update().where(table.c.id == row['id']).values(json=serialization.dumps(e))
            )
        ver = 3

    return ver
Example #35
def upgrade(ver, session):
    if ver == 0:
        # Translate old qualities into new quality requirements
        movie_table = table_schema('movie_queue', session)
        for row in session.execute(select([movie_table.c.id, movie_table.c.quality])):
            # Webdl quality no longer has dash
            new_qual = row['quality'].replace('web-dl', 'webdl')
            if new_qual.lower() != 'any':
                # Old behavior was to get specified quality or greater, approximate that with new system
                new_qual = ' '.join(qual + '+' for qual in new_qual.split(' '))
            session.execute(update(movie_table, movie_table.c.id == row['id'],
                                   {'quality': new_qual}))
        ver = 1
    if ver == 1:
        # Bad upgrade left some qualities as 'ANY+'
        movie_table = table_schema('movie_queue', session)
        for row in session.execute(select([movie_table.c.id, movie_table.c.quality])):
            if row['quality'].lower() == 'any+':
                session.execute(update(movie_table, movie_table.c.id == row['id'],
                                       {'quality': 'ANY'}))
        ver = 2
    if ver == 2:
        from flexget.utils.imdb import ImdbParser
        # Corrupted movie titles may be in the queue due to imdb layout changes. GitHub #729
        movie_table = table_schema('movie_queue', session)
        queue_base_table = table_schema('queue', session)
        query = select([movie_table.c.id, movie_table.c.imdb_id, queue_base_table.c.title])
        query = query.where(movie_table.c.id == queue_base_table.c.id)
        for row in session.execute(query):
            if row['imdb_id'] and (not row['title'] or row['title'] == 'None' or '\n' in row['title']):
                log.info('Fixing movie_queue title for %s' % row['imdb_id'])
                parser = ImdbParser()
                parser.parse(row['imdb_id'])
                if parser.name:
                    session.execute(update(queue_base_table, queue_base_table.c.id == row['id'],
                                           {'title': parser.name}))
        ver = 3
    if ver == 3:
        # adding queue_name column to movie_queue table and setting initial value to default
        table_add_column('movie_queue', 'queue_name', Unicode, session, default='default')
        ver = 4
    return ver
Example #36
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(table.update().where(table.c.id == row['id']).values(
                    json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error('Error upgrading delay pickle object due to %s' % str(e))

        ver = 2

    return ver
Example #37
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            # Drop the old table
            log.info('Dropping old version of remember_rejected_entry table from db')
            drop_tables(['remember_rejected_entry'], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
            # We go directly to version 3, as remember_rejected_entry table has just been made from current model
            # TODO: Fix this somehow. Just avoid dropping tables?
            ver = 3
        else:
            ver = 0
    if ver == 0:
        log.info('Adding reason column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'reason', String, session)
        ver = 1
    if ver == 1:
        log.info('Adding `added` column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'added', DateTime, session, default=datetime.now)
        ver = 2
    if ver == 2:
        log.info('Adding expires column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'expires', DateTime, session)
        ver = 3
    return ver
Example #38
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            # Drop the old table
            log.info(
                'Dropping old version of remember_rejected_entry table from db'
            )
            drop_tables(['remember_rejected_entry'], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
            # We go directly to version 3, as remember_rejected_entry table has just been made from current model
            # TODO: Fix this somehow. Just avoid dropping tables?
            ver = 3
        else:
            ver = 0
    if ver == 0:
        log.info('Adding reason column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'reason', String, session)
        ver = 1
    if ver == 1:
        log.info('Adding `added` column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry',
                         'added',
                         DateTime,
                         session,
                         default=datetime.now)
        ver = 2
    if ver == 2:
        log.info('Adding expires column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'expires', DateTime,
                         session)
        ver = 3
    return ver
Example #39
def upgrade(ver, session):
    if ver is None:
        columns = table_columns("imdb_movies", session)
        if not "photo" in columns:
            log.info("Adding photo column to imdb_movies table.")
            table_add_column("imdb_movies", "photo", String, session)
        if not "updated" in columns:
            log.info("Adding updated column to imdb_movies table.")
            table_add_column("imdb_movies", "updated", DateTime, session)
        if not "mpaa_rating" in columns:
            log.info("Adding mpaa_rating column to imdb_movies table.")
            table_add_column("imdb_movies", "mpaa_rating", String, session)
        ver = 0
    if ver == 0:
        # create indexes retrospectively (~r2563)
        log.info("Adding imdb indexes delivering up to 20x speed increase \o/ ...")
        indexes = [
            get_index_by_name(actors_table, "ix_imdb_movie_actors"),
            get_index_by_name(genres_table, "ix_imdb_movie_genres"),
            get_index_by_name(languages_table, "ix_imdb_movie_languages"),
            get_index_by_name(directors_table, "ix_imdb_movie_directors"),
        ]
        for index in indexes:
            if index is None:
                log.critical("Index adding failure!")
                continue
            log.info("Creating index %s ..." % index.name)
            index.create(bind=session.connection())
        ver = 1
    return ver
Example #40
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 1
    if ver == 1:
        table = table_schema('delay', session)
        table_add_column(table, 'json', Unicode, session)
        # Make sure we get the new schema with the added column
        table = table_schema('delay', session)
        for row in session.execute(select([table.c.id, table.c.entry])):
            try:
                p = pickle.loads(row['entry'])
                session.execute(
                    table.update().where(table.c.id == row['id']).values(
                        json=json.dumps(p, encode_datetime=True)))
            except KeyError as e:
                log.error(
                    'Error upgrading delay pickle object due to %s' %
                    str(e))

        ver = 2

    return ver
Example #41
def upgrade_database(ver, session):
    if ver <= 2:
        raise db_schema.UpgradeImpossible
    if ver <= 3:
        table_add_column('trakt_movies', 'poster', Unicode, session)
        table_add_column('trakt_shows', 'poster', Unicode, session)
        table_add_column('trakt_episodes', 'poster', Unicode, session)
        ver = 4
    return ver
Example #42
def upgrade_database(ver, session):
    if ver <= 2:
        raise db_schema.UpgradeImpossible
    if ver <= 3:
        table_add_column('trakt_movies', 'poster', Unicode, session)
        table_add_column('trakt_shows', 'poster', Unicode, session)
        table_add_column('trakt_episodes', 'poster', Unicode, session)
        ver = 4
    return ver
Example #43
def upgrade(ver, session):
    if ver is None:
        if 'last_updated' in persist:
            del persist['last_updated']
        ver = 0
    if ver == 0:
        table_add_column('tvdb_episodes', 'gueststars', Unicode, session)
        ver = 1
    if ver == 1:
        table_add_column('tvdb_episodes', 'absolute_number', Integer, session)
        ver = 2
    if ver == 2:
        table_add_column('tvdb_series', 'overview', Unicode, session)
        ver = 3
    if ver == 3:
        table_add_column('tvdb_series', 'actors', Unicode, session)
        ver = 4

    return ver
Example #44
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('imdb_movies', session)
        if 'photo' not in columns:
            log.info('Adding photo column to imdb_movies table.')
            table_add_column('imdb_movies', 'photo', String, session)
        if 'updated' not in columns:
            log.info('Adding updated column to imdb_movies table.')
            table_add_column('imdb_movies', 'updated', DateTime, session)
        if 'mpaa_rating' not in columns:
            log.info('Adding mpaa_rating column to imdb_movies table.')
            table_add_column('imdb_movies', 'mpaa_rating', String, session)
        ver = 0
    return ver
Example #45
def upgrade(ver, session):
    if ver is None:
        if "last_updated" in persist:
            del persist["last_updated"]
        ver = 0
    if ver == 0:
        table_add_column("tvdb_episodes", "gueststars", Unicode, session)
        ver = 1
    if ver == 1:
        table_add_column("tvdb_episodes", "absolute_number", Integer, session)
        ver = 2
    if ver == 2:
        table_add_column("tvdb_series", "overview", Unicode, session)
        ver = 3

    return ver
Example #46
def upgrade(ver, session):
    if ver is None:
        # add count column
        table_add_column("failed", "count", Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info("Adding database index ...")
        failed = table_schema("failed", session)
        Index("failed_title_url", failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    if ver == 1:
        table_add_column("failed", "reason", Unicode, session)
        ver = 2
    if ver == 2:
        table_add_column("failed", "retry_time", DateTime, session)
        ver = 3
    return ver
Example #47
def upgrade(ver, session):
    if ver is None:
        columns = table_columns("remember_rejected_entry", session)
        if "uid" in columns:
            raise db_schema.UpgradeImpossible
        ver = 0
    if ver == 0:
        log.info("Adding reason column to remember_rejected_entry table.")
        table_add_column("remember_rejected_entry", "reason", String, session)
        ver = 1
    if ver == 1:
        log.info("Adding `added` column to remember_rejected_entry table.")
        table_add_column("remember_rejected_entry", "added", DateTime, session, default=datetime.now)
        ver = 2
    if ver == 2:
        log.info("Adding expires column to remember_rejected_entry table.")
        table_add_column("remember_rejected_entry", "expires", DateTime, session)
        ver = 3
    return ver
Example #48
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            raise db_schema.UpgradeImpossible
        ver = 0
    if ver == 0:
        log.info('Adding reason column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'reason', String, session)
        ver = 1
    if ver == 1:
        log.info('Adding `added` column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'added', DateTime, session, default=datetime.now)
        ver = 2
    if ver == 2:
        log.info('Adding expires column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'expires', DateTime, session)
        ver = 3
    return ver
Example #49
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            raise db_schema.UpgradeImpossible
        ver = 0
    if ver == 0:
        log.info('Adding reason column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'reason', String, session)
        ver = 1
    if ver == 1:
        log.info('Adding `added` column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'added', DateTime, session, default=datetime.now)
        ver = 2
    if ver == 2:
        log.info('Adding expires column to remember_rejected_entry table.')
        table_add_column('remember_rejected_entry', 'expires', DateTime, session)
        ver = 3
    return ver
Example #50
def upgrade(ver, session):
    if ver is None:
        if table_exists('episode_qualities', session):
            log.info('Series database format is too old to upgrade, dropping and recreating tables.')
            # Drop the deprecated data
            drop_tables(['series', 'series_episodes', 'episode_qualities'], session)
            # Create new tables from the current models
            Base.metadata.create_all(bind=session.bind)
        # Upgrade episode_releases table to have a proper count and seed it with appropriate numbers
        columns = table_columns('episode_releases', session)
        if 'proper_count' not in columns:
            log.info('Upgrading episode_releases table to have proper_count column')
            table_add_column('episode_releases', 'proper_count', Integer, session)
            release_table = table_schema('episode_releases', session)
            for row in session.execute(select([release_table.c.id, release_table.c.title])):
                # Recalculate the proper_count from title for old episodes
                proper_count = len([part for part in re.split(r'[\W_]+', row['title'].lower())
                                    if part in SeriesParser.propers])
                session.execute(update(release_table, release_table.c.id == row['id'], {'proper_count': proper_count}))
        ver = 0
    if ver == 0:
        log.info('Migrating first_seen column from series_episodes to episode_releases table.')
        # Create the column in episode_releases
        table_add_column('episode_releases', 'first_seen', DateTime, session)
        # Seed the first_seen value for all the past releases with the first_seen of their episode.
        episode_table = table_schema('series_episodes', session)
        release_table = table_schema('episode_releases', session)
        for row in session.execute(select([episode_table.c.id, episode_table.c.first_seen])):
            session.execute(update(release_table, release_table.c.episode_id == row['id'],
                                   {'first_seen': row['first_seen']}))
        ver = 1
    if ver == 1:
        log.info('Adding `identified_by` column to series table.')
        table_add_column('series', 'identified_by', String, session)
        ver = 2
    if ver == 2:
        release_table = table_schema('episode_releases', session)
        log.info('Creating index on episode_releases table.')
        Index('ix_episode_releases_episode_id', release_table.c.episode_id).create(bind=session.bind)
        ver = 3

    return ver
Example #51
def upgrade(ver, session):
    if ver is None:
        log.info(
            'Adding columns to tmdb cache table, marking current cache as expired.'
        )
        table_add_column('tmdb_movies', 'runtime', Integer, session)
        table_add_column('tmdb_movies', 'tagline', Unicode, session)
        table_add_column('tmdb_movies', 'budget', Integer, session)
        table_add_column('tmdb_movies', 'revenue', Integer, session)
        table_add_column('tmdb_movies', 'homepage', String, session)
        table_add_column('tmdb_movies', 'trailer', String, session)
        # Mark all cached movies as expired, so new fields get populated next lookup
        movie_table = table_schema('tmdb_movies', session)
        session.execute(
            movie_table.update(values={'updated': datetime(1970, 1, 1)}))
        ver = 0
    return ver
Example #52
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('imdb_movies', session)
        if 'photo' not in columns:
            log.info('Adding photo column to imdb_movies table.')
            table_add_column('imdb_movies', 'photo', String, session)
        if 'updated' not in columns:
            log.info('Adding updated column to imdb_movies table.')
            table_add_column('imdb_movies', 'updated', DateTime, session)
        if 'mpaa_rating' not in columns:
            log.info('Adding mpaa_rating column to imdb_movies table.')
            table_add_column('imdb_movies', 'mpaa_rating', String, session)
        ver = 0
    if ver == 0:
        # create indexes retrospectively (~r2563)
        log.info('Adding imdb indexes delivering up to 20x speed increase \\o/ ...')
        indexes = [get_index_by_name(actors_table, 'ix_imdb_movie_actors'),
                   get_index_by_name(genres_table, 'ix_imdb_movie_genres'),
                   get_index_by_name(directors_table, 'ix_imdb_movie_directors')]
        for index in indexes:
            if index is None:
                log.critical('Index adding failure!')
                continue
            log.info('Creating index %s ...' % index.name)
            index.create(bind=session.connection())
        ver = 1
    if ver == 1:
        # http://flexget.com/ticket/1399
        log.info('Adding prominence column to imdb_movie_languages table.')
        table_add_column('imdb_movie_languages', 'prominence', Integer, session)
        ver = 2
    if ver == 2:
        log.info('Adding search result timestamp and clearing all previous results.')
        table_add_column('imdb_search', 'queried', DateTime, session)
        search_table = table_schema('imdb_search', session)
        session.execute(delete(search_table, search_table.c.fails))
        ver = 3
    if ver == 3:
        log.info('Adding original title column, cached data will not have this information')
        table_add_column('imdb_movies', 'original_title', Unicode, session)
        ver = 4
    return ver
Example #53
def upgrade(ver, session):
    if ver is None:
        columns = table_columns('imdb_movies', session)
        if 'photo' not in columns:
            log.info('Adding photo column to imdb_movies table.')
            table_add_column('imdb_movies', 'photo', String, session)
        if 'updated' not in columns:
            log.info('Adding updated column to imdb_movies table.')
            table_add_column('imdb_movies', 'updated', DateTime, session)
        if 'mpaa_rating' not in columns:
            log.info('Adding mpaa_rating column to imdb_movies table.')
            table_add_column('imdb_movies', 'mpaa_rating', String, session)
        ver = 0
    if ver == 0:
        # create indexes retrospectively (~r2563)
        log.info('Adding imdb indexes delivering up to 20x speed increase \\o/ ...')
        indexes = [get_index_by_name(actors_table, 'ix_imdb_movie_actors'),
                   get_index_by_name(genres_table, 'ix_imdb_movie_genres'),
                   get_index_by_name(directors_table, 'ix_imdb_movie_directors')]
        for index in indexes:
            if index is None:
                log.critical('Index adding failure!')
                continue
            log.info('Creating index %s ...' % index.name)
            index.create(bind=session.connection())
        ver = 1
    if ver == 1:
        # http://flexget.com/ticket/1399
        log.info('Adding prominence column to imdb_movie_languages table.')
        table_add_column('imdb_movie_languages', 'prominence', Integer, session)
        ver = 2
    if ver == 2:
        log.info('Adding search result timestamp and clearing all previous results.')
        table_add_column('imdb_search', 'queried', DateTime, session)
        search_table = table_schema('imdb_search', session)
        session.execute(delete(search_table, search_table.c.fails))
        ver = 3
    if ver == 3:
        log.info('Adding original title column, cached data will not have this information')
        table_add_column('imdb_movies', 'original_title', Unicode, session)
        ver = 4
    return ver
Example #54
def upgrade(ver, session):
    if ver is None:
        log.info('Adding columns to tmdb cache table, marking current cache as expired.')
        table_add_column('tmdb_movies', 'runtime', Integer, session)
        table_add_column('tmdb_movies', 'tagline', Unicode, session)
        table_add_column('tmdb_movies', 'budget', Integer, session)
        table_add_column('tmdb_movies', 'revenue', Integer, session)
        table_add_column('tmdb_movies', 'homepage', String, session)
        table_add_column('tmdb_movies', 'trailer', String, session)
        # Mark all cached movies as expired, so new fields get populated next lookup
        movie_table = table_schema('tmdb_movies', session)
        session.execute(movie_table.update(values={'updated': datetime(1970, 1, 1)}))
        ver = 0
    return ver
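For context: in FlexGet, these functions are wired up through the `db_schema` module, which stores a per-plugin schema version and calls the registered upgrade hook whenever the stored version lags behind the declared one. A sketch of the typical wiring, with an illustrative plugin name, table name, and version number:

from sqlalchemy import Unicode
from flexget import db_schema
from flexget.utils.sqlalchemy_utils import table_add_column

# Declare the current schema version; model classes would subclass Base.
Base = db_schema.versioned_base('my_plugin', 1)

@db_schema.upgrade('my_plugin')
def upgrade(ver, session):
    if ver is None:
        ver = 0
    if ver == 0:
        # illustrative step: add the json column used by newer code
        table_add_column('my_plugin_table', 'json', Unicode, session)
        ver = 1
    return ver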