# FlexGet movie_queue plugin (chunk): queued-movie DB model and queue filter.
# NOTE(review): `logging`, `Column`, `Integer`, `String` and `ForeignKey` are
# used below but their imports are not visible in this chunk — confirm they
# are imported earlier in the file.
from flexget.utils import qualities
from flexget.utils.imdb import extract_id, ImdbSearch, ImdbParser
from flexget.utils.database import quality_synonym
from flexget.utils.tools import console, str_to_boolean
from flexget.plugin import DependencyError, PluginError, get_plugin_by_name, register_plugin, register_parser_option
from flexget.schema import versioned_base

try:
    from flexget.plugins.filter import queue_base
except ImportError:
    # queue_base supplies the shared queue table this plugin extends.
    raise DependencyError(issued_by='movie_queue', missing='queue_base',
                          message='movie_queue requires the queue_base plugin')

log = logging.getLogger('movie_queue')
Base = versioned_base('movie_queue', 0)


class QueuedMovie(queue_base.QueuedItem, Base):
    """Joined-table-inheritance row for a movie waiting in the queue."""
    __tablename__ = 'movie_queue'
    __mapper_args__ = {'polymorphic_identity': 'movie'}
    # Primary key doubles as foreign key into the shared `queue` table.
    id = Column(Integer, ForeignKey('queue.id'), primary_key=True)
    imdb_id = Column(String)
    tmdb_id = Column(Integer)
    # `quality` is exposed via a synonym that validates/normalizes the string.
    _quality = Column('quality', String)
    quality = quality_synonym('_quality')


class FilterMovieQueue(queue_base.FilterQueueBase):

    # NOTE(review): method body is truncated in this chunk.
    def matches(self, feed, config, entry):
        # make sure the entry has IMDB fields filled
# FlexGet pogcal_acquired plugin (chunk): pogdesign calendar "acquired" marker.
from __future__ import unicode_literals, division, absolute_import
import logging
import re
from datetime import datetime
from sqlalchemy import Column, Unicode, Integer
from flexget import validator
from flexget.plugin import register_plugin
from flexget.utils import requests
from flexget.utils.soup import get_soup
from flexget.schema import versioned_base

log = logging.getLogger('pogcal_acquired')
Base = versioned_base('pogcal_acquired', 0)
# Shared HTTP session; retries transient failures up to twice.
session = requests.Session(config={'max_retries': 2})


class PogcalShow(Base):
    """Maps a locally-known show name to its pogdesign calendar id."""
    __tablename__ = 'pogcal_shows'
    # id comes from the remote calendar, so it is NOT auto-incremented.
    id = Column(Integer, primary_key=True, autoincrement=False, nullable=False)
    name = Column(Unicode)


class PogcalAcquired(object):

    def validator(self):
        """Config schema: requires `username` and `password` text keys."""
        root = validator.factory('dict')
        root.accept('text', key='username', required=True)
        root.accept('text', key='password', required=True)
        return root
# FlexGet thetvdb_favorites plugin (chunk): schema setup and DB upgrade.
# NOTE(review): `logging` and `schema` are used below but their imports are
# not visible in this chunk — confirm they are imported earlier in the file.
from flexget.plugin import register_plugin, internet, DependencyError
from flexget.utils.tools import urlopener
from flexget.utils.database import pipe_list_synonym, with_session
from flexget.utils.sqlalchemy_utils import drop_tables, table_columns
from flexget.utils.cached_input import cached
from flexget.entry import Entry

try:
    from flexget.plugins.api_tvdb import lookup_series
except ImportError:
    raise DependencyError(
        issued_by="thetvdb_favorites", missing="api_tvdb",
        message="thetvdb_lookup requires the `api_tvdb` plugin"
    )

log = logging.getLogger("thetvdb_favorites")
Base = schema.versioned_base("thetvdb_favorites", 0)


@schema.upgrade("thetvdb_favorites")
def upgrade(ver, session):
    """Migrate pre-versioned tables: drop the legacy table and recreate it."""
    if ver is None:
        columns = table_columns("thetvdb_favorites", session)
        # Old schema lacked the `series_ids` column; rebuild from the model.
        if not "series_ids" in columns:
            # Drop the old table
            log.info("Dropping old version of thetvdb_favorites table from db")
            drop_tables(["thetvdb_favorites"], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
        ver = 0
    return ver
# FlexGet task module (chunk): config-hash model for change detection.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk — confirm it is imported earlier in the file.
import itertools
from sqlalchemy import Column, Unicode, String, Integer
from flexget import validator
from flexget import schema
from flexget.manager import Session, register_config_key
from flexget.plugin import (get_plugins_by_phase, get_plugin_by_name, task_phases, PluginWarning, PluginError,
                            DependencyError, plugins as all_plugins)
from flexget.utils.simple_persistence import SimpleTaskPersistence
from flexget.event import fire_event
from flexget.entry import Entry, EntryUnicodeError
import flexget.utils.requests as requests

log = logging.getLogger('task')
# Table name kept as legacy 'feed' for backward compatibility.
Base = schema.versioned_base('feed', 0)


class TaskConfigHash(Base):
    """Stores the config hash for tasks so that we can tell if the config has
    changed since last run."""
    __tablename__ = 'feed_config_hash'

    id = Column(Integer, primary_key=True)
    # Column named 'name' in the DB, exposed as `task` on the model.
    task = Column('name', Unicode, index=True, nullable=False)
    hash = Column('hash', String)

    def __repr__(self):
        return '<TaskConfigHash(task=%s,hash=%s)>' % (self.task, self.hash)
# FlexGet seen plugin (chunk): schema setup and DB upgrade (truncated).
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, Unicode, Boolean, asc, or_, select, update, Index
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation
from flexget.manager import Session
from flexget.event import event
from flexget.plugin import register_plugin, priority, register_parser_option
from flexget import schema
from flexget.utils.sqlalchemy_utils import table_schema, table_add_column
from flexget.utils.imdb import is_imdb_url, extract_id

log = logging.getLogger('seen')
Base = schema.versioned_base('seen', 4)


# NOTE(review): this upgrade chain is truncated in this chunk; later version
# steps (2..4) are not visible here.
@schema.upgrade('seen')
def upgrade(ver, session):
    """Step the `seen` tables from older schema versions towards version 4."""
    if ver is None:
        # v0 -> v1: seen movies stored the full imdb_url; normalize to imdb_id.
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        log.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id', field_table.c.seen_entry_id).create(bind=session.bind)
# FlexGet myepisodes plugin (chunk): series-name -> myepisodes-id cache model.
# NOTE(review): `logging` and `datetime` are used below but their imports are
# not visible in this chunk — confirm they are imported earlier in the file.
from sqlalchemy import Column, Integer, String, DateTime
from flexget import schema
from flexget.plugin import register_plugin, DependencyError, PluginWarning

try:
    from flexget.plugins.api_tvdb import lookup_series
except ImportError:
    raise DependencyError(
        issued_by="myepisodes", missing="api_tvdb",
        message="myepisodes requires the `api_tvdb` plugin"
    )

log = logging.getLogger("myepisodes")
Base = schema.versioned_base("myepisodes", 0)


class MyEpisodesInfo(Base):
    """Caches the mapping from a series name to its myepisodes.com id."""
    __tablename__ = "myepisodes"

    id = Column(Integer, primary_key=True)
    # One cache row per series name / per myepisodes id.
    series_name = Column(String, unique=True)
    myepisodes_id = Column(Integer, unique=True)
    updated = Column(DateTime)

    def __init__(self, series_name, myepisodes_id):
        self.series_name = series_name
        self.myepisodes_id = myepisodes_id
        # Timestamp the cache entry so staleness can be checked later.
        self.updated = datetime.now()
# FlexGet imdb_lookup plugin (chunk): many-to-many association tables
# between movies and their genres / actors / directors.
from sqlalchemy import Table, Column, Integer, Float, String, Unicode, Boolean, DateTime, delete
from sqlalchemy.schema import ForeignKey, Index
from sqlalchemy.orm import relation, joinedload_all
from flexget import schema
from flexget.entry import Entry
from flexget.plugin import register_plugin, internet, PluginError, priority
from flexget.manager import Session
from flexget.utils.log import log_once
from flexget.utils.imdb import ImdbSearch, ImdbParser, extract_id, make_url
from flexget.utils.sqlalchemy_utils import table_add_column
from flexget.utils.database import with_session
from flexget.utils.sqlalchemy_utils import table_columns, get_index_by_name, table_schema

SCHEMA_VER = 3

Base = schema.versioned_base('imdb_lookup', SCHEMA_VER)

# association tables (composite indexes speed up join lookups)
genres_table = Table('imdb_movie_genres', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
                     Column('genre_id', Integer, ForeignKey('imdb_genres.id')),
                     Index('ix_imdb_movie_genres', 'movie_id', 'genre_id'))

actors_table = Table('imdb_movie_actors', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
                     Column('actor_id', Integer, ForeignKey('imdb_actors.id')),
                     Index('ix_imdb_movie_actors', 'movie_id', 'actor_id'))

# NOTE(review): this Table definition is truncated in this chunk.
directors_table = Table(
    'imdb_movie_directors', Base.metadata,
    Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
# FlexGet failed plugin (chunk): schema setup and DB upgrade (truncated).
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, Unicode, DateTime
from sqlalchemy.schema import Index, MetaData
from flexget import schema
from flexget.plugin import register_plugin, register_parser_option, priority, DependencyError, get_plugin_by_name
from flexget.manager import Session
from flexget.utils.tools import console, parse_timedelta
from flexget.utils.sqlalchemy_utils import table_add_column

SCHEMA_VER = 2

log = logging.getLogger('failed')
Base = schema.versioned_base('failed', SCHEMA_VER)


# NOTE(review): the end of this upgrade chain is truncated in this chunk
# (no visible `return ver`).
@schema.upgrade('failed')
def upgrade(ver, session):
    """Step the `failed` table from older schema versions towards version 2."""
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        # Reflect the live table so the index matches the existing columns.
        meta = MetaData(bind=session.connection(), reflect=True)
        failed = meta.tables['failed']
        Index('failed_title_url', failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    if ver == 1:
        table_add_column('failed', 'reason', Unicode, session)
# FlexGet series plugin (chunk): schema setup and DB upgrade (truncated).
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk — confirm it is imported earlier in the file.
from flexget import schema
from flexget.event import event
from flexget.utils import qualities
from flexget.utils.log import log_once
from flexget.utils.titles import SeriesParser, ParseWarning
from flexget.utils.sqlalchemy_utils import table_columns, table_exists, drop_tables, table_schema, table_add_column
from flexget.utils.tools import merge_dict_from_to
from flexget.utils.database import quality_property, ignore_case_property
from flexget.manager import Session
from flexget.plugin import (register_plugin, register_parser_option, get_plugin_by_name, get_plugin_keywords,
                            PluginWarning, PluginError, DependencyError, priority)

SCHEMA_VER = 2

log = logging.getLogger('series')
# Use the SCHEMA_VER constant (was a hard-coded 2) so bumping the version
# cannot silently diverge between the constant and the versioned base.
Base = schema.versioned_base('series', SCHEMA_VER)


# NOTE(review): the end of this upgrade chain is truncated in this chunk.
@schema.upgrade('series')
def upgrade(ver, session):
    """Step the series tables from older schema versions towards SCHEMA_VER."""
    if ver is None:
        if table_exists('episode_qualities', session):
            log.info('Series database format is too old to upgrade, dropping and recreating tables.')
            # Drop the deprecated data
            drop_tables(['series', 'series_episodes', 'episode_qualities'], session)
            # Create new tables from the current models
            Base.metadata.create_all(bind=session.bind)
        # Upgrade episode_releases table to have a proper count and seed it with appropriate numbers
        columns = table_columns('episode_releases', session)
        if 'proper_count' not in columns:
            log.info('Upgrading episode_releases table to have proper_count column')
# FlexGet thetvdb_favorites plugin (chunk): schema setup and DB upgrade.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk — confirm it is imported earlier in the file.
from flexget import schema
from flexget.plugin import register_plugin, internet, DependencyError
from flexget.utils.tools import urlopener
from flexget.utils.database import pipe_list_synonym, with_session
from flexget.utils.sqlalchemy_utils import drop_tables, table_columns
from flexget.utils.cached_input import cached
from flexget.entry import Entry

try:
    from flexget.plugins.api_tvdb import lookup_series
except ImportError:
    raise DependencyError(issued_by='thetvdb_favorites', missing='api_tvdb',
                          message='thetvdb_lookup requires the `api_tvdb` plugin')

log = logging.getLogger('thetvdb_favorites')
Base = schema.versioned_base('thetvdb_favorites', 0)


@schema.upgrade('thetvdb_favorites')
def upgrade(ver, session):
    """Migrate pre-versioned tables: drop the legacy table and recreate it."""
    if ver is None:
        columns = table_columns('thetvdb_favorites', session)
        # Old schema lacked the `series_ids` column; rebuild from the model.
        if not 'series_ids' in columns:
            # Drop the old table
            log.info('Dropping old version of thetvdb_favorites table from db')
            drop_tables(['thetvdb_favorites'], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
        ver = 0
    return ver
# FlexGet remember_rejected plugin (chunk): schema setup and upgrade (truncated).
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, Unicode, DateTime, ForeignKey, and_, Index
from sqlalchemy.orm import relation
from flexget import schema
from flexget.event import event
from flexget.plugin import register_plugin, register_parser_option, priority
from flexget.utils.sqlalchemy_utils import table_columns, drop_tables, table_add_column
from flexget.utils.tools import parse_timedelta

# NOTE(review): logger name 'remember_rej' is abbreviated relative to the
# plugin name 'remember_rejected' — confirm this is intentional.
log = logging.getLogger('remember_rej')
Base = schema.versioned_base('remember_rejected', 3)


# NOTE(review): this upgrade chain is truncated in this chunk; version
# steps past 0 are not visible here.
@schema.upgrade('remember_rejected')
def upgrade(ver, session):
    """Step remember_rejected tables from older schema versions towards 3."""
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            # Drop the old table
            log.info(
                'Dropping old version of remember_rejected_entry table from db'
            )
            drop_tables(['remember_rejected_entry'], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
            # We go directly to version 2, as remember_rejected_entries table has just been made from current model
            # TODO: Fix this somehow. Just avoid dropping tables?
            ver = 3
        else:
            ver = 0
# FlexGet queue_base plugin (chunk): shared queue model and filter base class.
from datetime import datetime
import logging
from sqlalchemy import Column, Integer, Boolean, String, Unicode, DateTime
from flexget.schema import versioned_base
from flexget.plugin import priority

log = logging.getLogger('queue')
Base = versioned_base('queue', 0)


class QueuedItem(Base):
    """Base row for any queued item; subclasses join on `id` per item type."""
    __tablename__ = 'queue'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    added = Column(DateTime)
    # immortal items are forced regardless of normal quality/filter rules
    immortal = Column(Boolean)
    # These fields are populated when the queue item has been downloaded
    downloaded = Column(DateTime)
    entry_title = Column(Unicode)
    entry_url = Column(Unicode)
    entry_original_url = Column(Unicode)
    # Configuration for joined table inheritance
    discriminator = Column('type', String)
    __mapper_args__ = {'polymorphic_on': discriminator}

    def __init__(self, **kwargs):
        super(QueuedItem, self).__init__(**kwargs)
        # Record when the item entered the queue.
        self.added = datetime.now()


# NOTE(review): class body is truncated in this chunk.
class FilterQueueBase(object):
# FlexGet remember_rejected plugin (chunk, py3-compat variant): schema setup
# and DB upgrade (truncated mid-function).
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, Unicode, DateTime, ForeignKey, and_, Index
from sqlalchemy.orm import relation
from flexget import schema
from flexget.event import event
from flexget.plugin import register_plugin, register_parser_option, priority
from flexget.utils.sqlalchemy_utils import table_columns, drop_tables, table_add_column
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('remember_rej')
Base = schema.versioned_base('remember_rejected', 3)


# NOTE(review): truncated — the `if ver == 0:` branch body is not visible.
@schema.upgrade('remember_rejected')
def upgrade(ver, session):
    """Step remember_rejected tables from older schema versions towards 3."""
    if ver is None:
        columns = table_columns('remember_rejected_entry', session)
        if 'uid' in columns:
            # Drop the old table
            log.info('Dropping old version of remember_rejected_entry table from db')
            drop_tables(['remember_rejected_entry'], session)
            # Create new table from the current model
            Base.metadata.create_all(bind=session.bind)
            # We go directly to version 2, as remember_rejected_entries table has just been made from current model
            # TODO: Fix this somehow. Just avoid dropping tables?
            ver = 3
        else:
            ver = 0
    if ver == 0:
import logging import datetime import os from sqlalchemy import Column, Integer, String, DateTime from flexget import schema from flexget.plugin import register_plugin, PluginWarning from flexget.utils.sqlalchemy_utils import table_columns, table_add_column log = logging.getLogger('make_rss') Base = schema.versioned_base('make_rss', 0) rss2gen = True try: import PyRSS2Gen except: rss2gen = False @schema.upgrade('make_rss') def upgrade(ver, session): if ver is None: columns = table_columns('make_rss', session) if not 'rsslink' in columns: log.info('Adding rsslink column to table make_rss.') table_add_column('make_rss', 'rsslink', String, session) ver = 0 return ver class RSSEntry(Base):
# FlexGet delay plugin (chunk): delayed-entry model (schema v1, legacy fields).
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, Unicode, DateTime, PickleType, Index
from flexget import schema
from flexget.entry import Entry
from flexget.plugin import register_plugin, priority, PluginError, get_plugin_by_name, DependencyError
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('delay')
Base = schema.versioned_base('delay', 1)


class DelayedEntry(Base):
    """An entry held back until `expire`, serialized via a safe pickle."""
    __tablename__ = 'delay'

    id = Column(Integer, primary_key=True)
    feed = Column(String)
    title = Column(Unicode)
    expire = Column(DateTime)
    # safe_pickle_synonym rejects unpicklable / unsafe values on assignment.
    _entry = Column('entry', PickleType(mutable=False))
    entry = safe_pickle_synonym('_entry')

    def __repr__(self):
        return '<DelayedEntry(title=%s)>' % self.title

# Composite index: lookups are always by (feed, title).
Index('delay_feed_title', DelayedEntry.feed, DelayedEntry.title)

# TODO: index "expire, feed"
# FlexGet queue_base plugin (chunk): shared queue model; FilterQueueBase body
# is truncated after its docstring in this chunk.
from datetime import datetime
import logging
from sqlalchemy import Column, Integer, Boolean, String, Unicode, DateTime
from flexget.schema import versioned_base

log = logging.getLogger('queue')
Base = versioned_base('queue', 0)


class QueuedItem(Base):
    """Base row for any queued item; subclasses join on `id` per item type."""
    __tablename__ = 'queue'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    added = Column(DateTime)
    # immortal items are forced regardless of normal quality/filter rules
    immortal = Column(Boolean)
    # These fields are populated when the queue item has been downloaded
    downloaded = Column(DateTime)
    entry_title = Column(Unicode)
    entry_url = Column(Unicode)
    entry_original_url = Column(Unicode)
    # Configuration for joined table inheritance
    discriminator = Column('type', String)
    __mapper_args__ = {'polymorphic_on': discriminator}

    def __init__(self, **kwargs):
        super(QueuedItem, self).__init__(**kwargs)
        # Record when the item entered the queue.
        self.added = datetime.now()


class FilterQueueBase(object):
    """Base class to handle general tasks of keeping a queue of wanted items."""
# FlexGet input cache (chunk): cached-input models (InputCacheEntry truncated).
import copy
import logging
import hashlib
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, PickleType, Unicode, ForeignKey
from sqlalchemy.orm import relation
from flexget import schema
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta
from flexget.entry import Entry
from flexget.event import event
from flexget.plugin import PluginError

log = logging.getLogger('input_cache')
Base = schema.versioned_base('input_cache', 0)


class InputCache(Base):
    """One cached input run, keyed by input name + config hash."""
    __tablename__ = 'input_cache'

    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    # hash of the input configuration; identifies a distinct cache slot
    hash = Column(String)
    added = Column(DateTime, default=datetime.now)
    # cascade deletes cached entries together with their parent cache row
    entries = relation('InputCacheEntry', backref='cache', cascade='all, delete, delete-orphan')


# NOTE(review): class body is truncated in this chunk.
class InputCacheEntry(Base):
# FlexGet seen plugin (chunk, py3-compat variant): schema setup and upgrade.
# NOTE(review): truncated mid-statement at the final session.execute call.
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, Unicode, Boolean, asc, or_, select, update, Index
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation
from flexget.manager import Session
from flexget.event import event
from flexget.plugin import register_plugin, priority, register_parser_option
from flexget import schema
from flexget.utils.sqlalchemy_utils import table_schema, table_add_column
from flexget.utils.imdb import is_imdb_url, extract_id

log = logging.getLogger('seen')
Base = schema.versioned_base('seen', 4)


@schema.upgrade('seen')
def upgrade(ver, session):
    """Step the `seen` tables from older schema versions towards version 4."""
    if ver is None:
        # v0 -> v1: seen movies stored the full imdb_url; normalize to imdb_id.
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(
                select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')):
            new_values = {
                'field': 'imdb_id',
                'value': extract_id(row['value'])
            }
            session.execute(
from flexget.manager import Session from flexget.utils import qualities from flexget.utils.imdb import extract_id from flexget.utils.database import quality_property, with_session from flexget.utils.sqlalchemy_utils import table_exists, table_schema from flexget.plugin import DependencyError, get_plugin_by_name, register_plugin from flexget.event import event try: from flexget.plugins.filter import queue_base except ImportError: raise DependencyError(issued_by='movie_queue', missing='queue_base', message='movie_queue requires the queue_base plugin') log = logging.getLogger('movie_queue') Base = schema.versioned_base('movie_queue', 0) @event('manager.startup') def migrate_imdb_queue(manager): """If imdb_queue table is found, migrate the data to movie_queue""" session = Session() try: if table_exists('imdb_queue', session): log.info('Migrating imdb_queue items to movie_queue') old_table = table_schema('imdb_queue', session) for row in session.execute(old_table.select()): try: queue_add(imdb_id=row['imdb_id'], quality=row['quality'], force=row['immortal'], session=session) except QueueError, e: log.error('Unable to migrate %s from imdb_queue to movie_queue' % row['title'])
# FlexGet task module (chunk): config-hash model for change detection.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk — confirm it is imported earlier in the file.
from sqlalchemy import Column, Unicode, String, Integer
from flexget import validator
from flexget import schema
from flexget.manager import Session, register_config_key
from flexget.plugin import (get_plugins_by_phase, get_plugin_by_name, task_phases, PluginWarning, PluginError,
                            DependencyError, plugins as all_plugins)
from flexget.utils.simple_persistence import SimpleTaskPersistence
from flexget.event import fire_event
from flexget.entry import Entry, EntryUnicodeError
import flexget.utils.requests as requests

log = logging.getLogger('task')
# Table name kept as legacy 'feed' for backward compatibility.
Base = schema.versioned_base('feed', 0)


class TaskConfigHash(Base):
    """Stores the config hash for tasks so that we can tell if the config has
    changed since last run."""
    __tablename__ = 'feed_config_hash'

    id = Column(Integer, primary_key=True)
    # Column named 'name' in the DB, exposed as `task` on the model.
    task = Column('name', Unicode, index=True, nullable=False)
    hash = Column('hash', String)

    def __repr__(self):
        return '<TaskConfigHash(task=%s,hash=%s)>' % (self.task, self.hash)
# FlexGet movie_queue plugin (chunk, schema v2 variant): startup migration
# from the legacy imdb_queue table. NOTE(review): `logging`, `schema` and
# `Session` are used below but their imports are not visible in this chunk;
# `queue_add` is defined elsewhere in the file. Truncated mid-call.
from flexget.utils import qualities
from flexget.utils.imdb import extract_id
from flexget.utils.database import quality_requirement_property, with_session
from flexget.utils.sqlalchemy_utils import table_exists, table_schema
from flexget.plugin import DependencyError, get_plugin_by_name, register_plugin
from flexget.event import event

try:
    from flexget.plugins.filter import queue_base
except ImportError:
    raise DependencyError(issued_by='movie_queue', missing='queue_base',
                          message='movie_queue requires the queue_base plugin')

log = logging.getLogger('movie_queue')
Base = schema.versioned_base('movie_queue', 2)


@event('manager.startup')
def migrate_imdb_queue(manager):
    """If imdb_queue table is found, migrate the data to movie_queue"""
    session = Session()
    try:
        if table_exists('imdb_queue', session):
            log.info('Migrating imdb_queue items to movie_queue')
            old_table = table_schema('imdb_queue', session)
            for row in session.execute(old_table.select()):
                try:
                    queue_add(imdb_id=row['imdb_id'], quality=row['quality'], force=row['immortal'],
# FlexGet delay plugin (chunk): delayed-entry model; DB column is still
# named 'feed' but the attribute is exposed as `task`.
import logging
from datetime import datetime
from sqlalchemy import Column, Integer, String, Unicode, DateTime, PickleType, Index
from flexget import schema
from flexget.entry import Entry
from flexget.plugin import register_plugin, priority, PluginError
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta

log = logging.getLogger("delay")
Base = schema.versioned_base("delay", 1)


class DelayedEntry(Base):
    """An entry held back until `expire`, serialized via a safe pickle."""
    __tablename__ = "delay"

    id = Column(Integer, primary_key=True)
    # Legacy column name 'feed' kept for backward compatibility.
    task = Column("feed", String)
    title = Column(Unicode)
    expire = Column(DateTime)
    # safe_pickle_synonym rejects unpicklable / unsafe values on assignment.
    _entry = Column("entry", PickleType)
    entry = safe_pickle_synonym("_entry")

    def __repr__(self):
        return "<DelayedEntry(title=%s)>" % self.title

# Composite index: lookups are always by (task, title).
Index("delay_feed_title", DelayedEntry.task, DelayedEntry.title)

# TODO: index "expire, task"
# FlexGet input cache (chunk, older variant importing Entry from
# flexget.feed): cached-input models (InputCacheEntry truncated after its
# table name).
import copy
import logging
import hashlib
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, PickleType, Unicode, ForeignKey
from sqlalchemy.orm import relation
from flexget import schema
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta
from flexget.feed import Entry
from flexget.event import event

log = logging.getLogger("input_cache")
Base = schema.versioned_base("input_cache", 0)


class InputCache(Base):
    """One cached input run, keyed by input name + config hash."""
    __tablename__ = "input_cache"

    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    # hash of the input configuration; identifies a distinct cache slot
    hash = Column(String)
    added = Column(DateTime, default=datetime.now)
    # cascade deletes cached entries together with their parent cache row
    entries = relation("InputCacheEntry", backref="cache", cascade="all, delete, delete-orphan")


# NOTE(review): remainder of this class is truncated in this chunk.
class InputCacheEntry(Base):
    __tablename__ = "input_cache_entry"
# FlexGet myepisodes plugin (chunk): series-name -> myepisodes-id cache model.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk; `cookielib` is Python-2-only (renamed http.cookiejar in 3).
import cookielib
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime
from flexget import schema
from flexget.plugin import register_plugin, DependencyError, PluginWarning

try:
    from flexget.plugins.api_tvdb import lookup_series
except ImportError:
    raise DependencyError(issued_by='myepisodes', missing='api_tvdb',
                          message='myepisodes requires the `api_tvdb` plugin')

log = logging.getLogger('myepisodes')
Base = schema.versioned_base('myepisodes', 0)


class MyEpisodesInfo(Base):
    """Caches the mapping from a series name to its myepisodes.com id."""
    __tablename__ = 'myepisodes'

    id = Column(Integer, primary_key=True)
    # unique=True enforces at most one cache row per series name
    series_name = Column(String, unique=True)
    myepisodes_id = Column(Integer, unique=True)
    updated = Column(DateTime)

    def __init__(self, series_name, myepisodes_id):
        self.series_name = series_name
        self.myepisodes_id = myepisodes_id
        # Timestamp the cache entry so staleness can be checked later.
        self.updated = datetime.now()
# FlexGet delay plugin (chunk, py3-compat variant): delayed-entry model;
# DB column is still named 'feed' but the attribute is exposed as `task`.
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime
from sqlalchemy import Column, Integer, String, Unicode, DateTime, PickleType, Index
from flexget import schema
from flexget.entry import Entry
from flexget.plugin import register_plugin, priority, PluginError
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('delay')
Base = schema.versioned_base('delay', 1)


class DelayedEntry(Base):
    """An entry held back until `expire`, serialized via a safe pickle."""
    __tablename__ = 'delay'

    id = Column(Integer, primary_key=True)
    # Legacy column name 'feed' kept for backward compatibility.
    task = Column('feed', String)
    title = Column(Unicode)
    expire = Column(DateTime)
    # safe_pickle_synonym rejects unpicklable / unsafe values on assignment.
    _entry = Column('entry', PickleType)
    entry = safe_pickle_synonym('_entry')

    def __repr__(self):
        return '<DelayedEntry(title=%s)>' % self.title

# Composite index: lookups are always by (task, title).
Index('delay_feed_title', DelayedEntry.task, DelayedEntry.title)

# TODO: index "expire, task"
# FlexGet archive plugin (chunk): tag/source association tables and the
# start of ArchiveEntry. NOTE(review): `logging` is used below but
# `import logging` is not visible in this chunk.
from sqlalchemy.schema import Table, ForeignKey
from flexget import schema
from flexget.event import event
from flexget.entry import Entry
from flexget.plugin import priority, register_parser_option, register_plugin
from flexget.utils.sqlalchemy_utils import table_schema, get_index_by_name
from flexget.utils.tools import console, strip_html
from sqlalchemy import Column, Integer, DateTime, Unicode, Index
from datetime import datetime
from flexget.manager import Session

log = logging.getLogger('archive')

SCHEMA_VER = 0

Base = schema.versioned_base('archive', SCHEMA_VER)

# many-to-many: archive entries <-> tags
archive_tags_table = Table('archive_entry_tags', Base.metadata,
                           Column('entry_id', Integer, ForeignKey('archive_entry.id')),
                           Column('tag_id', Integer, ForeignKey('archive_tag.id')),
                           Index('ix_archive_tags', 'entry_id', 'tag_id'))

# many-to-many: archive entries <-> sources
archive_sources_table = Table('archive_entry_sources', Base.metadata,
                              Column('entry_id', Integer, ForeignKey('archive_entry.id')),
                              Column('source_id', Integer, ForeignKey('archive_source.id')),
                              Index('ix_archive_sources', 'entry_id', 'source_id'))


# NOTE(review): remainder of this class is truncated in this chunk.
class ArchiveEntry(Base):
    __tablename__ = 'archive_entry'
    # Composite index: archive lookups search by (title, url).
    __table_args__ = (Index('ix_archive_title_url', 'title', 'url'),)
# FlexGet api_tvdb plugin (chunk): thetvdb.com API constants and upgrade.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk. The module-level name `requests` here is a Session INSTANCE
# (shadows the usual module name) — confirm downstream usage expects that.
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation
from requests import RequestException
from flexget import schema
from flexget.utils.tools import decode_html
from flexget.utils.requests import Session as ReqSession
from flexget.utils.database import with_session, pipe_list_synonym, text_date_synonym
from flexget.utils.sqlalchemy_utils import table_add_column
from flexget.manager import Session
from flexget.utils.simple_persistence import SimplePersistence

SCHEMA_VER = 2

log = logging.getLogger('api_tvdb')
Base = schema.versioned_base('api_tvdb', SCHEMA_VER)
# Shared HTTP session for thetvdb with a 25 second timeout.
requests = ReqSession(timeout=25)

# This is a FlexGet API key
api_key = '4D297D8CFDE0E105'
language = 'en'
server = 'http://www.thetvdb.com/api/'

_mirrors = {}

persist = SimplePersistence('api_tvdb')


# NOTE(review): this upgrade chain is truncated in this chunk.
@schema.upgrade('api_tvdb')
def upgrade(ver, session):
    """Step the api_tvdb cache tables towards SCHEMA_VER."""
    if ver is None:
        # Invalidate the stored update marker so the cache refreshes.
        if 'last_updated' in persist:
            del persist['last_updated']
# FlexGet archive plugin (chunk, variant): tag/source association tables;
# ArchiveEntry class body is truncated in this chunk. NOTE(review):
# `logging` is used below but `import logging` is not visible here.
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.schema import Table, ForeignKey
from sqlalchemy import Column, Integer, DateTime, Unicode, Index
from flexget import schema
from flexget.event import event
from flexget.entry import Entry
from flexget.plugin import priority, register_parser_option, register_plugin
from flexget.utils.sqlalchemy_utils import table_schema, get_index_by_name
from flexget.utils.tools import console, strip_html
from flexget.manager import Session

log = logging.getLogger('archive')

SCHEMA_VER = 0

Base = schema.versioned_base('archive', SCHEMA_VER)

# many-to-many: archive entries <-> tags
archive_tags_table = Table(
    'archive_entry_tags', Base.metadata,
    Column('entry_id', Integer, ForeignKey('archive_entry.id')),
    Column('tag_id', Integer, ForeignKey('archive_tag.id')),
    Index('ix_archive_tags', 'entry_id', 'tag_id'))

# many-to-many: archive entries <-> sources
archive_sources_table = Table(
    'archive_entry_sources', Base.metadata,
    Column('entry_id', Integer, ForeignKey('archive_entry.id')),
    Column('source_id', Integer, ForeignKey('archive_source.id')),
    Index('ix_archive_sources', 'entry_id', 'source_id'))


# NOTE(review): class body is truncated in this chunk.
class ArchiveEntry(Base):
# FlexGet backlog plugin (chunk): schema setup and DB upgrade (truncated).
import logging
import pickle
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, PickleType, Index
from flexget import schema
from flexget.entry import Entry
from flexget.manager import Session
from flexget.plugin import register_plugin, priority
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.sqlalchemy_utils import table_schema
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('backlog')
Base = schema.versioned_base('backlog', 1)


# NOTE(review): this upgrade chain is truncated in this chunk.
@schema.upgrade('backlog')
def upgrade(ver, session):
    """Step the backlog table from older schema versions towards version 1."""
    if ver is None:
        # Make sure there is no data we can't load in the backlog table
        backlog_table = table_schema('backlog', session)
        try:
            # Probe every stored pickle; any failure means stale/bad data.
            for item in session.query('entry').select_from(
                    backlog_table).all():
                pickle.loads(item.entry)
        except (ImportError, TypeError):
            # If there were problems, we can drop the data.
            log.info(
                'Backlog table contains unloadable data, clearing old data.')
            session.execute(backlog_table.delete())
        ver = 0
"""Logging utilities""" from __future__ import unicode_literals, division, absolute_import import logging import hashlib from datetime import datetime, timedelta from sqlalchemy import Column, Integer, String, DateTime, Index from flexget import schema from flexget.utils.sqlalchemy_utils import table_schema from flexget.manager import Session from flexget.event import event log = logging.getLogger('util.log') Base = schema.versioned_base('log_once', 0) @schema.upgrade('log_once') def upgrade(ver, session): if ver is None: log.info('Adding index to md5sum column of log_once table.') table = table_schema('log_once', session) Index('log_once_md5sum', table.c.md5sum, unique=True).create() ver = 0 return ver class LogMessage(Base): """Declarative""" __tablename__ = 'log_once'
# FlexGet myepisodes plugin (chunk): series-name -> myepisodes-id cache model.
# NOTE(review): `logging` is used below but `import logging` is not visible
# in this chunk — confirm it is imported earlier in the file.
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime
from flexget import schema
from flexget.plugin import register_plugin, DependencyError, PluginWarning

try:
    from flexget.plugins.api_tvdb import lookup_series
except ImportError:
    raise DependencyError(issued_by='myepisodes', missing='api_tvdb',
                          message='myepisodes requires the `api_tvdb` plugin')

log = logging.getLogger('myepisodes')
Base = schema.versioned_base('myepisodes', 0)


class MyEpisodesInfo(Base):
    """Caches the mapping from a series name to its myepisodes.com id."""
    __tablename__ = 'myepisodes'

    id = Column(Integer, primary_key=True)
    # One cache row per series name / per myepisodes id.
    series_name = Column(String, unique=True)
    myepisodes_id = Column(Integer, unique=True)
    updated = Column(DateTime)

    def __init__(self, series_name, myepisodes_id):
        self.series_name = series_name
        self.myepisodes_id = myepisodes_id
        # Timestamp the cache entry so staleness can be checked later.
        self.updated = datetime.now()
from __future__ import unicode_literals, division, absolute_import import logging import datetime import os from sqlalchemy import Column, Integer, String, DateTime from flexget import schema from flexget.plugin import register_plugin, PluginWarning from flexget.utils.sqlalchemy_utils import table_columns, table_add_column from flexget.utils.template import render_from_entry, get_template log = logging.getLogger("make_rss") Base = schema.versioned_base("make_rss", 0) rss2gen = True try: import PyRSS2Gen except: rss2gen = False @schema.upgrade("make_rss") def upgrade(ver, session): if ver is None: columns = table_columns("make_rss", session) if not "rsslink" in columns: log.info("Adding rsslink column to table make_rss.") table_add_column("make_rss", "rsslink", String, session) ver = 0 return ver
from sqlalchemy import Table, Column, Integer, Float, String, Unicode, Boolean, DateTime, func
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation

from flexget import schema
from flexget.utils import json
from flexget.utils.sqlalchemy_utils import table_add_column, table_schema
from flexget.utils.titles import MovieParser
from flexget.utils.tools import urlopener
from flexget.utils.database import text_date_synonym, year_property, with_session
from flexget.manager import Session
from flexget.plugin import register_plugin

# NOTE(review): `logging` is used below but not imported in this chunk;
# presumably `import logging` appears earlier in the file -- confirm.
log = logging.getLogger('api_tmdb')
Base = schema.versioned_base('api_tmdb', 0)

# This is a FlexGet API key
api_key = 'bdfc018dbdb7c243dc7cb1454ff74b95'
lang = 'en'
server = 'http://api.themoviedb.org'


@schema.upgrade('api_tmdb')
def upgrade(ver, session):
    # ver is None means the cache table predates schema versioning; widen it
    # with the columns the current model expects.  (Function continues beyond
    # this chunk.)
    if ver is None:
        log.info(
            'Adding columns to tmdb cache table, marking current cache as expired.'
        )
        table_add_column('tmdb_movies', 'runtime', Integer, session)
        table_add_column('tmdb_movies', 'tagline', Unicode, session)
You can safely use task.simple_persistence and manager.persist, if we implement
something better we can replace underlying mechanism in single point (and
provide transparent switch).
"""

import logging
from datetime import datetime
import pickle

from sqlalchemy import Column, Integer, String, DateTime, PickleType, select, Index
from UserDict import DictMixin

from flexget import schema
from flexget.manager import Session
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.sqlalchemy_utils import table_schema, create_index

log = logging.getLogger('util.simple_persistence')
Base = schema.versioned_base('simple_persistence', 2)


@schema.upgrade('simple_persistence')
def upgrade(ver, session):
    if ver is None:
        # Upgrade to version 0 was a failed attempt at cleaning bad entries from our table, better attempt in ver 1
        ver = 0
    if ver == 0:
        # Remove any values that are not loadable.
        table = table_schema('simple_persistence', session)
        for row in session.execute(
                select([table.c.id, table.c.plugin, table.c.key, table.c.value])):
            # Unpickle each stored value to verify it can still be loaded;
            # (loop body continues beyond this chunk.)
            try:
                p = pickle.loads(row['value'])
from sqlalchemy import Table, Column, Integer, Float, String, Unicode, Boolean, DateTime from sqlalchemy.schema import ForeignKey, Index from sqlalchemy.orm import relation, joinedload_all from flexget import schema from flexget.entry import Entry from flexget.plugin import register_plugin, internet, PluginError, priority from flexget.manager import Session from flexget.utils.log import log_once from flexget.utils.imdb import ImdbSearch, ImdbParser, extract_id, make_url from flexget.utils.sqlalchemy_utils import table_add_column from flexget.utils.database import with_session from flexget.utils.sqlalchemy_utils import table_columns, get_index_by_name SCHEMA_VER = 1 Base = schema.versioned_base('imdb_lookup', 1) # association tables genres_table = Table('imdb_movie_genres', Base.metadata, Column('movie_id', Integer, ForeignKey('imdb_movies.id')), Column('genre_id', Integer, ForeignKey('imdb_genres.id')), Index('ix_imdb_movie_genres', 'movie_id', 'genre_id')) languages_table = Table('imdb_movie_languages', Base.metadata, Column('movie_id', Integer, ForeignKey('imdb_movies.id')), Column('language_id', Integer, ForeignKey('imdb_languages.id')), Index('ix_imdb_movie_languages', 'movie_id', 'language_id')) actors_table = Table('imdb_movie_actors', Base.metadata, Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
from sqlalchemy.schema import ForeignKey, Index
from sqlalchemy.orm import relation, joinedload_all

from flexget import schema
from flexget.entry import Entry
from flexget.plugin import register_plugin, internet, PluginError, priority
from flexget.manager import Session
from flexget.utils.log import log_once
from flexget.utils.imdb import ImdbSearch, ImdbParser, extract_id, make_url
from flexget.utils.sqlalchemy_utils import table_add_column
from flexget.utils.database import with_session
from flexget.utils.sqlalchemy_utils import table_columns, get_index_by_name, table_schema

SCHEMA_VER = 4

Base = schema.versioned_base('imdb_lookup', SCHEMA_VER)

# NOTE(review): `Table`, `Column`, `Integer` are used below but their import
# is not visible in this chunk; presumably imported earlier -- confirm.

# association tables (many-to-many links between movies and their attributes)
genres_table = Table('imdb_movie_genres', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
                     Column('genre_id', Integer, ForeignKey('imdb_genres.id')),
                     Index('ix_imdb_movie_genres', 'movie_id', 'genre_id'))

actors_table = Table('imdb_movie_actors', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
                     Column('actor_id', Integer, ForeignKey('imdb_actors.id')),
                     Index('ix_imdb_movie_actors', 'movie_id', 'actor_id'))

directors_table = Table('imdb_movie_directors', Base.metadata,
                        Column('movie_id', Integer, ForeignKey('imdb_movies.id')),
import logging
import pickle
from datetime import datetime

from sqlalchemy import Column, Integer, String, DateTime, PickleType, Index

from flexget import schema
from flexget.entry import Entry
from flexget.manager import Session
from flexget.plugin import register_plugin, priority
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.sqlalchemy_utils import table_schema
from flexget.utils.tools import parse_timedelta

log = logging.getLogger('backlog')
Base = schema.versioned_base('backlog', 1)


@schema.upgrade('backlog')
def upgrade(ver, session):
    if ver is None:
        # Make sure there is no data we can't load in the backlog table
        backlog_table = table_schema('backlog', session)
        try:
            for item in session.query('entry').select_from(backlog_table).all():
                pickle.loads(item.entry)
        except (ImportError, TypeError):
            # If there were problems, we can drop the data.
            log.info('Backlog table contains unloadable data, clearing old data.')
            session.execute(backlog_table.delete())
        ver = 0
    if ver == 0:
        # (upgrade step continues beyond this chunk)
        backlog_table = table_schema('backlog', session)
import copy
import logging
import hashlib
from datetime import datetime, timedelta

from sqlalchemy import Column, Integer, String, DateTime, PickleType, Unicode, ForeignKey
from sqlalchemy.orm import relation

from flexget import schema
from flexget.utils.database import safe_pickle_synonym
from flexget.utils.tools import parse_timedelta
from flexget.entry import Entry
from flexget.event import event
from flexget.plugin import PluginError

log = logging.getLogger('input_cache')
Base = schema.versioned_base('input_cache', 0)


class InputCache(Base):
    # One cached run of an input plugin, identified by name + config hash;
    # deleting a cache row cascades to its entries.
    __tablename__ = 'input_cache'

    id = Column(Integer, primary_key=True)
    name = Column(Unicode)
    hash = Column(String)
    added = Column(DateTime, default=datetime.now)
    entries = relation('InputCacheEntry', backref='cache', cascade='all, delete, delete-orphan')
"""Logging utilities""" import logging import hashlib from datetime import datetime, timedelta from sqlalchemy import Column, Integer, String, DateTime, Index from flexget import schema from flexget.utils.sqlalchemy_utils import table_schema from flexget.manager import Session from flexget.event import event log = logging.getLogger('util.log') Base = schema.versioned_base('log_once', 0) @schema.upgrade('log_once') def upgrade(ver, session): if ver is None: log.info('Adding index to md5sum column of log_once table.') table = table_schema('log_once', session) Index('log_once_md5sum', table.c.md5sum, unique=True).create() ver = 0 return ver class LogMessage(Base): """Declarative""" __tablename__ = 'log_once' id = Column(Integer, primary_key=True)
from sqlalchemy import Table, Column, Integer, String, DateTime, func, sql from sqlalchemy.schema import ForeignKey, Index from sqlalchemy.orm import relation from flexget import schema from flexget.plugin import internet, PluginError from flexget.manager import Session from flexget.utils import json from flexget.utils.titles import MovieParser from flexget.utils.tools import urlopener from flexget.utils.database import text_date_synonym from flexget.utils.sqlalchemy_utils import table_schema, table_add_column log = logging.getLogger('api_rottentomatoes') Base = schema.versioned_base('api_rottentomatoes', 2) # This is developer Atlanta800's API key API_KEY = 'rh8chjzp8vu6gnpwj88736uv' API_VER = 'v1.0' SERVER = 'http://api.rottentomatoes.com/api/public' MIN_MATCH = 0.5 MIN_DIFF = 0.01 @schema.upgrade('api_rottentomatoes') def upgrade(ver, session): if ver is 0: table_names = ['rottentomatoes_actors', 'rottentomatoes_alternate_ids', 'rottentomatoes_directors', 'rottentomatoes_genres', 'rottentomatoes_links',
from sqlalchemy import Table, Column, Integer, String, DateTime, func from sqlalchemy.schema import ForeignKey, Index from sqlalchemy.orm import relation from sqlalchemy import sql from sqlalchemy.exc import IntegrityError from flexget import schema from flexget.plugin import internet, PluginError from flexget.manager import Session from flexget.utils import json from flexget.utils.titles import MovieParser from flexget.utils.tools import urlopener from flexget.utils.database import text_date_synonym from flexget.utils.sqlalchemy_utils import table_schema, table_add_column log = logging.getLogger("api_rottentomatoes") Base = schema.versioned_base("api_rottentomatoes", 2) # This is developer Atlanta800's API key API_KEY = "rh8chjzp8vu6gnpwj88736uv" API_VER = "v1.0" SERVER = "http://api.rottentomatoes.com/api/public" MIN_MATCH = 0.5 MIN_DIFF = 0.01 @schema.upgrade("api_rottentomatoes") def upgrade(ver, session): if ver is 0: table_names = [ "rottentomatoes_actors",
from sqlalchemy import Table, Column, Integer, Float, String, Unicode, Boolean, DateTime from sqlalchemy.schema import ForeignKey, Index from sqlalchemy.orm import relation, joinedload_all from flexget import schema from flexget.entry import Entry from flexget.plugin import register_plugin, internet, PluginError, priority from flexget.manager import Session from flexget.utils.log import log_once from flexget.utils.imdb import ImdbSearch, ImdbParser, extract_id, make_url from flexget.utils.sqlalchemy_utils import table_add_column from flexget.utils.database import with_session from flexget.utils.sqlalchemy_utils import table_columns, get_index_by_name SCHEMA_VER = 1 Base = schema.versioned_base("imdb_lookup", 1) # association tables genres_table = Table( "imdb_movie_genres", Base.metadata, Column("movie_id", Integer, ForeignKey("imdb_movies.id")), Column("genre_id", Integer, ForeignKey("imdb_genres.id")), Index("ix_imdb_movie_genres", "movie_id", "genre_id"), ) languages_table = Table( "imdb_movie_languages", Base.metadata, Column("movie_id", Integer, ForeignKey("imdb_movies.id")),
import posixpath

from sqlalchemy import Table, Column, Integer, Float, String, Unicode, Boolean, DateTime, func
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import relation

from flexget import schema
from flexget.utils.sqlalchemy_utils import table_add_column, table_schema
from flexget.utils.titles import MovieParser
from flexget.utils import requests
from flexget.utils.database import text_date_synonym, year_property, with_session
from flexget.manager import Session
from flexget.plugin import register_plugin

# NOTE(review): `logging` is used below but not imported in this chunk;
# presumably `import logging` appears earlier in the file -- confirm.
log = logging.getLogger('api_tmdb')
Base = schema.versioned_base('api_tmdb', 0)

# This is a FlexGet API key
api_key = 'bdfc018dbdb7c243dc7cb1454ff74b95'
lang = 'en'
server = 'http://api.themoviedb.org'


@schema.upgrade('api_tmdb')
def upgrade(ver, session):
    # ver is None means the cache table predates schema versioning; widen it
    # with the columns the current model expects.  (Function continues beyond
    # this chunk.)
    if ver is None:
        log.info('Adding columns to tmdb cache table, marking current cache as expired.')
        table_add_column('tmdb_movies', 'runtime', Integer, session)
        table_add_column('tmdb_movies', 'tagline', Unicode, session)
        table_add_column('tmdb_movies', 'budget', Integer, session)
        table_add_column('tmdb_movies', 'revenue', Integer, session)
import os
import posixpath

from sqlalchemy import Table, Column, Integer, Float, String, Boolean, DateTime, func
from sqlalchemy.schema import ForeignKey, Index
from sqlalchemy.orm import relation

from flexget import schema
from flexget.utils import json
from flexget.utils.sqlalchemy_utils import table_add_column, table_schema
from flexget.utils.titles import MovieParser
from flexget.utils.tools import urlopener
from flexget.utils.database import text_date_synonym, year_property, with_session
from flexget.manager import Session
from flexget.plugin import register_plugin, DependencyError

# NOTE(review): `logging` is used below but not imported in this chunk;
# presumably `import logging` appears earlier in the file -- confirm.
log = logging.getLogger('api_rottentomatoes')
Base = schema.versioned_base('api_rottentomatoes', 0)

# This is developer Atlanta800's API key
API_KEY = 'rh8chjzp8vu6gnpwj88736uv'
API_VER = 'v1.0'
SERVER = 'http://api.rottentomatoes.com/api/public'

# association tables (many-to-many links between movies and their attributes)
genres_table = Table('rottentomatoes_movie_genres', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('rottentomatoes_movies.id')),
                     Column('genre_id', Integer, ForeignKey('rottentomatoes_genres.id')),
                     Index('ix_rottentomatoes_movie_genres', 'movie_id', 'genre_id'))

actors_table = Table('rottentomatoes_movie_actors', Base.metadata,
                     Column('movie_id', Integer, ForeignKey('rottentomatoes_movies.id')),
from __future__ import unicode_literals, division, absolute_import
import logging
from datetime import datetime, timedelta

from sqlalchemy import Column, Integer, String, Unicode, DateTime
from sqlalchemy.schema import Index, MetaData

from flexget import schema
from flexget.plugin import register_plugin, register_parser_option, priority, DependencyError, get_plugin_by_name
from flexget.manager import Session
from flexget.utils.tools import console, parse_timedelta
from flexget.utils.sqlalchemy_utils import table_add_column

SCHEMA_VER = 2

log = logging.getLogger('failed')
Base = schema.versioned_base('failed', SCHEMA_VER)


@schema.upgrade('failed')
def upgrade(ver, session):
    # Stepwise upgrades: each `if` advances the version by one so a database
    # at any older version passes through every migration in order.
    if ver is None:
        # add count column
        table_add_column('failed', 'count', Integer, session, default=1)
        ver = 0
    if ver == 0:
        # define an index
        log.info('Adding database index ...')
        meta = MetaData(bind=session.connection(), reflect=True)
        failed = meta.tables['failed']
        Index('failed_title_url', failed.c.title, failed.c.url, failed.c.count).create()
        ver = 1
    if ver == 1:
from __future__ import unicode_literals, division, absolute_import
from datetime import datetime
import logging

from sqlalchemy import Column, Integer, Boolean, String, Unicode, DateTime

from flexget import schema
from flexget.plugin import priority
from flexget.utils.sqlalchemy_utils import table_add_column

log = logging.getLogger('queue')
Base = schema.versioned_base('queue', 2)


@schema.upgrade('queue')
def upgrade(ver, session):
    # The ver 0 -> 1 migration is deliberately disabled (see inline comment);
    # there is no column-removal helper, so old databases just skip ahead.
    if False:  # ver == 0: disable this, since we don't have a remove column function
        table_add_column('queue', 'last_emit', DateTime, session)
        ver = 1
    if ver < 2:
        # We don't have a remove column for 'last_emit', do nothing
        ver = 2
    return ver


class QueuedItem(Base):
    # Base row for queued items; plugin-specific queues subclass this
    # (polymorphic single/joined inheritance elsewhere in the project).
    __tablename__ = 'queue'

    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    added = Column(DateTime)
    immortal = Column(Boolean)
from __future__ import unicode_literals, division, absolute_import import logging import datetime import os from sqlalchemy import Column, Integer, String, DateTime from flexget import schema from flexget.plugin import register_plugin, PluginWarning from flexget.utils.sqlalchemy_utils import table_columns, table_add_column from flexget.utils.template import render_from_entry, get_template log = logging.getLogger('make_rss') Base = schema.versioned_base('make_rss', 0) rss2gen = True try: import PyRSS2Gen except: rss2gen = False @schema.upgrade('make_rss') def upgrade(ver, session): if ver is None: columns = table_columns('make_rss', session) if not 'rsslink' in columns: log.info('Adding rsslink column to table make_rss.') table_add_column('make_rss', 'rsslink', String, session) ver = 0 return ver
from flexget import schema
from flexget.event import event
from flexget.utils import qualities
from flexget.utils.log import log_once
from flexget.utils.titles import SeriesParser, ParseWarning
from flexget.utils.sqlalchemy_utils import table_columns, table_exists, drop_tables, table_schema, table_add_column
from flexget.utils.tools import merge_dict_from_to, parse_timedelta
from flexget.utils.database import quality_property, ignore_case_property
from flexget.manager import Session
from flexget.plugin import (register_plugin, register_parser_option, get_plugin_by_name, get_plugin_keywords,
                            PluginWarning, DependencyError, priority)

SCHEMA_VER = 3

# NOTE(review): `logging` is used below but not imported in this chunk;
# presumably `import logging` appears earlier in the file -- confirm.
log = logging.getLogger('series')
Base = schema.versioned_base('series', SCHEMA_VER)


@schema.upgrade('series')
def upgrade(ver, session):
    if ver is None:
        if table_exists('episode_qualities', session):
            log.info('Series database format is too old to upgrade, dropping and recreating tables.')
            # Drop the deprecated data
            drop_tables(['series', 'series_episodes', 'episode_qualities'], session)
            # Create new tables from the current models
            Base.metadata.create_all(bind=session.bind)
        # Upgrade episode_releases table to have a proper count and seed it with appropriate numbers
        columns = table_columns('episode_releases', session)
        # idiomatic membership test: `x not in y` instead of `not x in y`
        if 'proper_count' not in columns:
            log.info('Upgrading episode_releases table to have proper_count column')