def emit(self, record):
    """Persist a log *record* to the database as a LogEntry row."""
    db_session = Session()
    try:
        db_session.add(LogEntry(record))
        db_session.commit()
    finally:
        # Always release the connection, even if the commit fails.
        db_session.close()
def cli_search(options):
    """CLI entry point: search the archive and print matching entries."""
    query_text = " ".join(options.keywords)
    tag_filter = options.tags
    source_filter = options.sources

    def render(ae):
        # Age of the archive entry relative to now.
        age = datetime.now() - ae.added
        console(
            "ID: %-6s | Title: %s\nAdded: %s (%d days ago)\nURL: %s"
            % (ae.id, ae.title, ae.added, age.days, ae.url)
        )
        sources_text = ", ".join(src.name for src in ae.sources)
        tags_text = ", ".join(tag.name for tag in ae.tags)
        console("Source(s): %s | Tag(s): %s" % (sources_text or "N/A", tags_text or "N/A"))
        if ae.description:
            console("Description: %s" % strip_html(ae.description))
        console("---")

    session = Session()
    try:
        console("Searching: %s" % query_text)
        if tag_filter:
            console("Tags: %s" % ", ".join(tag_filter))
        if source_filter:
            console("Sources: %s" % ", ".join(source_filter))
        console("Please wait...")
        console("")
        found_any = False
        for ae in search(session, query_text, tags=tag_filter, sources=source_filter):
            render(ae)
            found_any = True
        if not found_any:
            console("No results found.")
    finally:
        session.close()
def do_cli(manager, options):
    """CLI entry point: print download history, optionally filtered by search term or task."""
    session = Session()
    try:
        console('-- History: ' + '-' * 67)
        query = session.query(History)
        if options.search:
            # Translate spaces and dots into SQL wildcards for a fuzzy match.
            term = options.search.replace(' ', '%').replace('.', '%')
            query = query.filter(History.title.like('%' + term + '%'))
        if options.task:
            query = query.filter(History.task.like('%' + options.task + '%'))
        query = query.order_by(desc(History.time)).limit(options.limit)
        # Iterate oldest-first so the newest item ends up at the bottom.
        for item in reversed(query.all()):
            if options.short:
                console(' %-25s %s' % (item.time.strftime("%c"), item.title))
                continue
            console(' Task : %s' % item.task)
            console(' Title : %s' % item.title)
            console(' Url : %s' % item.url)
            if item.filename:
                console(' Stored : %s' % item.filename)
            console(' Time : %s' % item.time.strftime("%c"))
            console(' Details : %s' % item.details)
            console('-' * 79)
    finally:
        session.close()
def cli_search(options):
    """CLI entry point: search the archive for the given keywords and print matches."""
    term = ' '.join(options.keywords)
    tags = options.tags
    sources = options.sources

    def show_entry(ae):
        days_old = (datetime.now() - ae.added).days
        console('ID: %-6s | Title: %s\nAdded: %s (%d days ago)\nURL: %s'
                % (ae.id, ae.title, ae.added, days_old, ae.url))
        src_list = ', '.join([s.name for s in ae.sources])
        tag_list = ', '.join([t.name for t in ae.tags])
        console('Source(s): %s | Tag(s): %s' % (src_list or 'N/A', tag_list or 'N/A'))
        if ae.description:
            console('Description: %s' % strip_html(ae.description))
        console('---')

    session = Session()
    try:
        console('Searching: %s' % term)
        if tags:
            console('Tags: %s' % ', '.join(tags))
        if sources:
            console('Sources: %s' % ', '.join(sources))
        console('Please wait...')
        console('')
        matched = False
        for ae in search(session, term, tags=tags, sources=sources):
            show_entry(ae)
            matched = True
        if not matched:
            console('No results found.')
    finally:
        session.close()
def on_process_start(self, task): if not task.manager.options.seen_search: return task.manager.disable_tasks() session = Session() shown = [] for field in session.query(SeenField).\ filter(SeenField.value.like(unicode('%' + task.manager.options.seen_search + '%'))).\ order_by(asc(SeenField.added)).all(): se = session.query(SeenEntry).filter( SeenEntry.id == field.seen_entry_id).first() if not se: print 'ERROR: <SeenEntry(id=%s)> missing' % field.seen_entry_id continue # don't show duplicates if se.id in shown: continue shown.append(se.id) print 'ID: %s Name: %s Task: %s Added: %s' % ( se.id, se.title, se.task, se.added.strftime('%c')) for sf in se.fields: print ' %s: %s' % (sf.field, sf.value) print '' if not shown: print 'No results' session.close()
def vacuum(): console("Running VACUUM on sqlite database, this could take a while.") session = Session() session.execute("VACUUM") session.commit() session.close() console("VACUUM complete.")
def get_poster(self, only_cached=False):
    """Downloads this poster to a local cache and returns the path"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if os.path.isfile(os.path.join(base_dir, self.poster_file or '')):
        return self.poster_file
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    url = get_mirror('banner') + self.poster
    log.debug('Downloading poster %s' % url)
    dirname = os.path.join('tvdb', 'posters')
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.poster))
    # Context manager closes the handle even on a write error (the original
    # opened with file() and never closed the handle).
    with open(os.path.join(base_dir, filename), 'wb') as thefile:
        thefile.write(requests.get(url).content)
    self.poster_file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        try:
            session.query(TVDBSeries).filter(TVDBSeries.id == self.id).update(
                values={'poster_file': filename})
            # Commit explicitly: close() without commit rolls the UPDATE back,
            # so the original never persisted the cached path.
            session.commit()
        finally:
            session.close()
    return filename
def set_version(plugin, version):
    """Record *version* as the stored schema version for *plugin*.

    Raises ValueError when the plugin has no versioned_base, when the value
    disagrees with the declared base version, or on a downgrade attempt.
    """
    if plugin not in plugin_schemas:
        raise ValueError(
            'Tried to set schema version for %s plugin with no versioned_base.' % plugin)
    base_version = plugin_schemas[plugin]['version']
    if version != base_version:
        raise ValueError('Tried to set %s plugin schema version to %d when '
                         'it should be %d as defined in versioned_base.' % (plugin, version, base_version))
    session = Session()
    try:
        schema = session.query(PluginSchema).filter(
            PluginSchema.plugin == plugin).first()
        if schema is None:
            # First time we see this plugin: create its schema row.
            log.debug('Initializing plugin %s schema version to %i' % (plugin, version))
            session.add(PluginSchema(plugin, version))
        elif version < schema.version:
            raise ValueError(
                'Tried to set plugin %s schema version to lower value' % plugin)
        elif version != schema.version:
            log.debug('Updating plugin %s schema version to %i' % (plugin, version))
            schema.version = version
        session.commit()
    finally:
        session.close()
def get_file(self, only_cached=False):
    """Makes sure the poster is downloaded to the local cache (in userstatic folder)
    and returns the path split into a list of directory and file components"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
        return self.file.split(os.sep)
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    log.debug('Downloading poster %s' % self.url)
    dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.url))
    # Context manager closes the handle even on error (the original opened
    # with file() and never closed it).
    with open(os.path.join(base_dir, filename), 'wb') as thefile:
        thefile.write(requests.get(self.url).content)
    self.file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        try:
            poster = session.query(TMDBPoster).filter(TMDBPoster.db_id == self.db_id).first()
            if poster:
                poster.file = filename
                session.commit()
        finally:
            # Close even if the query or commit raises (the original leaked
            # the session on error).
            session.close()
    return filename.split(os.sep)
def on_process_start(self, task):
    """Migrate the legacy 'seen' table to 'seen_entry' if it still exists."""
    # migrate seen to seen_entry
    session = Session()
    from flexget.utils.sqlalchemy_utils import table_exists
    try:
        if table_exists('seen', session):
            self.migrate2()
    finally:
        # Close even if the migration raises (the original leaked the
        # session on error).
        session.close()
def on_process_start(self, task): if not task.manager.options.seen_search: return task.manager.disable_tasks() session = Session() shown = [] for field in session.query(SeenField).\ filter(SeenField.value.like(unicode('%' + task.manager.options.seen_search + '%'))).\ order_by(asc(SeenField.added)).all(): se = session.query(SeenEntry).filter(SeenEntry.id == field.seen_entry_id).first() if not se: print 'ERROR: <SeenEntry(id=%s)> missing' % field.seen_entry_id continue # don't show duplicates if se.id in shown: continue shown.append(se.id) print 'ID: %s Name: %s Task: %s Added: %s' % (se.id, se.title, se.task, se.added.strftime('%c')) for sf in se.fields: print ' %s: %s' % (sf.field, sf.value) print '' if not shown: print 'No results' session.close()
def search(self, task, entry, config=None):
    """Search plugin API method.

    Looks up archive entries matching the entry's search strings and returns
    them as a set of new Entry objects.
    """
    session = Session()
    entries = set()
    # config True/False means "no tag filter"; a list restricts by tags.
    if isinstance(config, bool):
        tag_names = None
    else:
        tag_names = config
    try:
        for query in entry.get('search_strings', [entry['title']]):
            # clean some characters out of the string for better results
            query = re.sub(r'[ \(\)\:]+', ' ', query).strip()
            logger.debug('looking for `{}` config: {}', query, config)
            for archive_entry in db.search(session, query, tags=tag_names, desc=True):
                logger.debug('rewrite search result: {}', archive_entry)
                # Use a fresh name instead of rebinding `entry`, which
                # shadowed the method parameter in the original.
                result = Entry()
                result.update_using_map(self.entry_map, archive_entry, ignore_none=True)
                if result.isvalid():
                    entries.add(result)
    finally:
        session.close()
    logger.debug('found {} entries', len(entries))
    return entries
def log_once(message, logger=logging.getLogger('log_once'), once_level=logging.INFO,
             suppressed_level=f_logger.VERBOSE):
    """
    Log message only once using given logger`. Returns False if suppressed logging.
    When suppressed, `suppressed_level` level is still logged.
    """
    # If there is no active manager, don't access the db
    from flexget.manager import manager
    if not manager:
        log.warning('DB not initialized. log_once will not work properly.')
        logger.log(once_level, message)
        return
    checksum = hashlib.md5()
    checksum.update(message.encode('latin1', 'replace'))  # ticket:250
    md5sum = checksum.hexdigest()
    session = Session()
    try:
        # Already recorded: emit at the suppressed level and report suppression.
        if session.query(LogMessage).filter_by(md5sum=md5sum).first() is not None:
            logger.log(suppressed_level, message)
            return False
        session.add(LogMessage(md5sum))
        session.commit()
    finally:
        session.close()
    logger.log(once_level, message)
    return True
def get_series_summary(self):
    """
    :return: Dictionary where key is series name and value is dictionary of summary details.
    """
    result = {}
    session = Session()
    try:
        for series in session.query(Series).all():
            # capitalize if user hasn't, better look and sorting ...
            name = capwords(series.name) if series.name.islower() else series.name
            summary = {'identified_by': series.identified_by}
            episode = self.get_latest_download(series)
            if episode:
                summary['latest'] = {
                    'first_seen': episode.first_seen,
                    'episode_instance': episode,
                    'episode_id': episode.identifier,
                    'age': episode.age,
                    'status': self.get_latest_status(episode),
                    'behind': self.new_eps_after(episode),
                }
            result[name] = summary
    finally:
        session.close()
    return result
def log_once(message, logger=logging.getLogger('log_once')):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed verbose level is still logged.
    """
    digest = hashlib.md5()
    digest.update(message.encode('latin1', 'replace'))  # ticket:250
    fingerprint = digest.hexdigest()
    session = Session()
    try:
        seen_before = session.query(LogMessage).filter_by(md5sum=fingerprint).first()
        if seen_before:
            # Suppressed: still emit at verbose level.
            logger.verbose(message)
            return False
        session.add(LogMessage(fingerprint))
        session.commit()
    finally:
        session.close()
    logger.info(message)
    return True
def get_series_summary(self):
    """
    :return: Dictionary where key is series name and value is dictionary of summary details.
    """
    result = {}
    session = Session()
    try:
        seriestasks = session.query(SeriesTask).all()
        # Prefer series attached to tasks; fall back to every known series.
        if seriestasks:
            all_series = set(st.series for st in seriestasks)
        else:
            all_series = session.query(Series).all()
        for series in all_series:
            name = series.name
            # capitalize if user hasn't, better look and sorting ...
            if name.islower():
                name = capwords(name)
            summary = {'identified_by': series.identified_by,
                       'in_tasks': [task.name for task in series.in_tasks]}
            episode = self.get_latest_download(series)
            if episode:
                summary['latest'] = {'first_seen': episode.first_seen,
                                     'episode_instance': episode,
                                     'episode_id': episode.identifier,
                                     'age': episode.age,
                                     'status': self.get_latest_status(episode),
                                     'behind': self.new_eps_after(episode)}
            result[name] = summary
    finally:
        session.close()
    return result
def search(self, search_term, tags=None):
    """Search the archive for *search_term* (optionally restricted to *tags*)
    and print each match to the console."""

    def print_ae(ae):
        days_old = (datetime.now() - ae.added).days
        console('ID: %-6s | Title: %s\nAdded: %s (%d days ago)\nURL: %s'
                % (ae.id, ae.title, ae.added, days_old, ae.url))
        src_list = ', '.join(s.name for s in ae.sources)
        tag_list = ', '.join(t.name for t in ae.tags)
        console('Source(s): %s | Tag(s): %s' % (src_list or 'N/A', tag_list or 'N/A'))
        if ae.description:
            console('Description: %s' % strip_html(ae.description))
        console('---')

    session = Session()
    try:
        console('Searching: %s' % search_term)
        if tags:
            console('Tags: %s' % ', '.join(tags))
        console('Please wait ...')
        console('')
        # Module-level search() helper, not this method.
        for ae in search(session, search_term, tags):
            print_ae(ae)
    finally:
        session.close()
def on_process_start(self, task): if not task.manager.options.tail_reset: return task.manager.disable_tasks() from flexget.utils.simple_persistence import SimpleKeyValue from flexget.manager import Session session = Session() try: poses = session.query(SimpleKeyValue).filter( SimpleKeyValue.key == task.manager.options.tail_reset).all() if not poses: print 'No position stored for file %s' % task.manager.options.tail_reset print 'Note that file must give in same format as in config, ie. ~/logs/log can not be given as /home/user/logs/log' for pos in poses: if pos.value == 0: print 'Task %s tail position is already zero' % pos.task else: print 'Task %s tail position (%s) reseted to zero' % ( pos.task, pos.value) pos.value = 0 session.commit() finally: session.close()
def get_series_summary(self):
    """
    :return: Dictionary where key is series name and value is dictionary of summary details.
    """
    summaries = {}
    session = Session()
    try:
        for series in session.query(Series).all():
            name = series.name
            # capitalize if user hasn't, better look and sorting ...
            if name.islower():
                name = capwords(name)
            summaries[name] = {"identified_by": series.identified_by}
            episode = self.get_latest_download(series)
            if not episode:
                continue
            summaries[name]["latest"] = {
                "first_seen": episode.first_seen,
                "episode_instance": episode,
                "episode_id": episode.identifier,
                "age": episode.age,
                "status": self.get_latest_status(episode),
                "behind": self.new_eps_after(episode),
            }
    finally:
        session.close()
    return summaries
def get_poster(self, only_cached=False):
    """Downloads this poster to a local cache and returns the path"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if os.path.isfile(os.path.join(base_dir, self.poster_file or '')):
        return self.poster_file
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    url = get_mirror('banner') + self.poster
    log.debug('Downloading poster %s' % url)
    dirname = os.path.join('tvdb', 'posters')
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.poster))
    # Context manager closes the handle even on a write error (the original
    # opened with file() and never closed it).
    with open(os.path.join(base_dir, filename), 'wb') as thefile:
        thefile.write(requests.get(url).content)
    self.poster_file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        try:
            session.query(TVDBSeries).filter(TVDBSeries.id == self.id).update(
                values={'poster_file': filename})
            # Commit explicitly: close() without commit rolls the UPDATE back.
            session.commit()
        finally:
            # Close even on failure (the original leaked the session on error).
            session.close()
    return filename
def upgrade_wrapper(manager):
    """Run a plugin's schema upgrade function and persist the resulting version."""
    current_ver = get_version(plugin)
    session = Session()
    try:
        upgraded_ver = func(current_ver, session)
        if upgraded_ver < current_ver:
            # Upgrade function must never lower the version.
            log.critical(
                'A lower schema version was returned (%s) from the %s upgrade function '
                'than passed in (%s)' % (upgraded_ver, plugin, current_ver))
            manager.shutdown(finish_queue=False)
        elif upgraded_ver > current_ver:
            log.info('Plugin `%s` schema upgraded successfully' % plugin)
            set_version(plugin, upgraded_ver)
            session.commit()
            manager.db_upgraded = True
    except UpgradeImpossible:
        # Declared unsalvageable by the plugin: drop and rebuild its tables.
        log.info(
            'Plugin %s database is not upgradable. Flushing data and regenerating.' % plugin)
        reset_schema(plugin)
        session.commit()
    except Exception as e:
        log.exception('Failed to upgrade database for plugin %s: %s' % (plugin, e))
        manager.shutdown(finish_queue=False)
    finally:
        session.close()
def forget(value):
    """
    See module docstring

    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        for se in session.query(SeenEntry).filter(or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        for sf in session.query(SeenField).filter(SeenField.value == value).all():
            se = session.query(SeenEntry).filter(SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        # Commit on the success path only; the original committed in
        # `finally`, which persisted partial deletes even after an exception.
        session.commit()
        return count, field_count
    finally:
        session.close()
def queue_add(self, title=None, imdb_id=None, quality='ANY', force=True):
    """Add an item to the queue with the specified quality.

    :raises QueueError: if the movie is already queued
    """
    if not title or not imdb_id:
        # We don't have all the info we need to add movie, do a lookup for more info
        result = self.parse_what(imdb_id or title)
        title = result['title']
        imdb_id = result['imdb_id']
    quality = self.validate_quality(quality)
    session = Session()
    # try/finally so the session is closed on every path; the original
    # leaked it when raising QueueError for an already-queued movie.
    try:
        # check if the item is already queued
        item = session.query(QueuedMovie).filter(
            QueuedMovie.imdb_id == imdb_id).first()
        if item:
            raise QueueError('ERROR: %s is already in the queue' % title)
        # TODO: fix
        item = QueuedMovie(imdb_id=imdb_id, quality=quality, immortal=force, title=title)
        session.add(item)
        session.commit()
        return {
            'title': title,
            'imdb_id': imdb_id,
            'quality': quality,
            'force': force
        }
    finally:
        session.close()
def tag_source(source_name, tag_names=None):
    """
    Tags all archived entries within a source with supplied tags

    :param string source_name: Source name
    :param list tag_names: List of tag names to add
    """
    # `tag_names is None` was redundant: None is already falsy.
    if not tag_names:
        return
    session = Session()
    try:
        # check that source exists
        source = session.query(ArchiveSource).filter(ArchiveSource.name == source_name).first()
        if not source:
            log.critical('Source `%s` does not exists' % source_name)
            srcs = ', '.join([s.name for s in session.query(ArchiveSource).order_by(ArchiveSource.name)])
            if srcs:
                log.info('Known sources: %s' % srcs)
            return
        # construct tags list
        tags = [get_tag(tag_name, session) for tag_name in tag_names]
        # tag 'em
        log.verbose('Please wait while adding tags %s ...' % (', '.join(tag_names)))
        for a in session.query(ArchiveEntry).\
                filter(ArchiveEntry.sources.any(name=source_name)).yield_per(5):
            a.tags.extend(tags)
        # Commit on success only; the original committed in `finally`, which
        # also ran after an exception and could persist partial tagging.
        session.commit()
    finally:
        session.close()
def add_failed(self, entry, reason=None, **kwargs):
    """Adds entry to internal failed list, displayed with --failed"""
    reason = reason or 'Unknown'
    session = Session()
    try:
        # query item's existence
        item = (session.query(FailedEntry)
                .filter(FailedEntry.title == entry['title'])
                .filter(FailedEntry.url == entry['original_url'])
                .first())
        if item is None:
            item = FailedEntry(entry['title'], entry['original_url'], reason)
        else:
            # Seen before: bump the counter and refresh timestamp/reason.
            item.count += 1
            item.tof = datetime.now()
            item.reason = reason
        session.merge(item)
        log.debug('Marking %s in failed list. Has failed %s times.' % (item.title, item.count))
        # limit item number to 25
        for row in session.query(FailedEntry).order_by(FailedEntry.tof.desc())[25:]:
            session.delete(row)
        session.commit()
    finally:
        session.close()
def forget(value):
    """
    See module docstring

    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        for se in session.query(SeenEntry).filter(
                or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        for sf in session.query(SeenField).filter(
                SeenField.value == value).all():
            se = session.query(SeenEntry).filter(
                SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        # Commit before returning; committing in `finally` (as the original
        # did) would also persist partial deletes after an exception.
        session.commit()
        return count, field_count
    finally:
        session.close()
def get_file(self, only_cached=False):
    """Makes sure the poster is downloaded to the local cache (in userstatic folder)
    and returns the path split into a list of directory and file components"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
        return self.file.split(os.sep)
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    log.debug('Downloading poster %s' % self.url)
    dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.url))
    # Context manager closes the handle even on a write error (the original
    # opened with file() and never closed it).
    with open(os.path.join(base_dir, filename), 'wb') as thefile:
        thefile.write(urlopener(self.url, log).read())
    self.file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        try:
            poster = session.query(TMDBPoster).filter(
                TMDBPoster.db_id == self.db_id).first()
            if poster:
                poster.file = filename
                session.commit()
        finally:
            # Close even if the query or commit raises (original leaked it).
            session.close()
    return filename.split(os.sep)
def clear_backlog(manager):
    """CLI: --clear-backlog, delete all backlog entries."""
    if not manager.options.clear_backlog:
        return
    manager.disable_tasks()
    session = Session()
    try:
        num = session.query(BacklogEntry).delete()
        # The original never committed, so the bulk delete was rolled back
        # on close and nothing was actually cleared (compare the other
        # clear_* commands in this file, which all commit).
        session.commit()
    finally:
        session.close()
    console('%s entries cleared from backlog.' % num)
def vacuum():
    """Run sqlite VACUUM to compact and defragment the database file."""
    console('Running VACUUM on sqlite database, this could take a while.')
    db_session = Session()
    try:
        db_session.execute('VACUUM')
        db_session.commit()
    finally:
        # Release the connection even if VACUUM fails.
        db_session.close()
    console('VACUUM complete.')
def consolidate():
    """
    Converts previous archive data model to new one.

    Walks every ArchiveEntry, moves the legacy ``task`` column into the new
    ``sources`` relation, merges duplicate rows (same title + url) into one
    entry with multiple sources, then removes the duplicates.  Can be aborted
    with CTRL-C; aborting rolls everything back.
    """
    session = Session()
    try:
        log.verbose('Checking archive size ...')
        count = session.query(ArchiveEntry).count()
        log.verbose('Found %i items to migrate, this can be aborted with CTRL-C safely.' % count)
        # consolidate old data
        from progressbar import ProgressBar, Percentage, Bar, ETA
        widgets = ['Process - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=count).start()
        # id's for duplicates
        duplicates = []
        for index, orig in enumerate(session.query(ArchiveEntry).yield_per(5)):
            bar.update(index)
            # item already processed
            if orig.id in duplicates:
                continue
            # item already migrated
            if orig.sources:
                log.info('Database looks like it has already been consolidated, '
                         'item %s has already sources ...' % orig.title)
                session.rollback()
                return
            # add legacy task to the sources list
            orig.sources.append(get_source(orig.task, session))
            # remove task, deprecated .. well, let's still keep it ..
            # orig.task = None
            # Fold duplicates (same title + url, different id) into this
            # entry's sources and remember their ids for deletion below.
            for dupe in session.query(ArchiveEntry).\
                    filter(ArchiveEntry.id != orig.id).\
                    filter(ArchiveEntry.title == orig.title).\
                    filter(ArchiveEntry.url == orig.url).all():
                orig.sources.append(get_source(dupe.task, session))
                duplicates.append(dupe.id)
        if duplicates:
            log.info('Consolidated %i items, removing duplicates ...' % len(duplicates))
            for id in duplicates:
                session.query(ArchiveEntry).filter(ArchiveEntry.id == id).delete()
        session.commit()
        log.info('Completed! This does NOT need to be ran again.')
    except KeyboardInterrupt:
        # User abort: discard everything done so far.
        session.rollback()
        log.critical('Aborted, no changes saved')
    finally:
        session.close()
def _get_db_last_run(self):
    """Load this trigger's persisted last_run timestamp from the database."""
    session = Session()
    try:
        stored = session.query(DBTrigger).get(self.uid)
        if stored is not None:
            self.last_run = stored.last_run
            log.debug('loaded last_run from the database')
    finally:
        session.close()
def consolidate():
    """
    Converts previous archive data model to new one.

    Moves each ArchiveEntry's legacy ``task`` column into the ``sources``
    relation, merges duplicate rows (same title + url) into a single entry
    with multiple sources, then deletes the duplicates.  CTRL-C aborts and
    rolls back.
    """
    session = Session()
    try:
        log.verbose('Checking archive size ...')
        count = session.query(ArchiveEntry).count()
        log.verbose('Found %i items to migrate, this can be aborted with CTRL-C safely.' % count)
        # consolidate old data
        from progressbar import ProgressBar, Percentage, Bar, ETA
        widgets = ['Process - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=count).start()
        # id's for duplicates
        duplicates = []
        for index, orig in enumerate(session.query(ArchiveEntry).yield_per(5)):
            bar.update(index)
            # item already processed
            if orig.id in duplicates:
                continue
            # item already migrated
            if orig.sources:
                log.info('Database looks like it has already been consolidated, '
                         'item %s has already sources ...' % orig.title)
                session.rollback()
                return
            # add legacy task to the sources list
            orig.sources.append(get_source(orig.task, session))
            # remove task, deprecated .. well, let's still keep it ..
            # orig.task = None
            # Collect duplicate rows (same title + url) into this entry's
            # sources; their ids are deleted after the loop.
            for dupe in session.query(ArchiveEntry).\
                    filter(ArchiveEntry.id != orig.id).\
                    filter(ArchiveEntry.title == orig.title).\
                    filter(ArchiveEntry.url == orig.url).all():
                orig.sources.append(get_source(dupe.task, session))
                duplicates.append(dupe.id)
        if duplicates:
            log.info('Consolidated %i items, removing duplicates ...' % len(duplicates))
            for id in duplicates:
                session.query(ArchiveEntry).filter(ArchiveEntry.id == id).delete()
        session.commit()
        log.info('Completed! This does NOT need to be ran again.')
    except KeyboardInterrupt:
        # User abort: discard all pending changes.
        session.rollback()
        log.critical('Aborted, no changes saved')
    finally:
        session.close()
def _get_db_last_run(self):
    """Restore last_run for this trigger from its persisted DBTrigger row, if any."""
    session = Session()
    try:
        row = session.query(DBTrigger).get(self.uid)
        if row:
            self.last_run = row.last_run
            log.debug("loaded last_run from the database")
    finally:
        session.close()
def display_details(self, name):
    """Display detailed series information, ie. --series NAME

    Prints every known episode of the series with its releases, marking
    downloaded releases with '*', then explains the identified_by mode.
    """
    from flexget.manager import Session
    session = Session()
    # Series names are stored lowercased.
    name = unicode(name.lower())
    series = session.query(Series).filter(Series.name == name).first()
    if not series:
        console("Unknown series `%s`" % name)
        return
    console(" %-63s%-15s" % ("Identifier, Title", "Quality"))
    console("-" * 79)
    # Query episodes in sane order instead of iterating from series.episodes
    episodes = session.query(Episode).filter(Episode.series_id == series.id)
    if series.identified_by == "sequence":
        episodes = episodes.order_by(Episode.number).all()
    else:
        episodes = episodes.order_by(Episode.identifier).all()
    for episode in episodes:
        if episode.identifier is None:
            console(" None <--- Broken!")
        else:
            console(" %s (%s) - %s" % (episode.identifier, episode.identified_by or "N/A", episode.age))
        for release in episode.releases:
            status = release.quality.name
            title = release.title
            if len(title) > 55:
                title = title[:55] + "..."
            if release.proper_count > 0:
                status += "-proper"
                if release.proper_count > 1:
                    status += str(release.proper_count)
            if release.downloaded:
                console(" * %-60s%-15s" % (title, status))
            else:
                console(" %-60s%-15s" % (title, status))
    console("-" * 79)
    console(" * = downloaded")
    if not series.identified_by:
        console("")
        console(" Series plugin is still learning which episode numbering mode is ")
        console(" correct for this series (identified_by: auto).")
        console(" Few duplicate downloads can happen with different numbering schemes")
        console(" during this time.")
    else:
        console(" Series uses `%s` mode to identify episode numbering (identified_by)." % series.identified_by)
        console(" See option `identified_by` for more information.")
    session.close()
def display_summary(options):
    """
    Display series summary.

    :param options: argparse options from the CLI
    """
    formatting = ' %-30s %-10s %-10s %-20s'
    console(formatting % ('Name', 'Latest', 'Age', 'Downloaded'))
    console('-' * 79)
    session = Session()
    try:
        # Join episodes/releases/tasks so the HAVING filters below can
        # aggregate over them per series.
        query = (session.query(Series).outerjoin(Series.episodes).outerjoin(Episode.releases).
                 outerjoin(Series.in_tasks).group_by(Series.id))
        if options.configured == 'configured':
            query = query.having(func.count(SeriesTask.id) >= 1)
        elif options.configured == 'unconfigured':
            query = query.having(func.count(SeriesTask.id) < 1)
        if options.premieres:
            # Premieres: at most season 1 / first two episodes seen, not
            # configured in any task, with a downloaded release.
            query = (query.having(func.max(Episode.season) <= 1).having(func.max(Episode.number) <= 2).
                     having(func.count(SeriesTask.id) < 1)).filter(Release.downloaded == True)
        if options.new:
            query = query.having(func.max(Episode.first_seen) > datetime.now() - timedelta(days=options.new))
        if options.stale:
            query = query.having(func.max(Episode.first_seen) < datetime.now() - timedelta(days=options.stale))
        for series in query.order_by(Series.name).yield_per(10):
            series_name = series.name
            if len(series_name) > 30:
                series_name = series_name[:27] + '...'
            # Defaults shown when the series has no latest release.
            new_ep = ' '
            behind = 0
            status = 'N/A'
            age = 'N/A'
            episode_id = 'N/A'
            latest = get_latest_release(series)
            if latest:
                # '>' marks an episode seen within the last two days.
                if latest.first_seen > datetime.now() - timedelta(days=2):
                    new_ep = '>'
                behind = new_eps_after(latest)
                status = get_latest_status(latest)
                age = latest.age
                episode_id = latest.identifier
            if behind:
                episode_id += ' +%s' % behind
            console(new_ep + formatting[1:] % (series_name, episode_id, age, status))
            if behind >= 3:
                console(' ! Latest download is %d episodes behind, this may require '
                        'manual intervention' % behind)
        console('-' * 79)
        console(' > = new episode ')
        console(' Use `flexget series show NAME` to get detailed information')
    finally:
        session.close()
def upgrade_required():
    """Returns true if an upgrade of the database is required."""
    session = Session()
    try:
        # An upgrade is needed when any stored plugin schema version lags
        # behind the version declared in versioned_base.
        return any(
            schema.plugin in plugin_schemas
            and schema.version < plugin_schemas[schema.plugin]['version']
            for schema in session.query(PluginSchema).all()
        )
    finally:
        session.close()
def cli_perf_test(manager, options):
    """CLI entry point: run one of the named performance tests."""
    test_name = options.test_name
    if test_name not in TESTS:
        console('Unknown performance test %s' % test_name)
        return
    db_session = Session()
    try:
        if test_name == 'imdb_query':
            imdb_query(db_session)
    finally:
        db_session.close()
def clear_rejected(manager):
    """Delete all remembered-rejected entries and report how many were removed."""
    session = Session()
    try:
        deleted = session.query(RememberEntry).delete()
        console("Cleared %i items." % deleted)
        session.commit()
        if deleted:
            # Force config re-processing since remembered state changed.
            manager.config_changed()
    finally:
        session.close()
def clear_failed(manager):
    """Delete all failed entries and report how many were removed."""
    session = Session()
    try:
        removed = session.query(FailedEntry).delete()
        console('Cleared %i items.' % removed)
        session.commit()
        if removed:
            # State changed: let the manager know so config is re-evaluated.
            manager.config_changed()
    finally:
        session.close()
def clear_rejected(manager):
    """Remove every remembered-rejected entry from the database."""
    db_session = Session()
    try:
        num_deleted = db_session.query(RememberEntry).delete()
        console('Cleared %i items.' % num_deleted)
        db_session.commit()
        if num_deleted:
            manager.config_changed()
    finally:
        db_session.close()
def display_details(self, name):
    """Display detailed series information, ie. --series NAME

    Prints each known episode with its releases ('*' marks downloaded ones)
    and a note about the series' identified_by mode.
    """
    from flexget.manager import Session
    session = Session()
    # Series names are stored lowercased.
    name = unicode(name.lower())
    series = session.query(Series).filter(Series.name == name).first()
    if not series:
        print 'Unknown series `%s`' % name
        return
    print ' %-63s%-15s' % ('Identifier, Title', 'Quality')
    print '-' * 79
    # Query episodes in sane order instead of iterating from series.episodes
    episodes = session.query(Episode).filter(Episode.series_id == series.id)
    if series.identified_by == 'sequence':
        episodes = episodes.order_by(Episode.number).all()
    else:
        episodes = episodes.order_by(Episode.identifier).all()
    for episode in episodes:
        if episode.identifier is None:
            print ' None <--- Broken!'
        else:
            print ' %s (%s) - %s' % (episode.identifier, episode.identified_by or 'N/A', episode.age)
        for release in episode.releases:
            status = release.quality.name
            title = release.title
            if len(title) > 55:
                title = title[:55] + '...'
            if release.proper_count > 0:
                status += '-proper'
                if release.proper_count > 1:
                    status += str(release.proper_count)
            if release.downloaded:
                print ' * %-60s%-15s' % (title, status)
            else:
                print ' %-60s%-15s' % (title, status)
    print '-' * 79
    print ' * = downloaded'
    if not series.identified_by:
        print ''
        print ' Series plugin is still learning which episode numbering mode is '
        print ' correct for this series (identified_by: auto).'
        print ' Few duplicate downloads can happen with different numbering schemes'
        print ' during this time.'
    else:
        print ' Series uses `%s` mode to identify episode numbering (identified_by).' % series.identified_by
        print ' See option `identified_by` for more information.'
    session.close()
def list_failed():
    """Print every recorded failed entry to the console."""
    session = Session()
    try:
        entries = session.query(FailedEntry).all()
        if not entries:
            console('No failed entries recorded')
        for entry in entries:
            console('%16s - %s - %s times - %s' %
                    (entry.tof.strftime('%Y-%m-%d %H:%M'), entry.title,
                     entry.count, entry.reason))
    finally:
        session.close()
def clear_failed(self):
    """Clears list of failed entries"""
    session = Session()
    try:
        rows = session.query(FailedEntry).all()
        # Delete row-by-row (keeps ORM-level events/cascades intact).
        for row in rows:
            session.delete(row)
        console('Cleared %i items.' % len(rows))
        session.commit()
    finally:
        session.close()
def on_process_start(self, feed):
    """Run one-time migrations: shelve -> sqlalchemy, then seen -> seen_entry."""
    # migrate shelve -> sqlalchemy
    if feed.manager.shelve_session:
        self.migrate(feed)
    # migrate seen to seen_entry
    session = Session()
    from flexget.utils.sqlalchemy_utils import table_exists
    try:
        if table_exists('seen', session):
            self.migrate2()
    finally:
        # Close even if the migration raises (the original leaked the
        # session on error).
        session.close()
def get_version(plugin):
    """Return the stored schema version for *plugin*, or None if none is recorded."""
    session = Session()
    try:
        schema = session.query(PluginSchema).filter(PluginSchema.plugin == plugin).first()
        if schema is None:
            log.debug('No schema version stored for %s' % plugin)
            return None
        return schema.version
    finally:
        session.close()
def upgrade_required():
    """Returns true if an upgrade of the database is required."""
    session = Session()
    try:
        for stored in session.query(PluginSchema).all():
            if stored.plugin not in plugin_schemas:
                continue
            # Stored version behind the declared versioned_base version?
            if stored.version < plugin_schemas[stored.plugin]['version']:
                return True
        return False
    finally:
        session.close()
def _set_db_last_run(self):
    """Persist this trigger's last_run timestamp, creating the row if needed."""
    session = Session()
    try:
        trigger_row = session.query(DBTrigger).get(self.uid)
        if trigger_row is None:
            # First run for this trigger: create its row.
            trigger_row = DBTrigger(self.uid)
            session.add(trigger_row)
        trigger_row.last_run = self.last_run
        session.commit()
    finally:
        session.close()
    log.debug('recorded last_run to the database')
def get_version(plugin):
    """Look up the persisted schema version for *plugin* (None when absent)."""
    session = Session()
    try:
        row = session.query(PluginSchema).filter(
            PluginSchema.plugin == plugin).first()
        if row:
            return row.version
        log.debug('No schema version stored for %s' % plugin)
        return None
    finally:
        session.close()