def forget(value):
    """
    See module docstring

    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed
        and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        for se in session.query(SeenEntry).filter(
                or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)

        for sf in session.query(SeenField).filter(SeenField.value == value).all():
            se = session.query(SeenEntry).filter(SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        return count, field_count
    finally:
        session.commit()
        session.close()

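# Hedged usage sketch (not from the original source): forget() returns the
# number of removed entries and removed fields per its docstring, so a small
# wrapper can report both. forget_and_report is a hypothetical helper name.
def forget_and_report(value):
    count, field_count = forget(value)
    log.info('Forgot %s entries (%s fields) matching %s' % (count, field_count, value))
    return count
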
def tag_source(source_name, tag_names=None):
    """
    Tags all archived entries within a source with supplied tags

    :param string source_name: Source name
    :param list tag_names: List of tag names to add
    """
    if not tag_names:
        return
    session = Session()
    try:
        # check that the source exists
        source = session.query(ArchiveSource).filter(ArchiveSource.name == source_name).first()
        if not source:
            log.critical('Source `%s` does not exist' % source_name)
            srcs = ', '.join([s.name for s in session.query(ArchiveSource).order_by(ArchiveSource.name)])
            if srcs:
                log.info('Known sources: %s' % srcs)
            return

        # construct tags list
        tags = []
        for tag_name in tag_names:
            tags.append(get_tag(tag_name, session))

        # tag 'em
        log.verbose('Please wait while adding tags %s ...' % ', '.join(tag_names))
        for a in session.query(ArchiveEntry).\
                filter(ArchiveEntry.sources.any(name=source_name)).yield_per(5):
            a.tags.extend(tags)
    finally:
        session.commit()
        session.close()

def age_series(**kwargs):
    from flexget.plugins.filter.series import Release
    from flexget.manager import Session
    import datetime
    session = Session()
    session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
    session.commit()

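# Hedged usage sketch (not from the original source): age_series forwards its
# kwargs straight to datetime.timedelta, so any timedelta keyword should work,
# e.g. backdating every release's first_seen by one week:
# age_series(days=7)
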
def add_failed(self, entry, reason=None, **kwargs):
    """Adds entry to internal failed list, displayed with --failed"""
    reason = reason or 'Unknown'
    failed = Session()
    try:
        # query item's existence
        item = failed.query(FailedEntry).filter(FailedEntry.title == entry['title']).\
            filter(FailedEntry.url == entry['original_url']).first()
        if not item:
            item = FailedEntry(entry['title'], entry['original_url'], reason)
        else:
            item.count += 1
            item.tof = datetime.now()
            item.reason = reason
        failed.merge(item)
        log.debug('Marking %s in failed list. Has failed %s times.' % (item.title, item.count))

        # limit item number to 25
        for row in failed.query(FailedEntry).order_by(FailedEntry.tof.desc())[25:]:
            failed.delete(row)
        failed.commit()
    finally:
        failed.close()

def get_poster(self, only_cached=False):
    """Downloads this poster to a local cache and returns the path"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if os.path.isfile(os.path.join(base_dir, self.poster_file or '')):
        return self.poster_file
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    url = get_mirror('banner') + self.poster
    log.debug('Downloading poster %s' % url)
    dirname = os.path.join('tvdb', 'posters')
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.poster))
    thefile = file(os.path.join(base_dir, filename), 'wb')
    thefile.write(requests.get(url).content)
    self.poster_file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        session.query(TVDBSeries).filter(TVDBSeries.id == self.id).update(
            values={'poster_file': filename})
        session.close()
    return filename

def on_process_start(self, feed):
    if not feed.manager.options.repair_seen_movies:
        return

    feed.manager.disable_feeds()

    from progressbar import ProgressBar, Percentage, Bar, ETA
    from flexget.manager import Session
    from seen import SeenField
    from flexget.utils.imdb import extract_id

    session = Session()

    index = 0
    count = 0
    total = session.query(SeenField).filter(SeenField.field == u'imdb_url').count()

    widgets = ['Repairing: ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
    bar = ProgressBar(widgets=widgets, maxval=total).start()

    for seen in session.query(SeenField).filter(SeenField.field == u'imdb_url').all():
        index += 1
        if index % 5 == 0:
            bar.update(index)
        value = u'http://www.imdb.com/title/%s/' % extract_id(seen.value)
        if value != seen.value:
            count += 1
            seen.value = value
            seen.field = unicode('imdb_url')

    bar.finish()
    session.commit()

    print 'Fixed %s/%s URLs' % (count, total)

def estimate(self, entry):
    if all(field in entry for field in ['series_name', 'series_season', 'series_episode']):
        # Try to get airdate from tvrage first
        if api_tvrage:
            season = entry['series_season']
            if entry.get('series_id_type') == 'sequence':
                # Tvrage has absolute numbered shows under season 1
                season = 1
            log.debug("Querying release estimation for %s S%02dE%02d ..." %
                      (entry['series_name'], season, entry['series_episode']))
            try:
                series_info = lookup_series(name=entry['series_name'])
            except LookupError as e:
                log.debug('tvrage lookup error: %s' % e)
            else:
                if series_info:
                    try:
                        episode_info = series_info.find_episode(season, entry['series_episode'])
                        if episode_info:
                            return episode_info.airdate
                        else:
                            # If episode does not exist in tvrage database, we always return a future date
                            log.verbose('%s S%02dE%02d does not exist in tvrage database, assuming unreleased',
                                        series_info.name, season, entry['series_episode'])
                            return datetime.now() + timedelta(weeks=4)
                    except Exception as e:
                        log.exception(e)
                else:
                    log.debug('No series info obtained from TVRage for %s' % entry['series_name'])
            log.debug('No episode info obtained from TVRage for %s season %s episode %s' %
                      (entry['series_name'], entry['series_season'], entry['series_episode']))

        # If no results from tvrage, estimate a date based on series history
        session = Session()
        series = session.query(Series).filter(Series.name == entry['series_name']).first()
        if not series:
            return
        episodes = (session.query(Episode).join(Episode.series).
                    filter(Episode.season != None).
                    filter(Series.id == series.id).
                    filter(Episode.season == func.max(Episode.season).select()).
                    order_by(desc(Episode.number)).limit(2).all())
        if len(episodes) < 2:
            return
        # If last two eps were not contiguous, don't guess
        if episodes[0].number != episodes[1].number + 1:
            return
        last_diff = episodes[0].first_seen - episodes[1].first_seen
        # If last eps were grabbed close together, we might be catching up, don't guess
        # Or, if last eps were too far apart, don't guess
        # TODO: What range?
        if last_diff < timedelta(days=2) or last_diff > timedelta(days=10):
            return
        # Estimate next season somewhat more than a normal episode break
        if entry['series_season'] > episodes[0].season:
            # TODO: How big should this be?
            return episodes[0].first_seen + multiply_timedelta(last_diff, 2)
        # Estimate next episode comes out about the same span as the last one, with a little leeway
        return episodes[0].first_seen + multiply_timedelta(last_diff, 0.9)

def get_poster(self, only_cached=False):
    """Downloads this poster to a local cache and returns the path"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if os.path.isfile(os.path.join(base_dir, self.poster_file or '')):
        return self.poster_file
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    url = get_mirror('banner') + self.poster
    log.debug('Downloading poster %s' % url)
    dirname = os.path.join('tvdb', 'posters')
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.poster))
    thefile = file(os.path.join(base_dir, filename), 'wb')
    thefile.write(requests.get(url).content)
    self.poster_file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        try:
            session.query(TVDBSeries).filter(TVDBSeries.id == self.id).update(values={'poster_file': filename})
        finally:
            session.close()
    return filename

def on_process_start(self, task):
    if not task.manager.options.seen_search:
        return
    task.manager.disable_tasks()

    session = Session()
    shown = []
    for field in session.query(SeenField).\
            filter(SeenField.value.like(unicode('%' + task.manager.options.seen_search + '%'))).\
            order_by(asc(SeenField.added)).all():

        se = session.query(SeenEntry).filter(SeenEntry.id == field.seen_entry_id).first()
        if not se:
            print 'ERROR: <SeenEntry(id=%s)> missing' % field.seen_entry_id
            continue

        # don't show duplicates
        if se.id in shown:
            continue
        shown.append(se.id)

        print 'ID: %s Name: %s Task: %s Added: %s' % (se.id, se.title, se.task, se.added.strftime('%c'))
        for sf in se.fields:
            print ' %s: %s' % (sf.field, sf.value)
        print ''

    if not shown:
        print 'No results'

    session.close()

def get_series_summary(self):
    """
    :return: Dictionary where key is series name and value is dictionary of summary details.
    """
    result = {}
    session = Session()
    try:
        seriestasks = session.query(SeriesTask).all()
        if seriestasks:
            all_series = set(st.series for st in seriestasks)
        else:
            all_series = session.query(Series).all()
        for series in all_series:
            name = series.name
            # capitalize if user hasn't, for better looks and sorting ...
            if name.islower():
                name = capwords(name)
            result[name] = {'identified_by': series.identified_by}
            result[name]['in_tasks'] = [task.name for task in series.in_tasks]
            episode = self.get_latest_download(series)
            if episode:
                latest = {'first_seen': episode.first_seen,
                          'episode_instance': episode,
                          'episode_id': episode.identifier,
                          'age': episode.age,
                          'status': self.get_latest_status(episode),
                          'behind': self.new_eps_after(episode)}
                result[name]['latest'] = latest
    finally:
        session.close()
    return result

def consolidate():
    """
    Converts previous archive data model to new one.
    """
    session = Session()
    try:
        log.verbose('Checking archive size ...')
        count = session.query(ArchiveEntry).count()
        log.verbose('Found %i items to migrate, this can be aborted with CTRL-C safely.' % count)

        # consolidate old data
        from progressbar import ProgressBar, Percentage, Bar, ETA

        widgets = ['Process - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
        bar = ProgressBar(widgets=widgets, maxval=count).start()

        # ids for duplicates
        duplicates = []

        for index, orig in enumerate(session.query(ArchiveEntry).yield_per(5)):
            bar.update(index)

            # item already processed
            if orig.id in duplicates:
                continue

            # item already migrated
            if orig.sources:
                log.info('Database looks like it has already been consolidated, '
                         'item %s already has sources ...' % orig.title)
                session.rollback()
                return

            # add legacy task to the sources list
            orig.sources.append(get_source(orig.task, session))
            # remove task, deprecated .. well, let's still keep it ..
            # orig.task = None

            for dupe in session.query(ArchiveEntry).\
                    filter(ArchiveEntry.id != orig.id).\
                    filter(ArchiveEntry.title == orig.title).\
                    filter(ArchiveEntry.url == orig.url).all():
                orig.sources.append(get_source(dupe.task, session))
                duplicates.append(dupe.id)

        if duplicates:
            log.info('Consolidated %i items, removing duplicates ...' % len(duplicates))
            for id in duplicates:
                session.query(ArchiveEntry).filter(ArchiveEntry.id == id).delete()
        session.commit()
        log.info('Completed! This does NOT need to be run again.')
    except KeyboardInterrupt:
        session.rollback()
        log.critical('Aborted, no changes saved')
    finally:
        session.close()

def display_details(self, name):
    """Display detailed series information, ie. --series NAME"""
    from flexget.manager import Session
    session = Session()
    name = unicode(name.lower())
    series = session.query(Series).filter(Series.name == name).first()
    if not series:
        console("Unknown series `%s`" % name)
        return

    console(" %-63s%-15s" % ("Identifier, Title", "Quality"))
    console("-" * 79)

    # Query episodes in sane order instead of iterating from series.episodes
    episodes = session.query(Episode).filter(Episode.series_id == series.id)
    if series.identified_by == "sequence":
        episodes = episodes.order_by(Episode.number).all()
    else:
        episodes = episodes.order_by(Episode.identifier).all()

    for episode in episodes:
        if episode.identifier is None:
            console(" None <--- Broken!")
        else:
            console(" %s (%s) - %s" % (episode.identifier, episode.identified_by or "N/A", episode.age))
        for release in episode.releases:
            status = release.quality.name
            title = release.title
            if len(title) > 55:
                title = title[:55] + "..."
            if release.proper_count > 0:
                status += "-proper"
                if release.proper_count > 1:
                    status += str(release.proper_count)
            if release.downloaded:
                console(" * %-60s%-15s" % (title, status))
            else:
                console(" %-60s%-15s" % (title, status))

    console("-" * 79)
    console(" * = downloaded")
    if not series.identified_by:
        console("")
        console(" Series plugin is still learning which episode numbering mode is ")
        console(" correct for this series (identified_by: auto).")
        console(" Few duplicate downloads can happen with different numbering schemes")
        console(" during this time.")
    else:
        console(" Series uses `%s` mode to identify episode numbering (identified_by)." % series.identified_by)
    console(" See option `identified_by` for more information.")
    session.close()

def display_details(self, name):
    """Display detailed series information, ie. --series NAME"""
    from flexget.manager import Session
    session = Session()
    name = unicode(name.lower())
    series = session.query(Series).filter(Series.name == name).first()
    if not series:
        print 'Unknown series `%s`' % name
        return

    print ' %-63s%-15s' % ('Identifier, Title', 'Quality')
    print '-' * 79

    # Query episodes in sane order instead of iterating from series.episodes
    episodes = session.query(Episode).filter(Episode.series_id == series.id)
    if series.identified_by == 'sequence':
        episodes = episodes.order_by(Episode.number).all()
    else:
        episodes = episodes.order_by(Episode.identifier).all()

    for episode in episodes:
        if episode.identifier is None:
            print ' None <--- Broken!'
        else:
            print ' %s (%s) - %s' % (episode.identifier, episode.identified_by or 'N/A', episode.age)
        for release in episode.releases:
            status = release.quality.name
            title = release.title
            if len(title) > 55:
                title = title[:55] + '...'
            if release.proper_count > 0:
                status += '-proper'
                if release.proper_count > 1:
                    status += str(release.proper_count)
            if release.downloaded:
                print ' * %-60s%-15s' % (title, status)
            else:
                print ' %-60s%-15s' % (title, status)

    print '-' * 79
    print ' * = downloaded'
    if not series.identified_by:
        print ''
        print ' Series plugin is still learning which episode numbering mode is '
        print ' correct for this series (identified_by: auto).'
        print ' Few duplicate downloads can happen with different numbering schemes'
        print ' during this time.'
    else:
        print ' Series uses `%s` mode to identify episode numbering (identified_by).' % series.identified_by
    print ' See option `identified_by` for more information.'
    session.close()

def do_cli(manager, options):
    session = Session()
    try:
        console('-- History: ' + '-' * 67)
        query = session.query(History)
        if options.search:
            search_term = options.search.replace(' ', '%').replace('.', '%')
            query = query.filter(History.title.like('%' + search_term + '%'))
        if options.task:
            query = query.filter(History.task.like('%' + options.task + '%'))
        query = query.order_by(desc(History.time)).limit(options.limit)
        for item in reversed(query.all()):
            if options.short:
                console(' %-25s %s' % (item.time.strftime("%c"), item.title))
            else:
                console(' Task : %s' % item.task)
                console(' Title : %s' % item.title)
                console(' Url : %s' % item.url)
                if item.filename:
                    console(' Stored : %s' % item.filename)
                console(' Time : %s' % item.time.strftime("%c"))
                console(' Details : %s' % item.details)
                console('-' * 79)
    finally:
        session.close()

def add_backlog(self, task, entry, amount=''):
    """Add single entry to task backlog

    If :amount: is not specified, entry will only be injected on next execution."""
    snapshot = entry.snapshots.get('after_input')
    if not snapshot:
        if task.current_phase != 'input':
            # Not having a snapshot is normal during input phase, don't display a warning
            log.warning('No input snapshot available for `%s`, using current state' % entry['title'])
        snapshot = entry
    session = Session()
    expire_time = datetime.now() + parse_timedelta(amount)
    backlog_entry = session.query(BacklogEntry).filter(BacklogEntry.title == entry['title']).\
        filter(BacklogEntry.task == task.name).first()
    if backlog_entry:
        # If there is already a backlog entry for this, update the expiry time if necessary.
        if backlog_entry.expire < expire_time:
            log.debug('Updating expiry time for %s' % entry['title'])
            backlog_entry.expire = expire_time
    else:
        log.debug('Saving %s' % entry['title'])
        backlog_entry = BacklogEntry()
        backlog_entry.title = entry['title']
        backlog_entry.entry = snapshot
        backlog_entry.task = task.name
        backlog_entry.expire = expire_time
        session.add(backlog_entry)
    session.commit()

def test_seen_delete_all(self, mock_seen_search, api_client):
    session = Session()
    entry_list = session.query(SeenEntry).join(SeenField)
    mock_seen_search.return_value = entry_list

    # No params
    rsp = api_client.delete('/seen/')
    assert rsp.status_code == 404, 'Response code is %s' % rsp.status_code

    fields = {
        'url': 'http://test.com/file.torrent',
        'title': 'Test.Title',
        'torrent_hash_id': 'dsfgsdfg34tq34tq34t'
    }

    entry = {
        'local': False,
        'reason': 'test_reason',
        'task': 'test_task',
        'title': 'Test.Title',
        'fields': fields
    }

    rsp = api_client.json_post('/seen/', data=json.dumps(entry))
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    # With value
    rsp = api_client.delete('/seen/?value=Test.Title')
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    assert mock_seen_search.call_count == 2, 'Should have 2 calls, is actually %s' % mock_seen_search.call_count

def set_version(plugin, version):
    if plugin not in plugin_schemas:
        raise ValueError('Tried to set schema version for %s plugin with no versioned_base.' % plugin)
    base_version = plugin_schemas[plugin]['version']
    if version != base_version:
        raise ValueError('Tried to set %s plugin schema version to %d when '
                         'it should be %d as defined in versioned_base.' % (plugin, version, base_version))
    session = Session()
    try:
        schema = session.query(PluginSchema).filter(PluginSchema.plugin == plugin).first()
        if not schema:
            log.debug('Initializing plugin %s schema version to %i' % (plugin, version))
            schema = PluginSchema(plugin, version)
            session.add(schema)
        else:
            if version < schema.version:
                raise ValueError('Tried to set plugin %s schema version to lower value' % plugin)
            if version != schema.version:
                log.debug('Updating plugin %s schema version to %i' % (plugin, version))
                schema.version = version
        session.commit()
    finally:
        session.close()

def get_series_summary(self):
    """
    :return: Dictionary where key is series name and value is dictionary of summary details.
    """
    result = {}
    session = Session()
    try:
        for series in session.query(Series).all():
            name = series.name
            # capitalize if user hasn't, for better looks and sorting ...
            if name.islower():
                name = capwords(name)
            result[name] = {'identified_by': series.identified_by}
            episode = self.get_latest_download(series)
            if episode:
                latest = {
                    'first_seen': episode.first_seen,
                    'episode_instance': episode,
                    'episode_id': episode.identifier,
                    'age': episode.age,
                    'status': self.get_latest_status(episode),
                    'behind': self.new_eps_after(episode)
                }
                result[name]['latest'] = latest
    finally:
        session.close()
    return result

def on_process_start(self, task):
    if not task.manager.options.tail_reset:
        return

    task.manager.disable_tasks()

    from flexget.utils.simple_persistence import SimpleKeyValue
    from flexget.manager import Session
    session = Session()
    try:
        poses = session.query(SimpleKeyValue).filter(
            SimpleKeyValue.key == task.manager.options.tail_reset).all()
        if not poses:
            print 'No position stored for file %s' % task.manager.options.tail_reset
            print 'Note that the file must be given in the same format as in the config, ie. ~/logs/log cannot be given as /home/user/logs/log'
        for pos in poses:
            if pos.value == 0:
                print 'Task %s tail position is already zero' % pos.task
            else:
                print 'Task %s tail position (%s) reset to zero' % (pos.task, pos.value)
                pos.value = 0
        session.commit()
    finally:
        session.close()

def log_once(message, logger=logging.getLogger('log_once'), once_level=logging.INFO,
             suppressed_level=f_logger.VERBOSE):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed, `suppressed_level` level is still logged.
    """
    # If there is no active manager, don't access the db
    from flexget.manager import manager
    if not manager:
        log.warning('DB not initialized. log_once will not work properly.')
        logger.log(once_level, message)
        return

    digest = hashlib.md5()
    digest.update(message.encode('latin1', 'replace'))  # ticket:250
    md5sum = digest.hexdigest()

    session = Session()
    try:
        # abort if this has already been logged
        if session.query(LogMessage).filter_by(md5sum=md5sum).first():
            logger.log(suppressed_level, message)
            return False

        row = LogMessage(md5sum)
        session.add(row)
        session.commit()
    finally:
        session.close()

    logger.log(once_level, message)
    return True

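# Hedged usage sketch (not from the original source): the first call emits the
# message at once_level; later identical calls log only at suppressed_level and
# return False, so callers can branch on the return value if they need to.
# if log_once('Site appears to be down') is False:
#     pass  # this exact message was already logged earlier
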
def test_seen_get(self, mock_seen_search):
    session = Session()
    entry_list = session.query(SeenEntry).join(SeenField).all()
    mock_seen_search.return_value = entry_list

    # No params
    rsp = self.get('/seen/')
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    # Default params
    rsp = self.get('/seen/?page=1&max=100&local_seen=true&sort_by=added&order=desc')
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    # Changed params
    rsp = self.get('/seen/?max=1000&local_seen=false&sort_by=title&order=asc')
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    # Negative test, invalid parameter
    rsp = self.get('/seen/?max=1000&local_seen=BLA&sort_by=title &order=asc')
    assert rsp.status_code == 400, 'Response code is %s' % rsp.status_code

    # With value
    rsp = self.get('/seen/?value=bla')
    assert rsp.status_code == 200, 'Response code is %s' % rsp.status_code

    assert mock_seen_search.call_count == 4, 'Should have 4 calls, is actually %s' % mock_seen_search.call_count

def forget_series_episode(name, identifier):
    """Remove all episodes by :identifier: from series :name: from database."""
    session = Session()
    series = session.query(Series).filter(Series.name == name).first()
    if series:
        episode = session.query(Episode).filter(Episode.identifier == identifier).\
            filter(Episode.series_id == series.id).first()
        if episode:
            series.identified_by = ''  # reset identified_by flag so that it will be recalculated
            session.delete(episode)
            session.commit()
            log.debug('Episode %s from series %s removed from database.' % (identifier, name))
        else:
            raise ValueError('Unknown identifier %s for series %s' % (identifier, name.capitalize()))
    else:
        raise ValueError('Unknown series %s' % name)

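# Hedged usage sketch (not from the original source; the series name and
# identifier are hypothetical): forget_series_episode raises ValueError for an
# unknown series or identifier, so callers may want to catch and report it.
# try:
#     forget_series_episode('Some Show', 'S05E01')
# except ValueError as e:
#     log.error(e)
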
def get_file(self, only_cached=False):
    """Makes sure the poster is downloaded to the local cache (in userstatic folder) and
    returns the path split into a list of directory and file components"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
        return self.file.split(os.sep)
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    log.debug('Downloading poster %s' % self.url)
    dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.url))
    thefile = file(os.path.join(base_dir, filename), 'wb')
    thefile.write(requests.get(self.url).content)
    self.file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        poster = session.query(TMDBPoster).filter(TMDBPoster.db_id == self.db_id).first()
        if poster:
            poster.file = filename
            session.commit()
        session.close()
    return filename.split(os.sep)

def notify(self, title, message, config):
    if not message.strip():
        return
    self._parse_config(config)
    session = Session()
    self._save_message(message, session)
    session.commit()
    message_list = session.query(MessageEntry).filter(MessageEntry.sent == False).all()
    try:
        if access_token := self._get_access_token(session, self._corp_id, self._corp_secret):
            for message_entry in message_list:
                self._send_msgs(message_entry, access_token)
                time.sleep(1)
                if message_entry.sent:
                    session.delete(message_entry)
                    session.commit()
            if self.image:
                self._send_images(access_token)
    except Exception as e:
        raise PluginError(str(e))

def log_once(message, logger=logging.getLogger('log_once')):
    """
    Log message only once using given logger. Returns False if suppressed logging.
    When suppressed, verbose level is still logged.
    """
    digest = hashlib.md5()
    digest.update(message.encode('latin1', 'replace'))  # ticket:250
    md5sum = digest.hexdigest()

    session = Session()
    try:
        # abort if this has already been logged
        if session.query(LogMessage).filter_by(md5sum=md5sum).first():
            logger.verbose(message)
            return False

        row = LogMessage(md5sum)
        session.add(row)
        session.commit()
    finally:
        session.close()

    logger.info(message)
    return True

def get_file(self, only_cached=False):
    """Makes sure the poster is downloaded to the local cache (in userstatic folder) and
    returns the path split into a list of directory and file components"""
    from flexget.manager import manager
    base_dir = os.path.join(manager.config_base, 'userstatic')
    if self.file and os.path.isfile(os.path.join(base_dir, self.file)):
        return self.file.split(os.sep)
    elif only_cached:
        return
    # If we don't already have a local copy, download one.
    log.debug('Downloading poster %s' % self.url)
    dirname = os.path.join('tmdb', 'posters', str(self.movie_id))
    # Create folders if they don't exist
    fullpath = os.path.join(base_dir, dirname)
    if not os.path.isdir(fullpath):
        os.makedirs(fullpath)
    filename = os.path.join(dirname, posixpath.basename(self.url))
    thefile = file(os.path.join(base_dir, filename), 'wb')
    thefile.write(urlopener(self.url, log).read())
    self.file = filename
    # If we are detached from a session, update the db
    if not Session.object_session(self):
        session = Session()
        poster = session.query(TMDBPoster).filter(TMDBPoster.db_id == self.db_id).first()
        if poster:
            poster.file = filename
            session.commit()
        session.close()
    return filename.split(os.sep)

def queue_add(self, title=None, imdb_id=None, quality='ANY', force=True):
    """Add an item to the queue with the specified quality"""

    if not title or not imdb_id:
        # We don't have all the info we need to add movie, do a lookup for more info
        result = self.parse_what(imdb_id or title)
        title = result['title']
        imdb_id = result['imdb_id']
    quality = self.validate_quality(quality)

    session = Session()
    # check if the item is already queued
    item = session.query(QueuedMovie).filter(QueuedMovie.imdb_id == imdb_id).first()
    if not item:
        # TODO: fix
        item = QueuedMovie(imdb_id=imdb_id, quality=quality, immortal=force, title=title)
        session.add(item)
        session.commit()
        session.close()
        return {'title': title, 'imdb_id': imdb_id, 'quality': quality, 'force': force}
    else:
        raise QueueError('ERROR: %s is already in the queue' % title)

def assert_series_count_in_db(expected_count):
    from flexget.plugins.filter.series import Series
    from flexget.manager import Session
    session = Session()
    actual_series_count = session.query(Series).count()
    assert expected_count == actual_series_count, \
        "expecting %s series stored in db, got %s instead" % (expected_count, actual_series_count)

def clear_backlog(manager):
    if not manager.options.clear_backlog:
        return
    manager.disable_tasks()
    session = Session()
    num = session.query(BacklogEntry).delete()
    session.commit()  # the bulk delete is not persisted without a commit
    session.close()
    console('%s entries cleared from backlog.' % num)

def get_login_cookies(self, username, password):
    url_auth = 'http://www.t411.me/users/login'
    db_session = Session()
    account = db_session.query(torrent411Account).filter(
        torrent411Account.username == username).first()
    if account:
        if account.expiry_time < datetime.now():
            db_session.delete(account)
            db_session.commit()
        log.debug("Cookies found in db!")
        return account.auth
    else:
        log.debug("Getting login cookies from : %s " % url_auth)
        params = urllib.urlencode({'login': username, 'password': password, 'remember': '1'})
        cj = cookielib.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
        # NEEDS TO BE THE SAME USER_AGENT AS THE DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        try:
            opener.open(url_auth, params)
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))

        authKey = None
        uid = None
        password = None
        for cookie in cj:
            if cookie.name == "authKey":
                authKey = cookie.value
            if cookie.name == "uid":
                uid = cookie.value
            if cookie.name == "pass":
                password = cookie.value

        if authKey is not None and uid is not None and password is not None:
            authCookie = {'uid': uid, 'password': password, 'authKey': authKey}
            db_session.add(torrent411Account(username=username, auth=authCookie,
                                             expiry_time=datetime.now() + timedelta(days=1)))
            db_session.commit()
            return authCookie
    return {"uid": "", "password": "", "authKey": ""}

def display_details(self):
    """Display detailed series information, ie. --series NAME"""
    from flexget.manager import Session
    session = Session()
    name = unicode(self.options['name'].lower())
    series = session.query(Series).filter(Series.name == name).first()
    if not series:
        print 'Unknown series `%s`' % name
        return

    print '%s is in identified_by `%s` mode.' % (series.name, series.identified_by or 'auto')
    print ' %-63s%-15s' % ('Identifier, Title', 'Quality')
    print '-' * 79

    # Query episodes in sane order instead of iterating from series.episodes
    episodes = session.query(Episode).filter(Episode.series_id == series.id).\
        order_by(Episode.identifier).all()

    for episode in episodes:
        if episode.identifier is None:
            print ' None <--- Broken!'
        else:
            print ' %s (%s) - %s' % (episode.identifier, episode.identified_by, episode.age)
        for release in episode.releases:
            status = release.quality.name
            title = release.title
            if len(title) > 55:
                title = title[:55] + '...'
            if release.proper_count > 0:
                status += '-proper'
                if release.proper_count > 1:
                    status += str(release.proper_count)
            if release.downloaded:
                print ' * %-60s%-15s' % (title, status)
            else:
                print ' %-60s%-15s' % (title, status)

    print '-' * 79
    print ' * = downloaded'
    session.close()

def assert_series_count_in_db(expected_count):
    from flexget.components.series.db import Series
    from flexget.manager import Session
    session = Session()
    actual_series_count = session.query(Series).count()
    assert expected_count == actual_series_count, \
        "expecting %s series stored in db, got %s instead" % (expected_count, actual_series_count)

def get_login_cookies(self, username, password):
    url_auth = 'http://www.t411.li/users/login'
    db_session = Session()
    account = db_session.query(torrent411Account).filter(
        torrent411Account.username == username).first()
    if account:
        if account.expiry_time < datetime.now():
            db_session.delete(account)
            db_session.commit()
        log.debug("Cookies found in db!")
        return account.auth
    else:
        log.debug("Getting login cookies from : %s " % url_auth)
        params = {'login': username, 'password': password, 'remember': '1'}
        cj = http.cookiejar.CookieJar()
        # WE NEED A COOKIE HOOK HERE TO AVOID REDIRECT COOKIES
        opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
        # NEEDS TO BE THE SAME USER_AGENT AS THE DOWNLOAD LINK
        opener.addheaders = [('User-agent', self.USER_AGENT)]
        login_output = None
        try:
            # urlopen requires bytes for POST data on Python 3
            login_output = opener.open(url_auth, urllib.parse.urlencode(params).encode('utf-8')).read()
        except Exception as e:
            raise UrlRewritingError("Connection Error for %s : %s" % (url_auth, e))
        if b'confirmer le captcha' in login_output:
            log.warning("Captcha requested for login.")
            login_output = self._solveCaptcha(login_output, url_auth, params, opener)

        if b'logout' in login_output:
            authKey = None
            uid = None
            password = None
            for cookie in cj:
                if cookie.name == "authKey":
                    authKey = cookie.value
                if cookie.name == "uid":
                    uid = cookie.value
                if cookie.name == "pass":
                    password = cookie.value

            if authKey is not None and uid is not None and password is not None:
                authCookie = {'uid': uid, 'password': password, 'authKey': authKey}
                db_session.add(torrent411Account(username=username, auth=authCookie,
                                                 expiry_time=datetime.now() + timedelta(days=1)))
                db_session.commit()
                return authCookie
        else:
            log.error("Login failed (Torrent411). Check your login and password.")
            return {}

def _get_db_last_run(self):
    session = Session()
    try:
        db_trigger = session.query(DBTrigger).get(self.uid)
        if db_trigger:
            self.last_run = db_trigger.last_run
            log.debug('loaded last_run from the database')
    finally:
        session.close()

def display_summary(options):
    """
    Display series summary.
    :param options: argparse options from the CLI
    """
    formatting = ' %-30s %-10s %-10s %-20s'
    console(formatting % ('Name', 'Latest', 'Age', 'Downloaded'))
    console('-' * 79)

    session = Session()
    try:
        query = (session.query(Series).outerjoin(Series.episodes).outerjoin(Episode.releases).
                 outerjoin(Series.in_tasks).group_by(Series.id))
        if options.configured == 'configured':
            query = query.having(func.count(SeriesTask.id) >= 1)
        elif options.configured == 'unconfigured':
            query = query.having(func.count(SeriesTask.id) < 1)
        if options.premieres:
            query = (query.having(func.max(Episode.season) <= 1).having(func.max(Episode.number) <= 2).
                     having(func.count(SeriesTask.id) < 1)).filter(Release.downloaded == True)
        if options.new:
            query = query.having(func.max(Episode.first_seen) > datetime.now() - timedelta(days=options.new))
        if options.stale:
            query = query.having(func.max(Episode.first_seen) < datetime.now() - timedelta(days=options.stale))
        for series in query.order_by(Series.name).yield_per(10):
            series_name = series.name
            if len(series_name) > 30:
                series_name = series_name[:27] + '...'

            new_ep = ' '
            behind = 0
            status = 'N/A'
            age = 'N/A'
            episode_id = 'N/A'
            latest = get_latest_release(series)
            if latest:
                if latest.first_seen > datetime.now() - timedelta(days=2):
                    new_ep = '>'
                behind = new_eps_after(latest)
                status = get_latest_status(latest)
                age = latest.age
                episode_id = latest.identifier

            if behind:
                episode_id += ' +%s' % behind

            console(new_ep + formatting[1:] % (series_name, episode_id, age, status))
            if behind >= 3:
                console(' ! Latest download is %d episodes behind, this may require '
                        'manual intervention' % behind)

        console('-' * 79)
        console(' > = new episode ')
        console(' Use `flexget series show NAME` to get detailed information')
    finally:
        session.close()

def clear_failed(manager):
    session = Session()
    try:
        results = session.query(FailedEntry).delete()
        console('Cleared %i items.' % results)
        session.commit()
        if results:
            manager.config_changed()
    finally:
        session.close()

def clear_rejected(manager):
    session = Session()
    try:
        results = session.query(RememberEntry).delete()
        console('Cleared %i items.' % results)
        session.commit()
        if results:
            manager.config_changed()
    finally:
        session.close()

def purge():
    """Purge old messages from database"""
    old = datetime.now() - timedelta(days=365)

    session = Session()
    try:
        for message in session.query(LogMessage).filter(LogMessage.added < old):
            log.debug('purging: %s' % message)
            session.delete(message)
    finally:
        session.commit()

def clear_failed(self):
    """Clears list of failed entries"""
    session = Session()
    try:
        results = session.query(FailedEntry).all()
        for row in results:
            session.delete(row)
        console('Cleared %i items.' % len(results))
        session.commit()
    finally:
        session.close()

def upgrade_required():
    """Returns true if an upgrade of the database is required."""
    session = Session()
    try:
        for old_schema in session.query(PluginSchema).all():
            if old_schema.plugin in plugin_schemas and \
                    old_schema.version < plugin_schemas[old_schema.plugin]['version']:
                return True
        return False
    finally:
        session.close()

def list_failed():
    session = Session()
    try:
        results = session.query(FailedEntry).all()
        if not results:
            console('No failed entries recorded')
        for entry in results:
            console('%16s - %s - %s times - %s' % (entry.tof.strftime('%Y-%m-%d %H:%M'),
                                                   entry.title, entry.count, entry.reason))
    finally:
        session.close()

def config_changed(task):
    """Forces config_modified flag to come out true on next run. Used when the db changes,
    and all entries need to be reprocessed."""
    log.debug('Marking config as changed.')
    session = Session()
    try:
        task_hash = session.query(TaskConfigHash).filter(TaskConfigHash.task == task).first()
        if task_hash:
            task_hash.hash = ''
        session.commit()
    finally:
        session.close()

def _set_db_last_run(self):
    session = Session()
    try:
        db_trigger = session.query(DBTrigger).get(self.uid)
        if not db_trigger:
            db_trigger = DBTrigger(self.uid)
            session.add(db_trigger)
        db_trigger.last_run = self.last_run
        session.commit()
    finally:
        session.close()
    log.debug('recorded last_run to the database')

def get_version(plugin):
    session = Session()
    try:
        schema = session.query(PluginSchema).filter(PluginSchema.plugin == plugin).first()
        if not schema:
            log.debug('No schema version stored for %s' % plugin)
            return None
        else:
            return schema.version
    finally:
        session.close()

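# Hedged usage sketch (not from the original source; 'my_plugin' and the
# helper name are hypothetical): how get_version() and set_version() might
# cooperate with the module-level plugin_schemas registry during a migration.
def ensure_schema_current(plugin='my_plugin'):
    stored = get_version(plugin)  # None when no version has been recorded yet
    target = plugin_schemas[plugin]['version']
    if stored is None or stored < target:
        # a real migration would transform the tables here before recording
        set_version(plugin, target)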