# NOTE(review): fragment — continues an interactive `while True:`/`try:` loop whose
# header is above this chunk (interactive regexp tester; indentation reconstructed).
        s = raw_input('--> ')
        if s == 'exit':
            break
        if s == 'abort' or s == 'continue':
            self.abort = True
            break
    except EOFError:
        # ^D ends the interactive session
        break
    count = 0
    for entry in task.entries:
        try:
            match, field = self.matches(entry, s)
            if match:
                print 'Title: %-40s URL: %-30s From: %s' % (
                    entry['title'], entry['url'], field)
                count += 1
        except:
            # NOTE(review): bare except — presumably meant to catch re.error for a
            # malformed pattern, but it hides any other failure too; confirm intent
            print 'Invalid regular expression'
            break
    print '%s of %s entries matched' % (count, len(task.entries))
print 'Bye!'

register_plugin(PluginTryRegexp, '--try-regexp', builtin=True)
register_parser_option('--try-regexp', action='store_true', dest='try_regexp', default=False,
                       help='Try regular expressions interactively.')
# NOTE(review): fragment — body of an IMDB-query benchmark method (indentation reconstructed).
widgets = ['Benchmarking - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
bar = ProgressBar(widgets=widgets, maxval=len(imdb_urls)).start()
log_query_count('test')
start_time = time.time()
for index, url in enumerate(imdb_urls):
    bar.update(index)
    # Earlier strategies kept for comparison:
    #movie = session.query(Movie).filter(Movie.url == url).first()
    #movie = session.query(Movie).options(subqueryload(Movie.genres)).filter(Movie.url == url).one()
    movie = session.query(Movie).\
        options(joinedload_all(Movie.genres, Movie.languages, Movie.actors, Movie.directors)).\
        filter(Movie.url == url).first()
    # touch the relationship collections so lazy loads are triggered inside the timing window
    var = [x.name for x in movie.genres]
    var = [x.name for x in movie.directors]
    var = [x.name for x in movie.actors]
    var = [x.name for x in movie.languages]
log_query_count('test')
took = time.time() - start_time
log.debug('Took %.2f seconds to query %i movies' % (took, len(imdb_urls)))

register_plugin(PerfTests, 'perftests', api_ver=2, debug=True, builtin=True)
register_parser_option('--perf-test', action='store', dest='perf_test', default='', help=SUPPRESS)
# NOTE(review): fragment — body of the --series-forget handler (indentation reconstructed).
task.manager.disable_tasks()
name = unicode(task.manager.options.series_forget[0])
if len(task.manager.options.series_forget) > 1:
    # remove by id
    identifier = task.manager.options.series_forget[1].upper()
    if identifier and name:
        try:
            forget_series_episode(name, identifier)
            print 'Removed episode `%s` from series `%s`.' % (identifier, name.capitalize())
        except ValueError, e:
            print e.message
else:
    # remove whole series
    try:
        forget_series(name)
        print 'Removed series `%s` from database.' % name.capitalize()
    except ValueError, e:
        print e.message
task.manager.config_changed()

register_plugin(SeriesReport, '--series', builtin=True)
register_plugin(SeriesForget, '--series-forget', builtin=True)
register_parser_option('--series', nargs='?', const=True, help='Display series summary.')
# NOTE(review): nargs='1-2' is not valid stdlib argparse — presumably handled by a
# project-custom parser; verify against the parser implementation.
register_parser_option('--series-forget', nargs='1-2', metavar=('NAME', 'EP_ID'),
                       help='Remove complete series or single episode from database: <NAME> [EPISODE]')
# NOTE(review): fragment — body of an IMDB-query benchmark method; near-duplicate of the
# chunk above but registered with SUPPRESS_HELP (optparse-era) instead of argparse SUPPRESS.
widgets = ['Benchmarking - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')]
bar = ProgressBar(widgets=widgets, maxval=len(imdb_urls)).start()
log_query_count('test')
start_time = time.time()
for index, url in enumerate(imdb_urls):
    bar.update(index)
    # Earlier strategies kept for comparison:
    #movie = session.query(Movie).filter(Movie.url == url).first()
    #movie = session.query(Movie).options(subqueryload(Movie.genres)).filter(Movie.url == url).one()
    movie = session.query(Movie).\
        options(joinedload_all(Movie.genres, Movie.languages, Movie.actors, Movie.directors)).\
        filter(Movie.url == url).first()
    # touch the relationship collections so lazy loads are triggered inside the timing window
    var = [x.name for x in movie.genres]
    var = [x.name for x in movie.directors]
    var = [x.name for x in movie.actors]
    var = [x.name for x in movie.languages]
log_query_count('test')
took = time.time() - start_time
log.debug('Took %.2f seconds to query %i movies' % (took, len(imdb_urls)))

register_plugin(PerfTests, 'perftests', api_ver=2, debug=True, builtin=True)
register_parser_option('--perf-test', action='store', dest='perf_test', default='', help=SUPPRESS_HELP)
# NOTE(review): fragment — body of a retry_failed handler method; `config`,
# `base_retry_time`, `retry_time_multiplier` are bound above this chunk.
for entry in task.failed:
    item = task.session.query(FailedEntry).filter(FailedEntry.title == entry['title']).\
        filter(FailedEntry.url == entry['original_url']).first()
    if item:
        # Do not count the failure on this run when adding additional retry time
        fail_count = item.count - 1
        # Don't bother saving this if it has met max retries
        if fail_count >= config['max_retries']:
            continue
        # Timedeltas do not allow floating point multiplication. Convert to seconds
        # and then back to avoid this.
        base_retry_secs = base_retry_time.days * 86400 + base_retry_time.seconds
        retry_secs = base_retry_secs * (retry_time_multiplier ** fail_count)
        retry_time = timedelta(seconds=retry_secs)
    else:
        retry_time = base_retry_time
    if self.backlog:
        self.backlog.add_backlog(task, entry, amount=retry_time)
    if retry_time:
        fail_reason = item.reason if item else entry.get('reason', 'unknown')
        task.reject(entry, reason='Waiting before trying failed entry again. (failure reason: %s)' % fail_reason,
                    remember_time=retry_time)
        # Cause a task rerun, to look for alternate releases
        task.rerun()

register_plugin(PluginFailed, '--failed', builtin=True, api_ver=2)
register_plugin(FilterRetryFailed, 'retry_failed', builtin=True, api_ver=2)
register_parser_option('--failed', action='store_true', dest='failed', default=0,
                       help='List recently failed entries.')
register_parser_option('--clear', action='store_true', dest='clear_failed', default=0,
                       help='Clear recently failed list.')
# NOTE(review): fragment — tail of a seen-forget method (`title` bound above this chunk).
se = task.session.query(SeenEntry).filter(SeenEntry.title == title).first()
if se:
    log.debug("Forgotten '%s' (%s fields)" % (title, len(se.fields)))
    task.session.delete(se)
    return True


@event('manager.db_cleanup')
def db_cleanup(session):
    # Deliberately disabled — everything after the early return is dead code
    # kept for when ticket #1321 is resolved.
    log.debug('TODO: Disabled because of ticket #1321')
    return
    # Remove seen fields over a year old
    result = session.query(SeenField).filter(SeenField.added < datetime.now() - timedelta(days=365)).delete()
    if result:
        log.verbose('Removed %d seen fields older than 1 year.' % result)


register_plugin(FilterSeen, 'seen', builtin=True, api_ver=2)
register_plugin(SeenSearch, '--seen-search', builtin=True)
register_plugin(SeenCmd, '--seen', builtin=True)
register_plugin(SeenForget, '--forget', builtin=True)
register_plugin(MigrateSeen, 'migrate_seen', builtin=True)
register_parser_option('--forget', action='store', dest='forget', default=False, metavar='TASK|VALUE',
                       help='Forget task (completely) or given title or url.')
register_parser_option('--seen', action='store', dest='seen', default=False, metavar='VALUE',
                       help='Add title or url to what has been seen in tasks.')
register_parser_option('--seen-search', action='store', dest='seen_search', default=False, metavar='VALUE',
                       help='Search given text from seen database.')
# NOTE(review): fragment — __call__ body of the --archive argparse action. The usage
# string's original line breaks/column alignment were lost in the collapse and are
# reconstructed approximately here; verify against the project history. The stray
# `')` after "will be used." is reproduced as found.
usage = '''
Usage for --archive ACTION args, these are subjected to change in near future.
 consolidate              Migrate old archive data to new model, may take a long time.
 search [@TAG]s KEYWORDS  Search from the archive.
 inject ID [ID] [yes]     Inject as accepted from archive by ID\'s. If yes is given immortal flag will be used.')
 tag-source SRC TAG [TAG] Tag all archived items within source with given tag.'''

if not values:
    raise ArgumentError(self, usage)
action = values[0].lower()
if action not in ArchiveCli.ACTIONS:
    raise ArgumentError(self, usage)
options['action'] = action
options['args'] = [unicode(arg) for arg in values[1:]]

register_plugin(Archive, 'archive', api_ver=2)
register_plugin(ArchiveInject, 'archive_inject', api_ver=2, builtin=True)
register_plugin(UrlrewriteArchive, 'flexget_archive', groups=['search'])
register_plugin(ArchiveCli, '--archive-cli', builtin=True, api_ver=2)
register_parser_option('--archive', nargs='*', action=ArchiveCLIAction, metavar=('ACTION', 'ARGS'),
                       help='Access [search|inject|tag-source|consolidate] functionalities. '
                            'Without any args display help about those.')
# NOTE(review): fragment — __iter__ is a method of an ExplainQuery class whose header
# is above this chunk (indentation reconstructed).
def __iter__(self):
    """Log the SQL, its EXPLAIN plan and wall-clock time, then iterate normally."""
    log.info('Query:\n\t%s' % unicode(self).replace('\n', '\n\t'))
    explain = self.session.execute(Explain(self)).fetchall()
    text = '\n\t'.join('|'.join(str(x) for x in line) for line in explain)
    before = time()
    result = Query.__iter__(self)
    log.info('Query Time: %0.3f Explain Query Plan:\n\t%s' % (time() - before, text))
    return result


@event('manager.execute.started')
def register_sql_explain(man):
    # Wrap Session.__init__ so every new session uses the explaining query class
    if man.options.explain_sql:
        maininit = manager.Session.__init__

        def init(*args, **kwargs):
            kwargs['query_cls'] = ExplainQuery
            return maininit(*args, **kwargs)
        manager.Session.__init__ = init


@event('manager.execute.completed')
def deregister_sql_explain(man):
    # Restore a plain sessionmaker after the run
    if man.options.explain_sql:
        manager.Session = sessionmaker()


register_parser_option('--explain-sql', action='store_true', dest='explain_sql', default=False,
                       help=SUPPRESS_HELP)
def on_task_output(self, task):
    """Dump undecided/accepted/rejected entries to the console.

    Active when the `dump` plugin is configured on the task or --dump was given.
    `--dump eval` evaluates lazy fields; `--dump trace` shows field traces.
    """
    if 'dump' not in task.config and not task.manager.options.dump_entries:
        return
    eval_lazy = task.manager.options.dump_entries == 'eval'
    trace = task.manager.options.dump_entries == 'trace'
    # idiom fix: `entry not in` instead of `not entry in`
    undecided = [entry for entry in task.entries if entry not in task.accepted]
    if undecided:
        console('-- Undecided: --------------------------')
        dump(undecided, task.manager.options.debug, eval_lazy, trace)
    if task.accepted:
        console('-- Accepted: ---------------------------')
        dump(task.accepted, task.manager.options.debug, eval_lazy, trace)
    if task.rejected:
        console('-- Rejected: ---------------------------')
        dump(task.rejected, task.manager.options.debug, eval_lazy, trace)


register_plugin(OutputDump, 'dump', builtin=True)
register_parser_option('--dump', nargs='?', choices=['eval', 'trace'], const=True, dest='dump_entries',
                       help='Display all entries in task with details. '
                            'Arg `--dump eval` will evaluate all lazy fields.')
while trimmed and not trimmed[0]: trimmed.pop(0) # Return a single string: return '\n'.join(trimmed) @event('manager.startup') def print_doc(manager): if manager.options.doc: manager.disable_tasks() plugin_name = manager.options.doc plugin = plugins.get(plugin_name, None) if plugin: if not plugin.instance.__doc__: print 'Plugin %s does not have documentation' % plugin_name else: print '' print trim(plugin.instance.__doc__) print '' else: print 'Could not find plugin %s' % plugin_name register_parser_option( '--doc', action='store', dest='doc', default=False, metavar='PLUGIN', help='Display plugin documentation. See also --plugins.')
# NOTE(review): fragment — `session.close()` is the tail of a previous method;
# on_task_exit belongs to a class whose header is above this chunk.
    session.close()

def on_task_exit(self, task):
    """Add accepted entries to history"""
    for entry in task.accepted:
        item = History()
        item.task = task.name
        item.filename = entry.get('output', None)
        item.title = entry['title']
        item.url = entry['url']
        reason = ''
        if 'reason' in entry:
            reason = ' (reason: %s)' % entry['reason']
        item.details = 'Accepted by %s%s' % (entry.get('accepted_by', '<unknown>'), reason)
        task.session.add(item)

register_plugin(PluginHistory, '--history', builtin=True)
register_parser_option('--history', action='store_true', dest='history', default=False,
                       help='List 50 latest accepted entries.')
# NOTE(review): --downloads shares dest='history' — acts as a hidden alias.
register_parser_option('--downloads', action='store_true', dest='history', default=False,
                       help=SUPPRESS)
""" Dumps task config in STDOUT in yaml at exit or abort event. """ @priority(-255) def on_task_start(self, task): if task.manager.options.dump_config: import yaml print '--- config from task: %s' % task.name print yaml.safe_dump(task.config) print '---' task.abort(silent=True) if task.manager.options.dump_config_python: print task.config task.abort(silent=True) register_plugin(OutputDumpConfig, 'dump_config', debug=True, builtin=True) register_parser_option( '--dump-config', action='store_true', dest='dump_config', default=False, help= 'Display the config of each feed after preset merging/config generation occurs.' ) register_parser_option('--dump-config-python', action='store_true', dest='dump_config_python', default=False, help=SUPPRESS)
# NOTE(review): fragment — the leading `return` closes a branch started above this
# chunk (indentation reconstructed).
        return
    # verbose undecided entries
    if task.manager.options.verbose:
        undecided = False
        for entry in task.entries:
            if entry in task.accepted:
                continue
            undecided = True
            log.verbose('UNDECIDED: `%s`' % entry['title'])
        if undecided:
            log_once('Undecided entries have not been accepted or rejected. If you expected these to reach output,'
                     ' you must set up filter plugin(s) to accept them.', logger=log)


register_plugin(Verbose, 'verbose', builtin=True)
register_parser_option('-v', '--verbose', action='store_true', dest='verbose', default=False,
                       help='Verbose undecided entries.')
register_parser_option('-s', '--silent', action='store_true', dest='silent', default=False,
                       help='Don\'t verbose any actions (accept, reject, fail).')
# NOTE(review): fragment — `self.heapy = None` is the tail of __init__; the methods
# belong to OutputMemUsage whose class header is above this chunk.
        self.heapy = None

    def validator(self):
        from flexget import validator
        return validator.factory('boolean')

    def on_process_start(self, task):
        if not task.manager.options.mem_usage:
            return
        # start only once
        if self.heapy:
            return
        self.heapy = hpy()

    def on_process_end(self, task):
        if not task.manager.options.mem_usage:
            return
        # prevents running this multiple times ...
        if not self.heapy:
            return
        print 'Calculating memory usage:'
        print self.heapy.heap()
        print '-' * 79
        print self.heapy.heap().get_rp(40)
        self.heapy = None


register_plugin(OutputMemUsage, 'mem_usage', builtin=True)
register_parser_option('--mem-usage', action='store_true', dest='mem_usage', default=False,
                       help='Display memory usage debug information')
# NOTE(review): fragment — interior of a line-parsing loop in the tail plugin; the
# enclosing loop and the branch the first `entries.append` belongs to are above this
# chunk. Nesting below is reconstructed and should be verified against history.
                    entries.append(entry)
                else:
                    log.info('Invalid data, entry field %s is already found once. Ignoring entry.' % field)
                # start new entry
                entry = Entry()
                used = {}
            # add field to entry
            entry[field] = match.group(1)
            used[field] = True
            log.debug('found field: %s value: %s' % (field, entry[field]))
            # if all fields have been found
            if len(used) == len(entry_config):
                # check that entry has at least title and url
                if not entry.isvalid():
                    log.info('Invalid data, constructed entry is missing mandatory fields (title or url)')
                else:
                    self.format_entry(entry, format_config)
                    entries.append(entry)
                    log.debug('Added entry %s' % entry)
                # start new entry
                entry = Entry()
                used = {}
        return entries


register_plugin(InputTail, 'tail', api_ver=2)
register_plugin(ResetTail, '--tail-reset', builtin=True)
register_parser_option('--tail-reset', action='store', dest='tail_reset', default=False, metavar='FILE',
                       help='Reset tail position for a file.')
import logging
from flexget.plugin import register_parser_option
from flexget.event import event
from flexget.schema import reset_schema
from flexget.utils.tools import console

log = logging.getLogger('reset_plugin')


@event('manager.upgrade', priority=255)
def reset_plugin(manager):
    """Handle --reset-plugin PLUGIN: drop and recreate that plugin's DB tables.

    Runs early in the upgrade phase (priority 255) and disables task execution
    for the rest of the run.
    """
    if not manager.options.reset_plugin:
        return
    manager.disable_tasks()
    plugin = manager.options.reset_plugin
    try:
        reset_schema(plugin)
        console('The database for `%s` has been reset.' % plugin)
    # `except X as e` (PEP 3110) instead of the Python-2-only `except X, e`,
    # consistent with the modernized version of this module elsewhere in the tree.
    except ValueError as e:
        console('Unable to reset %s: %s' % (plugin, e.message))


register_parser_option('--reset-plugin', action='store', dest='reset_plugin', default=None, metavar='PLUGIN',
                       help='Reset the database for given PLUGIN')
# NOTE(review): fragment — `for f in onlytasks):` is the tail of a condition started
# above this chunk, presumably `if not any(<match expr> for f in onlytasks):` — verify.
           for f in onlytasks):
    task.enabled = False


class ManualTask(object):
    """Only execute task when specified with --task"""

    def validator(self):
        from flexget import validator
        return validator.factory('boolean')

    def on_process_start(self, task):
        # Make sure we need to run
        if not task.config['manual']:
            return
        # If --task hasn't been specified disable this plugin
        if not task.manager.options.onlytask:
            log.debug('Disabling task %s' % task.name)
            task.enabled = False


register_plugin(OnlyTask, '--task', builtin=True)
register_plugin(ManualTask, 'manual')
register_parser_option('--task', action='store', dest='onlytask', default=None, metavar='TASK[,...]',
                       help='Run only specified task(s), optionally using glob patterns ("tv-*").'
                            ' Matching is case-insensitive.')
# NOTE(review): fragment — module globals `_start`, `performance`, `query_count` are
# defined above this chunk.
@event('feed.execute.before_plugin')
def before(feed, keyword):
    # Snapshot wall-clock time and DB query count before each plugin phase
    fd = _start.setdefault(feed.name, {})
    fd.setdefault('time', {})[keyword] = time.time()
    fd.setdefault('queries', {})[keyword] = query_count


@event('feed.execute.after_plugin')
def after(feed, keyword):
    took = time.time() - _start[feed.name]['time'][keyword]
    queries = query_count - _start[feed.name]['queries'][keyword]
    # Store results, increases previous values
    pd = performance.setdefault(feed.name, {})
    data = pd.setdefault(keyword, {})
    data['took'] = data.get('took', 0) + took
    data['queries'] = data.get('queries', 0) + queries


@event('manager.execute.completed')
def results(manager):
    # Only report phases that were notably slow or query-heavy
    for name, data in performance.iteritems():
        log.info('Performance results for feed %s:' % name)
        for keyword, results in data.iteritems():
            took = results['took']
            queries = results['queries']
            if took > 0.1 or queries > 10:
                log.info('%-15s took %0.2f sec (%s queries)' % (keyword, took, queries))


register_parser_option('--debug-perf', action='store_true', dest='debug_perf', default=False,
                       help=SUPPRESS_HELP)
class OutputDump(object): """ Dummy plugin for testing, outputs all entries to stdout """ def validator(self): from flexget import validator return validator.factory('boolean') def on_feed_output(self, feed): if not 'dump' in feed.config and not feed.manager.options.dump_entries: return #from flexget.utils.tools import sanitize #import yaml undecided = [entry for entry in feed.entries if not entry in feed.accepted] if undecided: print '-- Undecided: --------------------------' dump(undecided, feed.manager.options.debug) if feed.accepted: print '-- Accepted: ---------------------------' dump(feed.accepted, feed.manager.options.debug) if feed.rejected: print '-- Rejected: ---------------------------' dump(feed.rejected, feed.manager.options.debug) register_plugin(OutputDump, 'dump', builtin=True) register_parser_option('--dump', action='store_true', dest='dump_entries', default=False, help='Display all feed entries')
# NOTE(review): fragment — body of the interactive regexp-tester method (uses `self`
# and `task` bound above this chunk); indentation reconstructed.
print 'Hi there, welcome to try regexps in realtime!'
print 'Press ^D or type \'exit\' to continue. Type \'continue\' to continue non-interactive execution.'
print 'Task \'%s\' has %s entries, enter regexp to see what matches it.' % (task.name, len(task.entries))
while (True):
    try:
        s = raw_input('--> ')
        if s == 'exit':
            break
        if s == 'abort' or s == 'continue':
            self.abort = True
            break
    except EOFError:
        # ^D ends the interactive session
        break
    count = 0
    for entry in task.entries:
        try:
            match, field = self.matches(entry, s)
            if match:
                print 'Title: %-40s URL: %-30s From: %s' % (entry['title'], entry['url'], field)
                count += 1
        except:
            # NOTE(review): bare except — presumably meant to catch re.error only
            print 'Invalid regular expression'
            break
    print '%s of %s entries matched' % (count, len(task.entries))
print 'Bye!'

register_plugin(PluginTryRegexp, '--try-regexp', builtin=True)
register_parser_option('--try-regexp', action='store_true', dest='try_regexp', default=False,
                       help='Try regular expressions interactively.')
# Make a new dict with replacements done on keys and values return dict(map(self.replace_item, kv_pair) for kv_pair in item.iteritems()) else: # We don't know how to do replacements on this item, just return it return item def parse_replaces(self, feed): """Parses commandline string into internal dict""" s = feed.manager.options.cli_config if not s: return False # nothing to process if self.replaces: return True # already parsed for item in s.split(','): try: key, value = item.split('=') except ValueError: log.critical('Invalid --cli-config, no name for %s' % item) continue self.replaces[key.strip()] = value.strip() return True def on_process_start(self, feed): if self.parse_replaces(feed): feed.config = self.replace_item(feed.config) log.debug(feed.config) register_plugin(CliConfig, 'cli_config', builtin=True) register_parser_option('--cli-config', action='store', dest='cli_config', default=False, metavar='PARAMS', help='Configuration parameters trough commandline. See --doc cli_config.')
# NOTE(review): fragment — likely decorated with an @event hook above this chunk.
def plugins_summary(manager):
    """Handle --plugins / --list: print a table of all registered plugins."""
    if manager.options.plugins:
        manager.disable_tasks()
        print '-' * 79
        print '%-20s%-30s%s' % ('Name', 'Roles (priority)', 'Info')
        print '-' * 79
        # print the list
        for name in sorted(plugins):
            plugin = plugins[name]
            # do not include test classes, unless in debug mode
            if plugin.get('debug_plugin', False) and not manager.options.debug:
                continue
            flags = []
            if plugin.instance.__doc__:
                flags.append('--doc')
            if plugin.builtin:
                flags.append('builtin')
            if plugin.debug:
                flags.append('debug')
            handlers = plugin.phase_handlers
            roles = ', '.join('%s(%s)' % (phase, handlers[phase].priority) for phase in handlers)
            print '%-20s%-30s%s' % (name, roles, ', '.join(flags))
        print '-' * 79


register_parser_option('--plugins', action='store_true', dest='plugins', default=False,
                       help='Print registered plugins summary')
# --list is a hidden alias (same dest)
register_parser_option('--list', action='store_true', dest='plugins', default=False, help=SUPPRESS)
def twitter_auth(self): try: import tweepy except: raise PluginError print "Please input your Consumer key/secret, if you do not have one register for one on http://dev.twitter.com/" print "" consumer_key = raw_input('Consumer Key: ').strip() consumer_secret = raw_input('Consumer Secret: ').strip() print "Attepting to authenticate..." auth = tweepy.OAuthHandler(consumer_key, consumer_secret) auth_url = auth.get_authorization_url() print "Please open the following URL in a browser and autheticate Flexget to use the Twitter account you wish to output to:" print auth_url print "Once completed, please provide the PIN code that Twitter returned" verifier = raw_input('PIN: ').strip() auth.get_access_token(verifier) print "Please add the following to your config.yml, either under your tasks or global as required" print "" print "twitter:" print " consumerkey: %s" % consumer_key print " consumersecret: %s" % consumer_secret print " accesskey: %s" % auth.access_token.key print " accesssecret: %s" % auth.access_token.secret register_plugin(TwitterAuth, '--twitter-auth', builtin=True) register_parser_option('--twitter-auth', nargs='?', const=True, help='Authenticate with Twitter.')
# NOTE(review): fragment — interior of a history-listing loop (`item` bound by a loop
# above this chunk). Label padding inside the strings may have been wider originally
# and was collapsed — verify.
    print ' Task : %s' % item.task
    print ' Title : %s' % item.title.encode('utf-8')
    print ' Url : %s' % item.url
    if item.filename:
        print ' Stored : %s' % item.filename
    print ' Time : %s' % item.time.strftime("%c")
    print ' Details : %s' % item.details
    print '-' * 79
session.close()

def on_task_exit(self, task):
    """Add accepted entries to history"""
    for entry in task.accepted:
        item = History()
        item.task = task.name
        item.filename = entry.get('output', None)
        item.title = entry['title']
        item.url = entry['url']
        reason = ''
        if 'reason' in entry:
            reason = ' (reason: %s)' % entry['reason']
        item.details = 'Accepted by %s%s' % (entry.get('accepted_by', '<unknown>'), reason)
        task.session.add(item)

register_plugin(PluginHistory, '--history', builtin=True)
register_parser_option('--history', action='store_true', dest='history', default=False,
                       help='List 50 latest accepted entries.')
register_parser_option('--downloads', action='store_true', dest='history', default=False,
                       help=SUPPRESS)
# NOTE(review): fragment — tail of the '__list__' branch of the --reset-plugin handler
# (outer loop over plugin_schemas and the matching `if` are above this chunk).
        line_len = 0
        # wrap the table-name column at ~53 chars, continuing on an indented line
        for name in v['tables']:
            if line_len + len(name) + 2 >= 53:
                tables += '\n'
                tables += ' ' * 25
                line_len = len(name) + 2
            else:
                line_len += len(name) + 2
            tables += name + ', '
        tables = tables.rstrip(', ')
        console('%-20s %s %s' % (k, v['version'], tables))
else:
    try:
        reset_schema(plugin)
        console('The database for `%s` has been reset.' % plugin)
    except ValueError as e:
        console('Unable to reset %s: %s' % (plugin, e.message))


register_parser_option('--reset-plugin', action='store', nargs='?', dest='reset_plugin', const='__list__',
                       default=None, metavar='PLUGIN',
                       help='Reset the database for given PLUGIN. List known names without PLUGIN argument.')
# NOTE(review): fragment — interior of a loop over configured search plugins; the
# outer for-loop (to which `continue`/`break` and the trailing for-else belong) is
# above this chunk. Nesting reconstructed — verify against history.
        if isinstance(name, dict):
            # assume the name is the first/only key in the dict.
            name, search_config = name.items()[0]
        log.verbose('Searching `%s` from %s' % (entry['title'], name))
        try:
            results = plugins[name].search(entry, search_config)
            matcher = SequenceMatcher(a=entry['title'])
            for result in results:
                matcher.set_seq2(result['title'])
                # accept only near-identical titles
                if matcher.ratio() > 0.9:
                    log.debug('Found url: %s', result['url'])
                    entry['url'] = result['url']
                    break
            else:
                continue
            break
        except (PluginError, PluginWarning) as pw:
            log.verbose('Failed: %s' % pw.value)
            continue
    # Search failed
    else:
        # If I don't have a URL, doesn't matter if I'm immortal...
        entry['immortal'] = False
        entry.reject('search failed')


register_plugin(PluginSearch, 'urlrewrite_search', api_ver=2)
register_plugin(SearchPlugins, '--search-plugins', builtin=True)
register_parser_option('--search-plugins', action='store_true', dest='search_plugins', default=False,
                       help='List supported search plugins.')
"""Add accepted entries to history""" if config is False: return # Explicitly disabled with configuration for entry in task.accepted: item = History() item.task = task.name item.filename = entry.get('output', None) item.title = entry['title'] item.url = entry['url'] reason = '' if 'reason' in entry: reason = ' (reason: %s)' % entry['reason'] item.details = 'Accepted by %s%s' % (entry.get( 'accepted_by', '<unknown>'), reason) task.session.add(item) register_plugin(PluginHistory, 'history', builtin=True, api_ver=2) register_parser_option('--history', action='store', nargs='?', dest='history', const=50, help='List latest accepted entries. Default: 50') register_parser_option('--downloads', action='store_true', dest='history', default=False, help=SUPPRESS)
task.session.add( RememberEntry(title=entry['title'], url=entry['original_url'], task_id=remember_task_id, rejected_by=task.current_plugin, reason=kwargs.get('reason'), expires=expires)) # The test stops passing when this is taken out for some reason... task.session.flush() @event('manager.db_cleanup') def db_cleanup(session): # Remove entries older than 30 days result = session.query(RememberEntry).filter( RememberEntry.added < datetime.now() - timedelta(days=30)).delete() if result: log.verbose('Removed %d entries from remember rejected table.' % result) register_plugin(FilterRememberRejected, 'remember_rejected', builtin=True, api_ver=2) register_parser_option( '--forget-rejected', action='store_true', dest='forget_rejected', help='Forget all previous rejections so entries can be processed again.')
# NOTE(review): fragment — near-duplicate of the explain-sql chunk above, registered
# with argparse SUPPRESS instead of SUPPRESS_HELP. __iter__ belongs to an
# ExplainQuery class whose header is above this chunk.
def __iter__(self):
    """Log the SQL, its EXPLAIN plan and wall-clock time, then iterate normally."""
    log.info('Query:\n\t%s' % unicode(self).replace('\n', '\n\t'))
    explain = self.session.execute(Explain(self)).fetchall()
    text = '\n\t'.join('|'.join(str(x) for x in line) for line in explain)
    before = time()
    result = Query.__iter__(self)
    log.info('Query Time: %0.3f Explain Query Plan:\n\t%s' % (time() - before, text))
    return result


@event('manager.execute.started')
def register_sql_explain(man):
    # Wrap Session.__init__ so every new session uses the explaining query class
    if man.options.explain_sql:
        maininit = manager.Session.__init__

        def init(*args, **kwargs):
            kwargs['query_cls'] = ExplainQuery
            return maininit(*args, **kwargs)
        manager.Session.__init__ = init


@event('manager.execute.completed')
def deregister_sql_explain(man):
    # Restore a plain sessionmaker after the run
    if man.options.explain_sql:
        manager.Session = sessionmaker()


register_parser_option('--explain-sql', action='store_true', dest='explain_sql', default=False,
                       help=SUPPRESS)
# NOTE(review): fragment — the leading `return` closes a guard (`if not
# manager.options.reset_plugin:`) above this chunk. Header-string column widths may
# have been collapsed — verify.
    return
manager.disable_tasks()
plugin = manager.options.reset_plugin
if plugin == '__list__':
    console('%-20s Ver Tables' % 'Name')
    console('-' * 79)
    for k, v in sorted(plugin_schemas.iteritems()):
        tables = ''
        line_len = 0
        # wrap the table-name column at ~53 chars, continuing on an indented line
        for name in v['tables']:
            if line_len + len(name) + 2 >= 53:
                tables += '\n'
                tables += ' ' * 25
                line_len = len(name) + 2
            else:
                line_len += len(name) + 2
            tables += name + ', '
        tables = tables.rstrip(', ')
        console('%-20s %s %s' % (k, v['version'], tables))
else:
    try:
        reset_schema(plugin)
        console('The database for `%s` has been reset.' % plugin)
    except ValueError as e:
        console('Unable to reset %s: %s' % (plugin, e.message))


register_parser_option('--reset-plugin', action='store', nargs='?', dest='reset_plugin', const='__list__',
                       default=None, metavar='PLUGIN',
                       help='Reset the database for given PLUGIN. List known names without PLUGIN argument.')
# NOTE(review): fragment — interior of an interval_expired loop (enclosing for-loop
# and `de` binding above this chunk); on_task_input is a method of Discover.
                log.debug('interval not met')
                interval_count += 1
                entry.reject('discover interval not met')
                entry.complete()
                continue
            de.last_execution = datetime.datetime.now()
            log.debug('interval passed')
            result.append(entry)
        if interval_count:
            log.verbose('Discover interval of %s not met for %s entries. Use --discover-now to override.' %
                        (config['interval'], interval_count))
        return result

    def on_task_input(self, task, config):
        task.no_entries_ok = True
        entries = self.execute_inputs(config, task)
        log.verbose('Discovering %i titles ...' % len(entries))
        if len(entries) > 500:
            log.critical('Looks like your inputs in discover configuration produced '
                         'over 500 entries, please reduce the amount!')
        # TODO: the entries that are estimated should be given priority over expiration
        entries = self.interval_expired(config, task, entries)
        if not config.get('ignore_estimations', False):
            entries = self.estimated(entries)
        return self.execute_searches(config, entries)


register_plugin(Discover, 'discover', api_ver=2)
register_parser_option('--discover-now', action='store_true', dest='discover_now', default=False,
                       help='Immediately try to discover everything.')
# NOTE(review): fragment — body of an entry-reject handler (`entry`, `task`, `kwargs`,
# `remember_time` bound above this chunk).
    expires = None
    if remember_time:
        if isinstance(remember_time, basestring):
            remember_time = parse_timedelta(remember_time)
        expires = datetime.now() + remember_time
    if not entry.get('title') or not entry.get('original_url'):
        log.debug('Can\'t remember rejection for entry without title or url.')
        return
    message = 'Remembering rejection of `%s`' % entry['title']
    if remember_time:
        message += ' for %i minutes' % (remember_time.seconds / 60)
    log.info(message)
    (remember_task_id,) = task.session.query(RememberTask.id).filter(RememberTask.name == task.name).first()
    task.session.add(RememberEntry(title=entry['title'], url=entry['original_url'], task_id=remember_task_id,
                                   rejected_by=task.current_plugin, reason=kwargs.get('reason'), expires=expires))
    # The test stops passing when this is taken out for some reason...
    task.session.flush()


@event('manager.db_cleanup')
def db_cleanup(session):
    # Remove entries older than 30 days
    result = session.query(RememberEntry).filter(RememberEntry.added < datetime.now() - timedelta(days=30)).delete()
    if result:
        log.verbose('Removed %d entries from remember rejected table.' % result)


register_plugin(FilterRememberRejected, 'remember_rejected', builtin=True, api_ver=2)
register_parser_option('--forget-rejected', action='store_true', dest='forget_rejected',
                       help='Forget all previous rejections so entries can be processed again.')
# 2, args is the minimum allowed (operation + item) for actions other than list if len(values) >= 2: options['what'] = values[1] # 3, quality if len(values) >= 3: try: options['quality'] = qualities.Requirements(values[2]) except ValueError, e: raise ArgumentError(self, '`%s` is an invalid quality requirement string: %s' % (values[2], e.message)) else: options['quality'] = qualities.Requirements('any') # TODO: Get default from config somehow? # why not use the quality user has queued most, ie option called 'auto' ? # and if none is queued default to something good like '720p bluray' # 4, force download if len(values) >= 4: options['force'] = str_to_boolean(values[3]) else: options['force'] = True if len(values) > 4: raise ArgumentError(self, 'Too many arguments passed.\nUsage: ' + USAGE) register_plugin(MovieQueueManager, 'movie_queue_manager', builtin=True) register_parser_option('--movie-queue', nargs='*', metavar=('ACTION', 'TITLE'), action=MovieQueueAction, help=USAGE)
# Remove seen fields over a year old result = session.query(SeenField).filter( SeenField.added < datetime.now() - timedelta(days=365)).delete() if result: log.verbose('Removed %d seen fields older than 1 year.' % result) register_plugin(FilterSeen, 'seen', builtin=True, api_ver=2) register_plugin(SeenSearch, '--seen-search', builtin=True) register_plugin(SeenCmd, '--seen', builtin=True) register_plugin(SeenForget, '--forget', builtin=True) register_plugin(MigrateSeen, 'migrate_seen', builtin=True) register_parser_option('--forget', action='store', dest='forget', default=False, metavar='TASK|VALUE', help='Forget task (completely) or given title or url.') register_parser_option('--seen', action='store', dest='seen', default=False, metavar='VALUE', help='Add title or url to what has been seen in tasks.') register_parser_option('--seen-search', action='store', dest='seen_search', default=False, metavar='VALUE', help='Search given text from seen database.')
# NOTE(review): fragment — interior of OnlyFeed.on_process_start; the loop binding
# `onlyfeed` and the `enabled_feeds`/`onlyfeeds` lists are above this chunk.
        if onlyfeed.lower() not in enabled_feeds:
            # If any of the feeds do not exist, exit with an error
            feed.manager.disable_feeds()
            raise PluginError('Could not find feed \'%s\'' % onlyfeed, log)
    # If current feed is not among the specified feeds, disable it
    if feed.name.lower() not in [f.lower() for f in onlyfeeds]:
        feed.enabled = False


class ManualFeed(object):
    """Only execute feed when specified with --feed"""

    def validator(self):
        from flexget import validator
        return validator.factory('boolean')

    def on_process_start(self, feed):
        # Make sure we need to run
        if not feed.config['manual']:
            return
        # If --feed hasn't been specified disable this plugin
        if not feed.manager.options.onlyfeed:
            log.debug('Disabling feed %s' % feed.name)
            feed.enabled = False


register_plugin(OnlyFeed, '--feed', builtin=True)
register_plugin(ManualFeed, 'manual')
register_parser_option('--feed', action='store', dest='onlyfeed', default=None, metavar='FEED',
                       help='Run only specified feed.')
# NOTE(review): fragment — the first lines are the tail of a list method
# (`downloaded` bound above); `clear` belongs to MovieQueueManager whose class header
# is above this chunk.
        items = queue_get(session=session, downloaded=downloaded)
        console("-" * 79)
        console("%-10s %-7s %-37s %-15s %s" % ("IMDB id", "TMDB id", "Title", "Quality", "Force"))
        console("-" * 79)
        for item in items:
            console("%-10s %-7s %-37s %-15s %s" % (item.imdb_id, item.tmdb_id, item.title, item.quality,
                                                   item.immortal))
        if not items:
            console("No results")
        console("-" * 79)

    def clear(self, session):
        """Delete movie queue"""
        items = queue_get(session=session, downloaded=False)
        console("Removing the following movies from movie queue:")
        console("-" * 79)
        for item in items:
            console(item.title)
            queue_del(item.imdb_id)
        if not items:
            console("No results")
        console("-" * 79)


register_plugin(MovieQueueManager, "movie_queue_manager", builtin=True)
register_parser_option("--movie-queue", action="callback", callback=MovieQueueManager.optik_movie_queue,
                       help=USAGE)
def parse_replaces(self, task):
    """Parses commandline string into internal dict.

    Reads `--cli-config` (comma-separated `key=value` pairs) and fills
    self.replaces. Returns True when replacements are available (parsed
    now or on an earlier call), False when there is nothing to process.
    Pairs without a `=` are reported and skipped.
    """
    s = task.manager.options.cli_config
    if not s:
        return False  # nothing to process
    if self.replaces:
        return True  # already parsed
    for item in s.split(','):
        try:
            key, value = item.split('=')
        except ValueError:
            log.critical('Invalid --cli-config, no name for %s' % item)
            continue
        self.replaces[key.strip()] = value.strip()
    return True

def on_process_start(self, task):
    # Substitute the parsed --cli-config variables into the task config.
    if self.parse_replaces(task):
        task.config = self.replace_item(task.config)
        log.debug(task.config)

register_plugin(CliConfig, 'cli_config', builtin=True)
register_parser_option(
    '--cli-config', action='store', dest='cli_config', default=False,
    metavar='PARAMS',
    help='Configuration parameters trough commandline. See --doc cli_config.')
from __future__ import unicode_literals, division, absolute_import import logging from flexget.plugin import register_parser_option from flexget.event import event from flexget.schema import reset_schema from flexget.utils.tools import console log = logging.getLogger('reset_plugin') @event('manager.upgrade', priority=255) def reset_plugin(manager): if not manager.options.reset_plugin: return manager.disable_tasks() plugin = manager.options.reset_plugin try: reset_schema(plugin) console('The database for `%s` has been reset.' % plugin) except ValueError as e: console('Unable to reset %s: %s' % (plugin, e.message)) register_parser_option('--reset-plugin', action='store', dest='reset_plugin', default=None, metavar='PLUGIN', help='Reset the database for given PLUGIN')
import logging
from argparse import SUPPRESS

from flexget.plugin import register_plugin, register_parser_option, priority

log = logging.getLogger('dump_config')


class OutputDumpConfig(object):
    """
    Dumps task config in STDOUT in yaml at exit or abort event.
    """

    @priority(-255)
    def on_task_start(self, task):
        # --dump-config: print the effective config as YAML, then abort the
        # task silently so nothing actually executes.
        if task.manager.options.dump_config:
            import yaml
            print '--- config from task: %s' % task.name
            print yaml.safe_dump(task.config)
            print '---'
            task.abort(silent=True)
        # --dump-config-python (hidden option): print the raw python dict.
        if task.manager.options.dump_config_python:
            print task.config
            task.abort(silent=True)


register_plugin(OutputDumpConfig, 'dump_config', debug=True, builtin=True)
register_parser_option('--dump-config', action='store_true', dest='dump_config', default=False,
                       help='Display the config of each feed after preset merging/config generation occurs.')
register_parser_option('--dump-config-python', action='store_true', dest='dump_config_python',
                       default=False, help=SUPPRESS)
if indent < sys.maxint: for line in lines[1:]: trimmed.append(line[indent:].rstrip()) # Strip off trailing and leading blank lines: while trimmed and not trimmed[-1]: trimmed.pop() while trimmed and not trimmed[0]: trimmed.pop(0) # Return a single string: return '\n'.join(trimmed) @event('manager.startup') def print_doc(manager): if manager.options.doc: manager.disable_tasks() plugin_name = manager.options.doc plugin = plugins.get(plugin_name, None) if plugin: if not plugin.instance.__doc__: print 'Plugin %s does not have documentation' % plugin_name else: print '' print trim(plugin.instance.__doc__) print '' else: print 'Could not find plugin %s' % plugin_name register_parser_option('--doc', action='store', dest='doc', default=False, metavar='PLUGIN', help='Display plugin documentation. See also --plugins.')
from flexget import validator return validator.factory('boolean') def on_process_start(self, task): if not task.manager.options.mem_usage: return # start only once if self.heapy: return self.heapy = hpy() def on_process_end(self, task): if not task.manager.options.mem_usage: return # prevents running this multiple times ... if not self.heapy: return print 'Calculating memory usage:' print self.heapy.heap() print '-' * 79 print self.heapy.heap().get_rp(40) self.heapy = None register_plugin(OutputMemUsage, 'mem_usage', builtin=True) register_parser_option('--mem-usage', action='store_true', dest='mem_usage', default=False, help='Display memory usage debug information')
def validator(self):
    # Config is either a single preset name or a list of names.
    root = validator.factory()
    root.accept('text')
    presets = root.accept('list')
    presets.accept('text')
    return root

@priority(250)
def on_task_start(self, task, config):
    """Remove the configured plugin(s) from this task's config so they never run."""
    if isinstance(config, basestring):
        config = [config]
    # let's disable them
    for disable in config:
        if disable in task.config:
            log.debug('disabling %s' % disable)
            del(task.config[disable])

# Root-level 'presets' key: accept any nested keys/values.
root_config_schema = {
    'type': 'object',
    'additionalProperties': {}
}
register_config_key('presets', root_config_schema)

register_plugin(PluginPreset, 'preset', builtin=True, api_ver=2)
register_plugin(DisablePlugin, 'disable_plugin', api_ver=2)

register_parser_option('--preset', action='store', dest='preset', default=False, metavar='NAME',
                       help='Execute tasks with given preset.')
options = self.parse_arguments(task.manager.options.inject) # disable other inputs log.info('Disabling the rest of the input phase.') task.disable_phase('input') # create our injected entry entry = Entry(options['entry'], injected=True) if not 'url' in entry: entry['url'] = 'http://localhost/inject/%s' % ''.join([ random.choice(string.letters + string.digits) for x in range(1, 30) ]) if entry.get('immortal'): log.debug('Injected entry is immortal') task.all_entries.append(entry) if options.get('accept', False): log.debug('accepting the injection') entry.accept('--inject accepted') register_plugin(InputInject, '--inject', debug=True, builtin=True) register_parser_option( '--inject', nargs='+', metavar=('TITLE', 'URL'), help='Injects entry to all executed tasks: <TITLE> [URL] [ACCEPT] [FORCE]')
def __call__(self, parser, namespace, values, option_string=None):
    """argparse Action entry point for `--archive ACTION [ARGS]`.

    Stores {'action': ..., 'args': [...]} into namespace.archive. Raises
    ArgumentError carrying the usage text when no action is supplied or the
    action is not one of ArchiveCli.ACTIONS.
    """
    namespace.archive = options = {}
    # NOTE: removed a stray "')" left over in the inject line of the help text.
    usage = '''
Usage for --archive ACTION args, these are subjected to change in near future.

 consolidate               Migrate old archive data to new model, may take a long time.
 search [@TAG]s KEYWORDS   Search from the archive.
 inject ID [ID] [yes]      Inject as accepted from archive by ID\'s. If yes is given immortal flag will be used.
 tag-source SRC TAG [TAG]  Tag all archived items within source with given tag.'''
    if not values:
        raise ArgumentError(self, usage)
    action = values[0].lower()
    if action not in ArchiveCli.ACTIONS:
        raise ArgumentError(self, usage)
    options['action'] = action
    options['args'] = [unicode(arg) for arg in values[1:]]


register_plugin(Archive, 'archive', api_ver=2)
register_plugin(ArchiveInject, 'archive_inject', api_ver=2, builtin=True)
register_plugin(UrlrewriteArchive, 'flexget_archive', groups=['search'])
register_plugin(ArchiveCli, '--archive-cli', builtin=True, api_ver=2)

register_parser_option('--archive', nargs='*', action=ArchiveCLIAction, metavar=('ACTION', 'ARGS'),
                       help='Access [search|inject|tag-source|consolidate] functionalities. '
                            'Without any args display help about those.')
"""Make sure all temp files are cleaned up when task exits""" self.cleanup_temp_files(task) def on_task_abort(self, task, config): """Make sure all temp files are cleaned up when task is aborted.""" self.cleanup_temp_files(task) def cleanup_temp_file(self, entry): if 'file' in entry: if os.path.exists(entry['file']): log.debug('removing temp file %s from %s' % (entry['file'], entry['title'])) os.remove(entry['file']) shutil.rmtree(os.path.dirname(entry['file'])) del (entry['file']) def cleanup_temp_files(self, task): """Checks all entries for leftover temp files and deletes them.""" for entry in task.entries + task.rejected + task.failed: self.cleanup_temp_file(entry) register_plugin(PluginDownload, 'download', api_ver=2) register_parser_option( '--dl-path', action='store', dest='dl_path', default=False, metavar='PATH', help='Override path for download plugin. Applies to all executed tasks.')
for name in sorted(plugins): plugin = plugins[name] # do not include test classes, unless in debug mode if plugin.get('debug_plugin', False) and not manager.options.debug: continue flags = [] if plugin.instance.__doc__: flags.append('--doc') if plugin.builtin: flags.append('builtin') if plugin.debug: flags.append('debug') handlers = plugin.phase_handlers roles = ', '.join('%s(%s)' % (phase, handlers[phase].priority) for phase in handlers) print '%-20s%-30s%s' % (name, roles, ', '.join(flags)) print '-' * 79 register_parser_option('--plugins', action='store_true', dest='plugins', default=False, help='Print registered plugins summary') register_parser_option('--list', action='store_true', dest='plugins', default=False, help=SUPPRESS)
""" def validator(self): from flexget import validator return validator.factory('boolean') @priority(0) def on_task_output(self, task): if 'dump' not in task.config and not task.manager.options.dump_entries: return #from flexget.utils.tools import sanitize #import yaml eval_lazy = task.manager.options.dump_entries == 'eval' trace = task.manager.options.dump_entries == 'trace' undecided = [entry for entry in task.entries if not entry in task.accepted] if undecided: console('-- Undecided: --------------------------') dump(undecided, task.manager.options.debug, eval_lazy, trace) if task.accepted: console('-- Accepted: ---------------------------') dump(task.accepted, task.manager.options.debug, eval_lazy, trace) if task.rejected: console('-- Rejected: ---------------------------') dump(task.rejected, task.manager.options.debug, eval_lazy, trace) register_plugin(OutputDump, 'dump', builtin=True) register_parser_option('--dump', nargs='?', choices=['eval', 'trace'], const=True, dest='dump_entries', help='Display all entries in task with details. ' 'Arg `--dump eval` will evaluate all lazy fields.')
@priority(255)
def on_task_start(self, task, config):
    """Abort the task unless the configured interval has elapsed since the last run.

    config -- interval specification understood by parse_timedelta
              (e.g. '3 hours'). --learn and --now both bypass the check.
    """
    # Allow reruns
    if task.is_rerun:
        return
    if task.manager.options.learn:
        log.info('Ignoring task %s interval for --learn' % task.name)
        return
    last_time = task.simple_persistence.get('last_time')
    if not last_time:
        log.info('No previous run recorded, running now')
    elif task.manager.options.interval_ignore:
        log.info('Ignoring interval because of --now')
    else:
        log.debug('last_time: %r' % last_time)
        log.debug('interval: %s' % config)
        next_time = last_time + parse_timedelta(config)
        log.debug('next_time: %r' % next_time)
        if datetime.datetime.now() < next_time:
            log.debug('interval not met')
            log.verbose('Interval %s not met on task %s. Use --now to override.' % (config, task.name))
            task.abort('Interval not met', silent=True)
            return
        log.debug('interval passed')
    # Record this run so the next execution measures from now.
    task.simple_persistence['last_time'] = datetime.datetime.now()


register_plugin(PluginInterval, 'interval', api_ver=2)
register_parser_option('--now', action='store_true', dest='interval_ignore', default=False,
                       help='Ignore interval(s)')
entry.on_reject(self.verbose_details, task=task, act='rejected', reason='') entry.on_fail(self.verbose_details, task=task, act='failed', reason='') def verbose_details(self, entry, task=None, act=None, reason=None, **kwargs): msg = "%s: `%s` by %s plugin" % (act.upper(), entry['title'], task.current_plugin) if reason: msg += ' because %s' % reason[0].lower() + reason[1:] task_log.verbose(msg) def on_task_exit(self, task, config): if task.manager.options.silent: return # verbose undecided entries if task.manager.options.verbose: undecided = False for entry in task.entries: if entry in task.accepted: continue undecided = True log.verbose('UNDECIDED: `%s`' % entry['title']) if undecided: log_once('Undecided entries have not been accepted or rejected. If you expected these to reach output,' ' you must set up filter plugin(s) to accept them.', logger=log) register_plugin(Verbose, 'verbose', builtin=True, api_ver=2) register_parser_option('-v', '--verbose', action='store_true', dest='verbose', default=False, help='Verbose undecided entries.') register_parser_option('-s', '--silent', action='store_true', dest='silent', default=False, help='Don\'t verbose any actions (accept, reject, fail).')
console('-' * 79) def queue_get(self): """Get the current IMDb queue. Returns: List of QueuedMovie objects (detached from session) """ session = Session() try: items = session.query(QueuedMovie).all() for item in items: if not item.title: # old database does not have title / title not retrieved try: item.title = self.parse_what(item.imdb_id)['title'] except QueueError: item.title = 'N/A' return items finally: session.close() register_plugin(FilterMovieQueue, 'movie_queue', api_ver=2) register_plugin(MovieQueueManager, 'movie_queue_manager', builtin=True) register_parser_option('--movie-queue', action='callback', callback=MovieQueueManager.optik_imdb_queue, help='(add|del|list) [IMDB_URL|NAME] [QUALITY]')
presets.accept('text') return root @priority(250) def on_task_start(self, task, config): if isinstance(config, basestring): config = [config] # let's disable them for disable in config: if disable in task.config: log.debug('disabling %s' % disable) del (task.config[disable]) root_config_schema = { 'type': 'object', 'additionalProperties': {} # TODO: Reject keys that are plugin names } register_config_key('presets', root_config_schema) register_plugin(PluginPreset, 'preset', builtin=True, api_ver=2) register_plugin(DisablePlugin, 'disable_plugin', api_ver=2) register_parser_option('--preset', action='store', dest='preset', default=False, metavar='NAME', help='Execute tasks with given preset.')
'Discover interval %s not met for %s. Use --discover-now to override.' % (config['interval'], entry['title'])) continue de.last_execution = datetime.datetime.now() log.debug('interval passed') result.append(entry) return result @cached('discover') def on_task_input(self, task, config): task.no_entries_ok = True entries = self.execute_inputs(config, task) log.verbose('Discovering %i titles ...' % len(entries)) if len(entries) > 500: log.critical( 'Looks like your inputs in discover configuration produced ' 'over 500 entries, please reduce the amount!') # TODO: the entries that are estimated should be given priority over expiration entries = self.interval_expired(config, task, entries) if not config.get('ignore_estimations', False): entries = self.estimated(entries) return self.execute_searches(config, entries) register_plugin(Discover, 'discover', api_ver=2) register_parser_option('--discover-now', action='store_true', dest='discover_now', default=False, help='Immediately try to discover everything.')
for entry in task.failed: item = task.session.query(FailedEntry).filter(FailedEntry.title == entry['title']).\ filter(FailedEntry.url == entry['original_url']).first() if item: # Do not count the failure on this run when adding additional retry time fail_count = item.count - 1 # Don't bother saving this if it has met max retries if fail_count >= config['max_retries']: continue # Timedeltas do not allow floating point multiplication. Convert to seconds and then back to avoid this. base_retry_secs = base_retry_time.days * 86400 + base_retry_time.seconds retry_secs = base_retry_secs * (retry_time_multiplier ** fail_count) retry_time = timedelta(seconds=retry_secs) else: retry_time = base_retry_time if self.backlog: self.backlog.add_backlog(task, entry, amount=retry_time) if retry_time: fail_reason = item.reason if item else entry.get('reason', 'unknown') entry.reject(reason='Waiting before trying failed entry again. (failure reason: %s)' % fail_reason, remember_time=retry_time) # Cause a task rerun, to look for alternate releases task.rerun() register_plugin(PluginFailed, '--failed', builtin=True, api_ver=2) register_plugin(FilterRetryFailed, 'retry_failed', builtin=True, api_ver=2) register_parser_option('--failed', action='store_true', dest='failed', default=0, help='List recently failed entries.') register_parser_option('--clear', action='store_true', dest='clear_failed', default=0, help='Clear recently failed list.')
# store final destination as output key entry['output'] = destfile finally: self.cleanup_temp_file(entry) def on_feed_exit(self, feed): """Make sure all temp files are cleaned up when feed exits""" self.cleanup_temp_files(feed) def on_feed_abort(self, feed): """Make sure all temp files are cleaned up when feed is aborted.""" self.cleanup_temp_files(feed) def cleanup_temp_file(self, entry): if 'file' in entry: if os.path.exists(entry['file']): log.debug('removing temp file %s from %s' % (entry['file'], entry['title'])) os.remove(entry['file']) del(entry['file']) def cleanup_temp_files(self, feed): """Checks all entries for leftover temp files and deletes them.""" for entry in feed.entries + feed.rejected + feed.failed: self.cleanup_temp_file(entry) register_plugin(PluginDownload, 'download') register_parser_option('--dl-path', action='store', dest='dl_path', default=False, metavar='PATH', help='Override path for download plugin. Applies to all executed feeds.')
@event('task.execute.before_plugin')
def before(task, keyword):
    """Snapshot wall-clock time and query count before a plugin runs."""
    task_data = _start.setdefault(task.name, {})
    task_data.setdefault('time', {})[keyword] = time.time()
    task_data.setdefault('queries', {})[keyword] = query_count


@event('task.execute.after_plugin')
def after(task, keyword):
    """Accumulate elapsed time and query delta for the plugin that just ran."""
    started = _start[task.name]
    elapsed = time.time() - started['time'][keyword]
    query_delta = query_count - started['queries'][keyword]
    # Store results, increases previous values
    stats = performance.setdefault(task.name, {}).setdefault(keyword, {})
    stats['took'] = stats.get('took', 0) + elapsed
    stats['queries'] = stats.get('queries', 0) + query_delta


@event('manager.execute.completed')
def results(manager):
    """Log per-plugin totals that exceeded the reporting thresholds."""
    for name, data in performance.iteritems():
        log.info('Performance results for task %s:' % name)
        for keyword, stats in data.iteritems():
            took, queries = stats['took'], stats['queries']
            if took > 0.1 or queries > 10:
                log.info('%-15s took %0.2f sec (%s queries)' % (keyword, took, queries))


register_parser_option('--debug-perf', action='store_true', dest='debug_perf', default=False,
                       help=SUPPRESS)