def plugins_summary(manager, options):
    """Print a table of registered plugins with their phases and flags."""
    if options.table_type == 'porcelain':
        disable_all_colors()
    rows = [['Keyword', 'Phases', 'Flags']]
    for plugin in sorted(get_plugins(phase=options.phase, group=options.group)):
        # --builtins limits the listing to built-in plugins
        if options.builtins and not plugin.builtin:
            continue
        flags = []
        if plugin.instance.__doc__:
            flags.append('doc')
        if plugin.builtin:
            flags.append('builtin')
        if plugin.debug:
            # developer-only plugins are hidden unless --debug was given
            if not options.debug:
                continue
            flags.append('developers')
        phase_info = [
            '{0}({1})'.format(phase, handler.priority)
            for phase, handler in plugin.phase_handlers.items()
        ]
        name = plugin.name
        if 'builtin' in flags:
            name = colorize('green', name)
        rows.append([name, ', '.join(phase_info), ', '.join(flags)])
    try:
        table = TerminalTable(options.table_type, rows, wrap_columns=[1, 2])
        console(table.output)
    except TerminalTableError as e:
        console('ERROR: %s' % str(e))
        return
    console(colorize('green', ' Built-in plugins'))
def estimate(self, entry):
    """
    Estimate release schedule for Entry

    :param entry:
    :return: estimated date of released for the entry, None if it can't figure it out
    """
    logger.debug(entry['title'])
    estimators = [
        e.instance.estimate for e in plugin.get_plugins(interface='estimate_release')
    ]
    by_priority = sorted(
        estimators,
        key=lambda e: getattr(e, 'priority', plugin.PRIORITY_DEFAULT),
        reverse=True,
    )
    for estimator in by_priority:
        result = estimator(entry)
        # first successful estimation wins
        if result is not None:
            return result
    return {'data_exists': False, 'entity_date': None}
def url_rewrite(self, task, entry):
    """Rewrites given entry url. Raises UrlRewritingError if failed."""
    attempts = 0
    while self.url_rewritable(task, entry) and entry.accepted:
        attempts += 1
        if attempts > 20:
            raise UrlRewritingError('URL rewriting was left in infinite loop while rewriting url for %s, '
                                    'some rewriter is returning always True' % entry)
        for rewriter in plugin.get_plugins(group='urlrewriter'):
            name = rewriter.name
            if name in self.disabled_rewriters:
                log.trace('Skipping rewriter %s since it\'s disabled' % name)
                continue
            try:
                if not rewriter.instance.url_rewritable(task, entry):
                    continue
                previous_url = entry['url']
                log.debug('Url rewriting %s' % entry['url'])
                rewriter.instance.url_rewrite(task, entry)
                # only announce rewrites that actually changed the url
                if entry['url'] != previous_url:
                    log.info('Entry \'%s\' URL rewritten to %s (with %s)'
                             % (entry['title'], entry['url'], name))
            except UrlRewritingError as r:
                # increase failcount
                #count = self.shared_cache.storedefault(entry['url'], 1)
                #count += 1
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, r.value))
            except plugin.PluginError as e:
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, e.value))
            except Exception as e:
                log.exception(e)
                raise UrlRewritingError('%s: Internal error with url %s' % (name, entry['url']))
def plugins_summary(manager, options):
    """Render a table of registered plugins: interfaces, phases and flags."""
    if options.table_type == 'porcelain':
        disable_colors()
    table = TerminalTable(
        'Keyword', 'Interfaces', 'Phases', 'Flags', table_type=options.table_type
    )
    for plugin in sorted(get_plugins(phase=options.phase, interface=options.interface)):
        # --builtins limits the listing to built-in plugins
        if options.builtins and not plugin.builtin:
            continue
        flags = []
        if plugin.instance.__doc__:
            flags.append('doc')
        if plugin.builtin:
            flags.append('builtin')
        if plugin.debug:
            # developer-only plugins are hidden unless --debug was given
            if not options.debug:
                continue
            flags.append('developers')
        phase_info = [
            '{0}({1})'.format(phase, handler.priority)
            for phase, handler in plugin.phase_handlers.items()
        ]
        name = plugin.name
        if 'builtin' in flags:
            name = colorize('green', name)
        table.add_row(
            name, ', '.join(plugin.interfaces), ', '.join(phase_info), ', '.join(flags)
        )
    table.caption = colorize('green', ' Built-in plugins')
    table.caption_justify = 'left'
    console(table)
def url_rewrite(self, task, entry):
    """Rewrites given entry url. Raises UrlRewritingError if failed."""
    attempts = 0
    while self.url_rewritable(task, entry) and entry.accepted:
        attempts += 1
        if attempts > 20:
            raise UrlRewritingError(
                'URL rewriting was left in infinite loop while rewriting url for %s, '
                'some rewriter is returning always True' % entry)
        for rewriter in plugin.get_plugins(group='urlrewriter'):
            name = rewriter.name
            if name in self.disabled_rewriters:
                log.trace('Skipping rewriter %s since it\'s disabled' % name)
                continue
            try:
                if not rewriter.instance.url_rewritable(task, entry):
                    continue
                log.debug('Url rewriting %s' % entry['url'])
                rewriter.instance.url_rewrite(task, entry)
                log.info('Entry \'%s\' URL rewritten to %s (with %s)'
                         % (entry['title'], entry['url'], name))
            except UrlRewritingError as r:
                # increase failcount
                #count = self.shared_cache.storedefault(entry['url'], 1)
                #count += 1
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, r.value))
            except plugin.PluginError as e:
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, e.value))
            except Exception as e:
                log.exception(e)
                raise UrlRewritingError('%s: Internal error with url %s' % (name, entry['url']))
def plugins_summary(manager, options):
    """Print the plugin list; --porcelain gives pipe-delimited machine output."""
    porcelain = options.porcelain
    if porcelain:
        console('%-30s%-s%-30s%-s%s' % ('Name', '|', 'Roles (priority)', '|', 'Info'))
    else:
        console('-' * 79)
        console('%-30s%-30s%-s' % ('Name', 'Roles (priority)', 'Info'))
        console('-' * 79)
    # print the list
    for plugin in sorted(get_plugins(phase=options.phase, group=options.group)):
        # do not include test classes, unless in debug mode
        if plugin.get('debug_plugin', False) and not options.debug:
            continue
        flags = []
        if plugin.instance.__doc__:
            flags.append('--doc')
        if plugin.builtin:
            flags.append('builtin')
        if plugin.debug:
            flags.append('debug')
        handlers = plugin.phase_handlers
        roles = ', '.join(
            '%s(%s)' % (phase, handlers[phase].priority) for phase in handlers
        )
        if porcelain:
            console('%-30s%-s%-30s%-s%s' % (plugin.name, '|', roles, '|', ', '.join(flags)))
        else:
            console('%-30s%-30s%-s' % (plugin.name, roles, ', '.join(flags)))
    # porcelain output omits the trailing separator
    if not porcelain:
        console('-' * 79)
def test_parsing_plugins_have_parse_methods(self):
    """Every registered <type>_parser plugin must expose a parse_<type> method."""
    for parser_type in plugin_parsing.PARSER_TYPES:
        method_name = 'parse_%s' % parser_type
        for parser_plugin in plugin.get_plugins(interface='%s_parser' % parser_type):
            message = '{type} parsing plugin {name} has no parse_{type} method'.format(
                type=parser_type, name=parser_plugin.name)
            assert hasattr(parser_plugin.instance, method_name), message
def test_parsing_plugins_have_parse_methods(self):
    """Check that each parser plugin implements the expected parse method."""
    for kind in plugin_parsing.PARSER_TYPES:
        for candidate in plugin.get_plugins(interface='%s_parser' % kind):
            assert hasattr(candidate.instance, 'parse_%s' % kind), (
                '{type} parsing plugin {name} has no parse_{type} method'.format(
                    type=kind, name=candidate.name
                )
            )
def init_parsers(manager):
    """Prepare our list of parsing plugins and default parsers.

    Fills the module-level ``parsers`` mapping for each parser type and
    records in ``default_parsers`` the name of the parser whose
    ``parse_<type>`` method carries the highest ``priority`` attribute.
    """
    for parser_type in PARSER_TYPES:
        parsers[parser_type] = {}
        for p in plugin.get_plugins(group=parser_type + '_parser'):
            parsers[parser_type][p.name.replace('parser_', '')] = p.instance
        # Select default parsers based on priority.
        func_name = 'parse_' + parser_type
        # dict.items() instead of the Python 2-only iteritems(); works on
        # both Python 2 and Python 3.
        default_parsers[parser_type] = max(
            parsers[parser_type].items(),
            key=lambda p: getattr(getattr(p[1], func_name), 'priority', 0))[0]
def url_rewritable(self, task, entry):
    """Return True if entry is urlrewritable by registered rewriter."""
    for rewriter in plugin.get_plugins(group='urlrewriter'):
        name = rewriter.name
        if name in self.disabled_rewriters:
            log.trace('Skipping rewriter %s since it\'s disabled' % name)
            continue
        log.trace('checking urlrewriter %s' % name)
        if rewriter.instance.url_rewritable(task, entry):
            return True
    return False
def url_rewritable(self, task, entry):
    """Return True if entry is urlrewritable by registered rewriter."""
    for rewriter in plugin.get_plugins(interface='urlrewriter'):
        name = rewriter.name
        if name in self.disabled_rewriters:
            log.trace('Skipping rewriter %s since it\'s disabled' % name)
            continue
        log.trace('checking urlrewriter %s' % name)
        if rewriter.instance.url_rewritable(task, entry):
            return True
    return False
def schema(self):
    """Build a JSON schema limiting each parser-type key to registered parser names."""
    # Create a schema allowing only our registered parsers to be used under the key of each parser type
    properties = {}
    for parser_type in PARSER_TYPES:
        names = []
        for p in plugin.get_plugins(interface=parser_type + '_parser'):
            names.append(p.name.replace('parser_', ''))
        properties[parser_type] = {'type': 'string', 'enum': names}
    return {'type': 'object', 'properties': properties, 'additionalProperties': False}
def init_parsers(manager):
    """Prepare our list of parsing plugins and default parsers.

    Populates the module-level ``parsers`` mapping per parser type and picks
    the default parser per type by the ``priority`` attribute of its
    ``parse_<type>`` method.
    """
    for parser_type in PARSER_TYPES:
        parsers[parser_type] = {}
        for p in plugin.get_plugins(group=parser_type + '_parser'):
            parsers[parser_type][p.name.replace('parser_', '')] = p.instance
        # Select default parsers based on priority.
        func_name = 'parse_' + parser_type
        # dict.items() instead of the Python 2-only iteritems(); works on
        # both Python 2 and Python 3.
        default_parsers[parser_type] = max(
            parsers[parser_type].items(),
            key=lambda p: getattr(getattr(p[1], func_name), 'priority', 0))[0]
def init_parsers(manager):
    """Prepare our list of parsing plugins and default parsers."""
    for parser_type in PARSER_TYPES:
        type_parsers = {}
        parsers[parser_type] = type_parsers
        for p in plugin.get_plugins(interface=parser_type + '_parser'):
            type_parsers[p.name.replace('parser_', '')] = p.instance
        # Select default parsers based on priority
        func_name = 'parse_' + parser_type
        best_name, _ = max(
            type_parsers.items(),
            key=lambda item: getattr(getattr(item[1], func_name), 'priority', 0),
        )
        default_parsers[parser_type] = best_name
        log.debug('setting default %s parser to %s. (options: %s)'
                  % (parser_type, default_parsers[parser_type], parsers[parser_type]))
def schema(self):
    """Build a JSON schema limiting each parser-type key to registered parser names."""
    # Create a schema allowing only our registered parsers to be used under the key of each parser type
    properties = {}
    for parser_type in PARSER_TYPES:
        names = []
        for p in plugin.get_plugins(group=parser_type + '_parser'):
            names.append(p.name.replace('parser_', ''))
        properties[parser_type] = {'type': 'string', 'enum': names}
    return {
        'type': 'object',
        'properties': properties,
        'additionalProperties': False,
    }
def on_task_urlrewrite(self, task, config):
    """For each accepted entry, try the configured search plugins in order and
    rewrite the entry's URL to the best-matching search result.

    Entries for which no search produces a close-enough title match are
    rejected with 'search failed' and stripped of immortality.
    """
    # no searches in unit test mode
    if task.manager.unit_test:
        return
    # map plugin name -> instance for every registered 'search' plugin
    plugins = {}
    for p in plugin.get_plugins(group='search'):
        plugins[p.name] = p.instance
    # search accepted
    for entry in task.accepted:
        # loop through configured searches
        for name in config:
            search_config = None
            if isinstance(name, dict):
                # the name is the first/only key in the dict.
                name, search_config = list(name.items())[0]
            log.verbose('Searching `%s` from %s' % (entry['title'], name))
            try:
                try:
                    # new-style search api takes the task as a keyword argument
                    results = plugins[name].search(task=task, entry=entry, config=search_config)
                except TypeError:
                    # Old search api did not take task argument
                    log.warning(
                        'Search plugin %s does not support latest search api.' % name)
                    results = plugins[name].search(entry, search_config)
                # fuzzy-match result titles against the entry title;
                # results are tried best 'search_sort' first
                matcher = SequenceMatcher(a=entry['title'])
                for result in sorted(results, key=lambda e: e.get('search_sort'), reverse=True):
                    matcher.set_seq2(result['title'])
                    if matcher.ratio() > 0.9:
                        log.debug('Found url: %s', result['url'])
                        entry['url'] = result['url']
                        break
                    else:
                        log.debug('Match %s is not close enough', result['title'])
                else:
                    # no result matched: try the next configured search
                    continue
                # a match was found: stop searching for this entry
                break
            except (plugin.PluginError, plugin.PluginWarning) as pw:
                log.verbose('Failed: %s' % pw.value)
                continue
        # Search failed
        else:
            # If I don't have a URL, doesn't matter if I'm immortal...
            entry['immortal'] = False
            entry.reject('search failed')
def plugins(self, phase=None):
    """Get currently enabled plugins.

    :param string phase: Optional, limits to plugins currently configured on given phase, sorted in phase order.
    :return: An iterator over configured :class:`flexget.plugin.PluginInfo` instances enabled on this task.
    """
    if phase:
        plugins = sorted(get_plugins(phase=phase),
                         key=lambda p: p.phase_handlers[phase], reverse=True)
    else:
        # dict.values() instead of the Python 2-only itervalues(); works on
        # both Python 2 and Python 3.
        plugins = all_plugins.values()
    # a plugin is enabled if it appears in the task config or is a builtin
    return (p for p in plugins if p.name in self.config or p.builtin)
def get(self, session=None):
    """ Get list of registered plugins """
    args = plugins_parser.parse_args()
    plugin_list = []
    try:
        for registered in get_plugins(phase=args['phase'], group=args['group']):
            item = plugin_to_dict(registered)
            if args['include_schema']:
                item['schema'] = registered.schema
            plugin_list.append(item)
    except ValueError as e:
        raise BadRequest(str(e))
    return {'plugin_list': plugin_list, 'number_of_plugins': len(plugin_list)}
def __init__(self):
    """Collect registered parser plugins per type and pick priority-based defaults."""
    self.parsers = {}
    self.parses_names = {}
    self.default_parser = {}
    for parser_type in PARSER_TYPES:
        type_instances = {}
        type_names = {}
        self.parsers[parser_type] = type_instances
        self.parses_names[parser_type] = type_names
        for p in plugin.get_plugins(group=parser_type + '_parser'):
            short_name = p.name.replace('parser_', '')
            type_instances[short_name] = p.instance
            type_names[short_name] = p.name
        # Select default parsers based on priority
        func_name = 'parse_' + parser_type
        self.default_parser[parser_type] = max(
            type_instances.values(),
            key=lambda inst: getattr(getattr(inst, func_name), 'priority', 0))
    self.parser = self.default_parser
def on_task_urlrewrite(self, task, config):
    """For each accepted entry, run the configured search plugins in order and
    rewrite the entry's URL to the first close-enough title match.

    Entries for which every configured search fails are rejected with
    'search failed' and stripped of immortality.
    """
    # no searches in unit test mode
    if task.manager.unit_test:
        return
    # map plugin name -> instance for every registered 'search' plugin
    plugins = {}
    for p in plugin.get_plugins(interface='search'):
        plugins[p.name] = p.instance
    # search accepted
    for entry in task.accepted:
        # loop through configured searches
        for name in config:
            search_config = None
            if isinstance(name, dict):
                # the name is the first/only key in the dict.
                name, search_config = list(name.items())[0]
            log.verbose('Searching `%s` from %s' % (entry['title'], name))
            try:
                try:
                    # new-style search api takes the task as a keyword argument
                    results = plugins[name].search(
                        task=task, entry=entry, config=search_config
                    )
                except TypeError:
                    # Old search api did not take task argument
                    log.warning('Search plugin %s does not support latest search api.' % name)
                    results = plugins[name].search(entry, search_config)
                # fuzzy-match result titles against the entry title
                matcher = SequenceMatcher(a=entry['title'])
                for result in results:
                    matcher.set_seq2(result['title'])
                    if matcher.ratio() > 0.9:
                        log.debug('Found url: %s', result['url'])
                        entry['url'] = result['url']
                        break
                    else:
                        log.debug('Match %s is not close enough', result['title'])
                else:
                    # no result matched: try the next configured search
                    continue
                # a match was found: stop searching for this entry
                break
            except (plugin.PluginError, plugin.PluginWarning) as pw:
                log.verbose('Failed: %s' % pw.value)
                continue
        # Search failed
        else:
            # If I don't have a URL, doesn't matter if I'm immortal...
            entry['immortal'] = False
            entry.reject('search failed')
def get(self, session=None):
    """ Get list of registered plugins """
    args = plugins_parser.parse_args()
    # Pagination and sorting params; per_page is capped at 100
    page = args['page']
    per_page = min(args['per_page'], 100)
    start = per_page * (page - 1)
    stop = start + per_page
    plugin_list = []
    try:
        for registered in get_plugins(phase=args['phase'], interface=args['interface']):
            item = plugin_to_dict(registered)
            if args['include_schema']:
                item['schema'] = registered.schema
            plugin_list.append(item)
    except ValueError as e:
        raise BadRequest(str(e))
    total_items = len(plugin_list)
    sliced_list = plugin_list[start:stop]
    # Total number of pages
    total_pages = int(ceil(total_items / float(per_page)))
    if page > total_pages and total_pages != 0:
        raise NotFoundError('page %s does not exist' % page)
    # Actual results in page
    actual_size = min(per_page, len(sliced_list))
    # Get pagination headers
    pagination = pagination_headers(total_pages, total_items, actual_size, request)
    rsp = jsonify(sliced_list)
    # Add link header to response
    rsp.headers.extend(pagination)
    return rsp
def estimate(self, entry):
    """
    Estimate release schedule for Entry

    :param entry:
    :return: estimated date of released for the entry, None if it can't figure it out
    """
    log.debug(entry['title'])
    candidates = [
        e.instance.estimate
        for e in plugin.get_plugins(interface='estimate_release')
    ]
    candidates.sort(
        key=lambda e: getattr(e, 'priority', plugin.DEFAULT_PRIORITY), reverse=True
    )
    for estimator in candidates:
        result = estimator(entry)
        # return first successful estimation
        if result is not None:
            return result
def estimate(self, entry):
    """
    Estimate release schedule for Entry

    :param entry:
    :return: estimated date of released for the entry, None if it can't figure it out
    """
    log.debug(entry['title'])
    candidates = [
        e.instance.estimate for e in plugin.get_plugins(group='estimate_release')
    ]
    ordered = sorted(
        candidates,
        key=lambda e: getattr(e, 'priority', plugin.DEFAULT_PRIORITY),
        reverse=True,
    )
    for estimator in ordered:
        result = estimator(entry)
        # return first successful estimation
        if result is not None:
            return result
def __init__(self):
    """Build parser registries per type and choose the highest-priority default."""
    self.parsers = {}
    self.parses_names = {}
    self.default_parser = {}
    for parser_type in PARSER_TYPES:
        registered = list(plugin.get_plugins(group=parser_type + '_parser'))
        self.parsers[parser_type] = {
            p.name.replace('parser_', ''): p.instance for p in registered
        }
        self.parses_names[parser_type] = {
            p.name.replace('parser_', ''): p.name for p in registered
        }
        # Select default parsers based on priority
        func_name = 'parse_' + parser_type
        self.default_parser[parser_type] = max(
            self.parsers[parser_type].values(),
            key=lambda inst: getattr(getattr(inst, func_name), 'priority', 0))
    self.parser = self.default_parser
def url_rewrite(self, task, entry):
    """Rewrites given entry url. Raises UrlRewritingError if failed."""
    attempts = 0
    while self.url_rewritable(task, entry) and entry.accepted:
        attempts += 1
        if attempts > 20:
            raise UrlRewritingError(
                'URL rewriting was left in infinite loop while rewriting url for %s, '
                'some rewriter is returning always True' % entry
            )
        for rewriter in plugin.get_plugins(interface='urlrewriter'):
            name = rewriter.name
            if name in self.disabled_rewriters:
                logger.trace("Skipping rewriter {} since it's disabled", name)
                continue
            try:
                if not rewriter.instance.url_rewritable(task, entry):
                    continue
                previous_url = entry['url']
                logger.debug('Url rewriting {}', entry['url'])
                rewriter.instance.url_rewrite(task, entry)
                if entry['url'] != previous_url:
                    # keep the 'urls' list in sync with the rewritten url
                    if entry.get('urls') and previous_url in entry.get('urls'):
                        entry['urls'][entry['urls'].index(previous_url)] = entry['url']
                    logger.info(
                        "Entry '{}' URL rewritten to {} (with {})",
                        entry['title'],
                        entry['url'],
                        name,
                    )
            except UrlRewritingError as r:
                # increase failcount
                # count = self.shared_cache.storedefault(entry['url'], 1)
                # count += 1
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, r.value))
            except plugin.PluginError as e:
                raise UrlRewritingError('URL rewriting %s failed: %s' % (name, e.value))
            except Exception as e:
                logger.exception(e)
                raise UrlRewritingError(
                    '%s: Internal error with url %s' % (name, entry['url'])
                )
def plugins_summary(manager, options):
    """Write an ASCII table of registered plugins to the console."""
    separator = "-" * 79
    console(separator)
    console("%-20s%-30s%s" % ("Name", "Roles (priority)", "Info"))
    console(separator)
    # print the list
    for plugin in sorted(get_plugins(phase=options.phase, group=options.group)):
        # do not include test classes, unless in debug mode
        if plugin.get("debug_plugin", False) and not options.debug:
            continue
        flags = []
        if plugin.instance.__doc__:
            flags.append("--doc")
        if plugin.builtin:
            flags.append("builtin")
        if plugin.debug:
            flags.append("debug")
        handlers = plugin.phase_handlers
        roles = ", ".join(
            "%s(%s)" % (phase, handlers[phase].priority) for phase in handlers
        )
        console("%-20s%-30s%s" % (plugin.name, roles, ", ".join(flags)))
    console(separator)
def on_task_urlrewrite(self, task, config):
    """For each accepted entry, run the configured search plugins in order and
    rewrite the entry's URL to the best-matching search result.

    Entries for which no configured search yields a close-enough title match
    are rejected with 'search failed' and stripped of immortality.
    """
    # no searches in unit test mode
    if task.manager.unit_test:
        return
    # map plugin name -> instance for every registered 'search' plugin
    plugins = {}
    for p in plugin.get_plugins(group='search'):
        plugins[p.name] = p.instance
    # search accepted
    for entry in task.accepted:
        # loop through configured searches
        for name in config:
            search_config = None
            if isinstance(name, dict):
                # the name is the first/only key in the dict.
                # list() makes the subscript work on Python 3 too, where
                # dict.items() returns a non-subscriptable view.
                name, search_config = list(name.items())[0]
            log.verbose('Searching `%s` from %s' % (entry['title'], name))
            try:
                results = plugins[name].search(entry, search_config)
                # fuzzy-match result titles against the entry title;
                # results are tried best 'search_sort' first
                matcher = SequenceMatcher(a=entry['title'])
                for result in sorted(results, key=lambda e: e.get('search_sort'), reverse=True):
                    matcher.set_seq2(result['title'])
                    if matcher.ratio() > 0.9:
                        log.debug('Found url: %s', result['url'])
                        entry['url'] = result['url']
                        break
                else:
                    # no result matched: try the next configured search
                    continue
                # a match was found: stop searching for this entry
                break
            except (plugin.PluginError, plugin.PluginWarning) as pw:
                log.verbose('Failed: %s' % pw.value)
                continue
        # Search failed
        else:
            # If I don't have a URL, doesn't matter if I'm immortal...
            entry['immortal'] = False
            entry.reject('search failed')
def supported_ids(self):
    """Return movie identifiers registered by movie_metainfo plugins."""
    # Return a list of supported series identifier as registered via their plugins
    identifiers = []
    for p in plugin.get_plugins(interface='movie_metainfo'):
        identifiers.append(p.instance.movie_identifier)
    return identifiers
def get_plugins(self, interface):
    """Return all plugins registered for *interface*; fail if none are found."""
    matching = list(plugin.get_plugins(interface=interface))
    assert matching, 'No plugins for this interface found.'
    return matching
def plugins_by_category(category):
    """JSON listing of plugins registered under the given category."""
    matching = get_plugins(category=category)
    return jsonify(plugins=plugin_infos(matching))
def plugins_by_context(context):
    """JSON listing of plugins registered under the given context."""
    matching = get_plugins(context=context)
    return jsonify(plugins=plugin_infos(matching))
def plugins_by_group(group):
    """JSON listing of plugins registered under the given group."""
    matching = get_plugins(group=group)
    return jsonify(plugins=plugin_infos(matching))
def plugins_by_phase(phase):
    """JSON listing of plugins that run in the given phase; 404 on failure."""
    try:
        return jsonify(plugins=plugin_infos(get_plugins(phase=phase)))
    except Exception as e:
        # str(e) instead of e.message: BaseException.message was deprecated
        # in Python 2.6 and removed in Python 3.
        return str(e), 404
def test_parsing_plugins_have_parse_methods(self):
    """Each <type>_parser plugin must provide a parse_<type> method."""
    for parser_type in plugin_parsing.PARSER_TYPES:
        expected = "parse_%s" % parser_type
        for parser_info in get_plugins(group="%s_parser" % parser_type):
            assert hasattr(parser_info.instance, expected), (
                "{type} parsing plugin {name} has no parse_{type} method".format(
                    type=parser_type, name=parser_info.name
                )
            )
def supported_ids(self):
    """Return movie identifiers registered by movie_metainfo plugins."""
    # Return a list of supported series identifier as registered via their plugins
    ids = []
    for p in plugin.get_plugins(group='movie_metainfo'):
        ids.append(p.instance.movie_identifier)
    return ids
def plugins_by_phase(phase):
    """JSON listing of plugins that run in the given phase; 404 on failure."""
    try:
        return jsonify(plugins=plugin_infos(get_plugins(phase=phase)))
    except Exception as e:
        # 'except Exception, e' is Python 2-only syntax; 'as' works on both
        # 2.6+ and 3.x. str(e) replaces the removed BaseException.message.
        return str(e), 404
def supported_ids(self):
    """Return movie identifiers registered by movie_metainfo plugins."""
    # Return a list of supported series identifier as registered via their plugins
    return [
        p.instance.movie_identifier
        for p in plugin.get_plugins(group='movie_metainfo')
    ]