def parse_crawler_config(config_path='crawler.conf'):
    """Load the crawler configuration into the module-level ``_config``.

    :param config_path: config file name, resolved relative to the
        execution path of this module.
    """
    global _config
    # Resolve both the config file and its spec relative to this module.
    infile = misc.execution_path(config_path)
    configspec = misc.execution_path(CONFIG_SPEC_PATH)
    _config = ConfigObj(infile=infile, configspec=configspec)
    # Configspec is not being used currently, but keeping validate()
    # and apply_user_args() for future. Essentially a NOP right now;
    # validate() would apply defaults from the spec once it is in use.
    validator = Validator()
    _config.validate(validator)
def get_plugins(category_filter=None, plugin_places=None):
    """Collect and return all plugins discovered under *plugin_places*.

    :param category_filter: mapping of category name -> plugin base class
        handed to the PluginManager (default: no category filtering).
    :param plugin_places: directories to scan for ``.plugin`` files,
        relative to the location of this file (default: ``['plugins']``).
    :return: list of all discovered plugin info objects.
    """
    # Fix: avoid mutable default arguments ({} / []) — a shared default
    # object persists across calls; use None sentinels instead.
    if category_filter is None:
        category_filter = {}
    if plugin_places is None:
        plugin_places = ['plugins']
    pm = PluginManager(plugin_info_ext='plugin')
    # Normalize the paths to the location of this file.
    # XXX-ricarkol: there has to be a better way to do this.
    plugin_places = [misc.execution_path(x) for x in plugin_places]
    pm.setPluginPlaces(plugin_places)
    pm.setCategoriesFilter(category_filter)
    pm.collectPlugins()
    return pm.getAllPlugins()
def _load_plugins(
        category_filter=None,
        filter_func=lambda *arg: True,
        features=None,
        plugin_places=None,
        options=None):
    """Discover plugins and yield those accepted by *filter_func*.

    :param category_filter: mapping of category name -> plugin base class
        handed to the PluginManager (default: no category filtering).
    :param filter_func: callable ``(plugin_object, name, enabled_plugins,
        features) -> bool`` deciding whether a plugin is yielded.
    :param features: feature list passed through to *filter_func*
        (default: ``['os', 'cpu']``).
    :param plugin_places: directories to scan for ``.plugin`` files,
        relative to the location of this file (default: ``['plugins']``).
    :param options: per-plugin options forwarded to ``get_plugin_args``.
    :yield: ``(plugin_object, plugin_args)`` tuples for accepted plugins.
    """
    # Fix: avoid mutable default arguments ({} / []) — a shared default
    # object persists across calls; use None sentinels instead.
    if category_filter is None:
        category_filter = {}
    if features is None:
        features = ['os', 'cpu']
    if plugin_places is None:
        plugin_places = ['plugins']
    if options is None:
        options = {}

    pm = PluginManager(plugin_info_ext='plugin')
    # Normalize the paths to the location of this file.
    # XXX-ricarkol: there has to be a better way to do this.
    plugin_places = [misc.execution_path(x) for x in plugin_places]
    pm.setPluginPlaces(plugin_places)
    pm.setCategoriesFilter(category_filter)
    pm.collectPlugins()

    config = config_parser.get_config()

    enabled_plugins = []
    if 'enabled_plugins' in config['general']:
        enabled_plugins = config['general']['enabled_plugins']
        if 'ALL' in enabled_plugins:
            enabled_plugins = [p for p in config['crawlers']]
            # Reading from 'crawlers' section inside crawler.conf
            # Alternatively, 'ALL' can be made to signify
            # all crawlers in plugins/*

    for plugin in pm.getAllPlugins():
        if filter_func(
                plugin.plugin_object,
                plugin.name,
                enabled_plugins,
                features):
            plugin_args = get_plugin_args(plugin, config, options)
            yield (plugin.plugin_object, plugin_args)