class ProfilesManager(object):
    """Discovers profile modules with yapsy and drives profile simulation
    and device setup through them.
    """

    def __init__(self):
        # Profile plugins live in PROFILE_MODULE_DIR and use the
        # ".plugin" metadata-file extension.
        self.plugin_location = [PROFILE_MODULE_DIR]
        self.pluginManager = PluginManager(plugin_info_ext="plugin")
        self.pluginManager.setPluginPlaces(self.plugin_location)
        self.pluginManager.collectPlugins()

    def modules_info(self):
        """
        Get information with regards to each module loaded. It includes
        author, category, copyright, description, details, name, version
        and website.

        :return: dict of module name -> plugin info for all modules loaded
        """
        modules_info = {}
        for pluginInfo in self.pluginManager.getAllPlugins():
            modules_info[pluginInfo.name] = pluginInfo
        return modules_info

    def configForms(self, device_serial, module_name=""):
        """
        Get the configuration views of each module to be displayed on the
        web interface.

        :param device_serial: device identifier (not used in this method)
        :param module_name: when given, return only that module's view
        :return: a single view when module_name is given, otherwise a
                 dict of pluginInfo -> form
        """
        if module_name:
            plugin = self.pluginManager.getPluginByName(name=module_name)
            configForms = plugin.plugin_object.get_view()
        else:
            configForms = {}
            for pluginInfo in self.pluginManager.getAllPlugins():
                configForms[pluginInfo] = pluginInfo.plugin_object.get_view()
        return configForms

    def run_simulation(self, option, profile_name, duration, device_serial,
                       package_name, session):
        """
        Run a profile simulation script.

        :param option: RANDOM_INTERACTION or SCRIPTED_PROFILE_INTERACTION;
                       any other value is silently ignored (as before).
        """
        plugin = self.pluginManager.getPluginByName(name=profile_name)
        # The two original branches only differed in the ``random`` flag,
        # so they are collapsed into a single call.
        if option in (RANDOM_INTERACTION, SCRIPTED_PROFILE_INTERACTION):
            plugin.plugin_object.runSimulation(
                duration, package_name,
                random=(option == RANDOM_INTERACTION),
                device_serial=device_serial,
                session=session)

    def setup_device(self, module, params, device_serial):
        """
        Install the profile apk and profile app data onto the device.

        :return: True when the module was found and prepared
        """
        pluginInfo = self.pluginManager.getPluginByName(name=module)
        # getPluginByName already matches on name, so this check is
        # effectively always true for a found plugin; kept for safety.
        if pluginInfo.name == module:
            pluginInfo.plugin_object.prepare(params, device_serial)
            return True
def create(message):
    """Create a new validator plugin from a client-supplied message.

    ``message`` must contain:
      * ``name``      -- plugin name, also used as the file basename
      * ``file_info`` -- base64-encoded .yapsy-plugin metadata
      * ``data``      -- base64-encoded plugin source code

    Raises:
        Exception: if a plugin with the same name is already registered.
    """
    pluginManager = PluginManager()
    pluginManager.setPluginPlaces(
        [current_app.config["PLUGINS_PATH"] + os.sep + "validators"])
    pluginManager.collectPlugins()
    for pluginInfo in pluginManager.getAllPlugins():
        pluginManager.activatePluginByName(pluginInfo.name)
    # Refuse to overwrite an already-registered plugin of the same name.
    pluginInfo = pluginManager.getPluginByName(message['name'])
    if pluginInfo is not None:
        # BUG FIX: the original raised a plain string, which is a TypeError
        # at runtime; raise a real exception instead.
        raise Exception(
            "Lo sentimos pero el nombre del plugin que ya esta siendo utilizado")
    base_path = (current_app.config["PLUGINS_PATH"] + os.sep + "validators" +
                 os.sep + str(message["name"]))
    # ``with`` guarantees the files are closed even if decoding fails.
    # NOTE: str.decode("base64") is Python 2 only.
    with open(base_path + ".yapsy-plugin", "w") as plugin_info_file:
        plugin_info_file.write(
            str(message["file_info"]).decode("base64", errors="strict"))
        # Append the owning organization to the plugin metadata.
        plugin_info_file.write(
            str('Organization = ' + current_app.config['ORGANIZATION_CONTEXT_ID'] +
                ' - ' + current_app.config['ORGANIZATION_CONTEXT_DESC']))
    with open(base_path + ".py", "w") as plugin_file:
        plugin_file.write(str(message["data"]).decode("base64"))
class DownloadManager(object):
    """Singleton used to download any kind of resource from a source
    system.  It locates all plugins of type :class:`IDownloader` and
    provides them back to the requesting object.
    """

    __single_download_manager = None

    def __new__(cls, *args, **kwargs):
        """Singleton constructor: reuse the single per-class instance."""
        if cls != type(cls.__single_download_manager):
            # BUG FIX: object.__new__() takes no extra arguments (it is a
            # DeprecationWarning in Python 2 and a TypeError in Python 3),
            # so args/kwargs are no longer forwarded to it.
            cls.__single_download_manager = object.__new__(cls)
        return cls.__single_download_manager

    def __init__(self):
        """Set up plugin discovery for downloader plugins.

        NOTE(review): __init__ still runs on *every* instantiation and
        rebuilds the plugin manager even though __new__ reuses the same
        instance -- confirm this is the intended behaviour.
        """
        self.plugins_path = constants.DOWNLOADER_PLUGINS_PATH
        self.save_downloads_to = constants.SAVE_DOWNLOADS_TO_PATH
        self.plugin_manager = PluginManager(
            categories_filter={constants.DOWNLOAD_PLUGIN_FILTER: iplugins.IDownloader})
        self.plugin_manager.setPluginPlaces([self.plugins_path])
        self.plugin_manager.locatePlugins()
        self.plugin_manager.loadPlugins()

    def downloaders_list(self):
        """Provide a list of all downloader plugins available."""
        return self.plugin_manager.getPluginsOfCategory(constants.DOWNLOAD_PLUGIN_FILTER)

    def get_downloader(self, downloader_name=None):
        """Return a downloader plugin instance found by name."""
        return self.plugin_manager.getPluginByName(downloader_name,
                                                   constants.DOWNLOAD_PLUGIN_FILTER)
def execute(route, plugin, payload):
    """Locate, activate and run a single plugin.

    :param route: list of directories to search for plugins
    :param plugin: name of the plugin to execute
    :param payload: argument forwarded to the plugin's ``execute``
    :raises Exception: when the plugin cannot be found/loaded
    :return: whatever the plugin's ``execute(payload)`` returns
    """
    logging.basicConfig()
    # Build the manager and tell it where to find plugins.
    simplePluginManager = PluginManager()
    current_app.logger.debug(route)  # was a bare Py2 ``print route``
    simplePluginManager.setPluginPlaces(route)
    # Load all plugins.
    simplePluginManager.collectPlugins()
    # Activate every discovered plugin.  This already covers ``plugin``,
    # so the original duplicate activatePluginByName(plugin) call after
    # the loop was removed.
    for pluginInfo in simplePluginManager.getAllPlugins():
        simplePluginManager.activatePluginByName(pluginInfo.name)
    pluginInfo = simplePluginManager.getPluginByName(plugin)
    if pluginInfo is None:
        current_app.logger.error(
            "Lo sentimos pero no se ha podido cargar el plugin o el mismo no se ha encontrado"
        )
        raise Exception("No se ha podido cargar el plugin")
    return pluginInfo.plugin_object.execute(payload)
class BotInteraction():
    # IRC bot glue: parses raw server lines and dispatches prompt-prefixed
    # messages to yapsy plugins discovered under ./plugins.
    # NOTE: uses the Python 2 ``string`` module functions throughout.

    def __init__(self, config):
        # config: dict with 'channel', 'nickname' and 'prompt'
        # (the single-character command prefix).
        self.channel = config['channel']
        self.nickname = config['nickname']
        self.prompt = config['prompt']
        self.parts = ""
        self.ping = ""
        self.username = ""
        self.message = ""
        self.manager = PluginManager()
        self.manager.setPluginPlaces(["plugins"])
        self.manager.collectPlugins()

    def handle_line(self, username, message, parts):
        # Echo the incoming message, then dispatch it to a plugin when it
        # starts with the configured prompt character.
        print(username + ": " + message)
        if self.nickname not in parts[1] and message[0:1] == self.prompt:
            # message becomes [plugin_name] or [plugin_name, args_string].
            message = message.replace(">", " ").split(None, 1)
            # NOTE(review): the loop body does not use ``plugin``; it looks
            # up message[0] each iteration and the bare except retries the
            # same lookup once per installed plugin -- confirm intent.
            for plugin in self.manager.getAllPlugins():
                try:
                    plugin_yapsy = self.manager.getPluginByName(message[0])
                    command = []
                    # If the message has arguments, pass them through.
                    if len(message) == 2:
                        command = message[1]
                    return plugin_yapsy.plugin_object.execute(self.channel, username, command)
                except:
                    # Swallows every error (including missing plugins).
                    continue

    def check_ping(self, line):
        # Answer server PINGs and split a raw IRC line into parts,
        # message and username fields on the instance.
        if line.find("PING") != -1:
            # If server pings then pong.
            # NOTE(review): ``line`` is rebound to a list here, which the
            # string.split(line, ":") below cannot handle -- verify the
            # PING path never reaches that call.
            line = string.split(line, " ")
            self.ping = "PONG " + line[1]
        else:
            self.ping = ""
        self.parts = string.split(line, ":")
        if "QUIT" not in self.parts[1] and \
           "JOIN" not in self.parts[1] and \
           "PART" not in self.parts[1]:
            try:
                # Sets the message variable to the actual message sent
                # (strips the trailing character, presumably '\n').
                self.message = self.parts[2][:len(self.parts[2]) - 1]
            except:
                self.message = ""
            # Sets the username variable to the actual username.
            usernamesplit = string.split(self.parts[1], "!")
            self.username = usernamesplit[0]
def start_baseline(request):
    # Opens an OpenBCI board and, for the "packets_to_csv" plugin, passes
    # the board's channel/sample-rate metadata to the plugin before
    # streaming starts.
    manager = PluginManager()
    # NOTE(review): setPluginPlaces()/collectPlugins() are never called on
    # this manager, so getPluginByName() below will return None unless the
    # manager is populated elsewhere -- verify.
    plugin_name = request.data["plugin_name"]
    plugin = manager.getPluginByName(plugin_name)
    # ``args`` is presumably a module-level argparse namespace -- confirm.
    board = bci.OpenBCIBoard(port=args.port, daisy=args.daisy,
                             filter_data=args.filtering, scaled_output=True,
                             log=args.log)
    if (plugin_name == "packets_to_csv"):
        plugin.plugin_object.pre_activate({},
                                          sample_rate=board.getSampleRate(),
                                          eeg_channels=board.getNbEEGChannels(),
                                          aux_channels=board.getNbAUXChannels())
def execute(self, channel, username, command):
    """Yield a single ``(channel, text)`` reply.

    With a ``command`` argument the text is that plugin's description;
    otherwise it is a comma-separated list of all plugin names.
    """
    pm = PluginManager()
    pm.setPluginPlaces(["plugins"])
    pm.collectPlugins()
    if command:
        yield channel, pm.getPluginByName(command).description
    else:
        names = [info.name for info in pm.getAllPlugins()]
        yield channel, ', '.join(names)
def start_baseline(request):
    # Opens an OpenBCI board and, for the "packets_to_csv" plugin, forwards
    # the board's channel/sample-rate metadata to the plugin.
    manager = PluginManager()
    # NOTE(review): the manager is never given plugin places and
    # collectPlugins() is not called, so getPluginByName() will return
    # None unless population happens elsewhere -- verify.
    plugin_name = request.data["plugin_name"]
    plugin = manager.getPluginByName(plugin_name)
    # ``args`` is presumably a module-level argparse namespace -- confirm.
    board = bci.OpenBCIBoard(port=args.port, daisy=args.daisy,
                             filter_data=args.filtering, scaled_output=True,
                             log=args.log)
    if (plugin_name == "packets_to_csv"):
        plugin.plugin_object.pre_activate(
            {}, sample_rate=board.getSampleRate(),
            eeg_channels=board.getNbEEGChannels(),
            aux_channels=board.getNbAUXChannels())
def run():  # pragma: no cover
    """Command-line entry point: build a swagger schema with one plugin.

    Discovers py2swagger plugins, exposes each as an argparse sub-command,
    runs the selected plugin and writes the resulting JSON schema to the
    chosen output file or stdout.  Exits with status 1 on plugin lookup or
    execution failure.
    """
    pm = PluginManager(
        categories_filter={'py2swagger': Py2SwaggerPlugin},
        directories_list=[os.path.join(os.path.dirname(__file__), 'plugins')],
        plugin_info_ext='py2swagger'
    )
    pm.collectPlugins()

    parser = argparse.ArgumentParser(description='Swagger schema builder')
    parser.add_argument('-c', '--config', action='store', dest='config',
                        help='Path to config file')
    parser.add_argument('-r', '--root', action='store', dest='root',
                        help='Path to project root. Default is current directory or configuration file location')
    parser.add_argument('-o', '--output', action='store', dest='output',
                        help='Output file (Default stdout)')
    sub_parsers = parser.add_subparsers(title='plugins', dest='plugin')

    # Every discovered plugin contributes its own sub-command and options.
    for info in pm.getAllPlugins():
        sp = sub_parsers.add_parser(info.name,
                                    help=info.plugin_object.summary,
                                    description=info.plugin_object.description)
        info.plugin_object.set_parser_arguments(sp)

    args = parser.parse_args()
    sys.path.append(_get_project_root_path(args.root, args.config))
    swagger_settings, plugin_settings = get_settings(args.config)

    selected = pm.getPluginByName(args.plugin, category='py2swagger')
    if not selected:
        sys.stderr.write('Plugin not available\n')
        sys.exit(1)

    try:
        settings_part = selected.plugin_object.run(args, **plugin_settings)
    except Py2SwaggerPluginException as e:
        sys.stderr.write('{}\n'.format(e))
        sys.exit(1)

    swagger_settings = update_settings(swagger_settings, settings_part)
    swagger_schema = json.dumps(SchemaBuilder(**swagger_settings).schema, indent=2)

    if args.output:
        with codecs.open(args.output, 'wb', encoding='utf-8') as f:
            f.write(swagger_schema)
    else:
        sys.stdout.write(swagger_schema)
def __init__(self):
    """Load cluster settings onto the instance and expose the configured plugin."""
    config = self.__load_settings()
    # Promote every option from the [cluster] section to an instance attribute.
    cluster_options = dict(config.items('cluster'))
    for option, raw_value in cluster_options.items():
        setattr(self, str(option), str(raw_value))
    # Fall back to the 'omd' plugin when none is configured.
    plugin_name = getattr(self, 'load_plugin', 'omd')
    # Discover plugins and expose the selected one.
    loader = PluginManager()
    loader.setPluginPlaces(["/home/cluster/clusterlib/plugins"])
    loader.collectPlugins()
    self.plugin = loader.getPluginByName(plugin_name, category='Default')
def __init__(self):
    """Load cluster settings onto the instance and expose the monitoring plugin."""
    config = self.__load_settings()
    # Copy every option from the [cluster] section onto this instance.
    # NOTE: iteritems() is Python 2 only.
    for key, value in dict(config.items('cluster')).iteritems():
        setattr(self, str(key), str(value))
    # Defaults to omd if no plugin defined in the settings.
    plugin_name = (self.load_plugin if hasattr(self, 'load_plugin') else 'omd')
    # Load plugins from the fixed cluster plugin directory.
    manager = PluginManager()
    manager.setPluginPlaces(["/home/cluster/clusterlib/plugins"])
    manager.collectPlugins()
    # Expose the selected plugin to callers.
    self.plugin = manager.getPluginByName(plugin_name, category='Default')
class fbbot(fbchat.Client):
    """Facebook chat bot that dispatches "/bot <keyword> ..." messages to
    yapsy plugins discovered under ./plugins.

    NOTE(review): these are class-level dicts shared by all instances --
    confirm a single bot instance is the intended usage.
    """

    pluginManager = None
    # keyword -> plugin name lookup tables, filled from each plugin's vectors
    user_keyword_vector = {}
    admin_keyword_vector = {}

    def __init__(self, email, password, debug=True, user_agent=None):
        fbchat.Client.__init__(self, email, password, debug, user_agent)
        self.pluginManager = PluginManager()
        self.pluginManager.setPluginPlaces(['plugins'])
        self.pluginManager.collectPlugins()
        # Register all the keywords and the name of their respective plugin.
        for plugin in self.pluginManager.getAllPlugins():
            try:
                self.pluginManager.activatePluginByName(plugin.name)
                for key in plugin.plugin_object.user_keyword_vector.keys():
                    self.user_keyword_vector[key] = plugin.name
                for key in plugin.plugin_object.admin_keyword_vector.keys():
                    self.admin_keyword_vector[key] = plugin.name
            except Exception as ex:
                # Best effort: a broken plugin should not stop the bot.
                print(ex)

    def on_message(self, mid, author_id, author_name, message, metadata):
        """Handle an incoming message; reply when it starts with "/bot"."""
        self.markAsDelivered(author_id, mid)
        self.markAsRead(author_id)
        # Ignore our own messages.
        if str(author_id) != str(self.uid):
            # Trigger word.
            if message.startswith("/bot"):
                words = message.split()
                # BUG FIX: a bare "/bot" message used to raise IndexError on
                # words[1]; treat it as an unrecognized command instead.
                if len(words) > 1 and words[1] in self.user_keyword_vector.keys():
                    plugin = self.pluginManager.getPluginByName(
                        self.user_keyword_vector[words[1]])
                    response = plugin.plugin_object.user_keyword_vector[
                        words[1]](words)
                else:
                    response = "Unrecognized Command"
                # Send the response.
                sent = self.send(author_id, response)
                if sent:
                    print("Sent", response, "to", author_id)
                else:
                    print("Failed", response, "to", author_id)
class ProcessingManager(object):
    """Discovers processing modules with yapsy and runs them over a session."""

    def __init__(self):
        # Processing plugins are found in PROCESSING_MODULE_DIR and use
        # the ".plugin" metadata-file extension.
        self.plugin_location = [PROCESSING_MODULE_DIR]
        self.pluginManager = PluginManager(plugin_info_ext="plugin")
        self.pluginManager.setPluginPlaces(self.plugin_location)
        self.pluginManager.collectPlugins()

    def modules_info(self):
        """Return a mapping of module name -> plugin info for every loaded
        module (author, category, copyright, description, details, name,
        version and website).
        """
        return dict((info.name, info)
                    for info in self.pluginManager.getAllPlugins())

    def run(self, session_id):
        """Run every processing module against ``session_id``.

        :return: dict of module name -> pretty-printed JSON result string
        """
        results = {}
        for info in self.pluginManager.getAllPlugins():
            outcome = info.plugin_object.run(session_id)
            # Serialize with stable key order for reproducible output.
            results[info.name] = json.dumps(outcome, sort_keys=True, indent=4)
        return results

    def general_information(self, session_id):
        """Return the dict produced by the ``general_information`` module
        for the given session.

        :param session_id: session identifier passed through to the module
        :return: dict
        """
        module = self.pluginManager.getPluginByName(name="general_information")
        return module.plugin_object.run(session_id)
class App(QtGui.QApplication):
    # Kiosk application: loads page plugins, builds the fullscreen UI and
    # resets the idle timeout on user input.

    def __init__(self, argv):
        super(App, self).__init__(argv)
        # Read the config-file path from the command line.
        parser = optparse.OptionParser()
        parser.add_option('-c', '--config', help='Path to a configuration',
                          default='/etc/bonehead.cfg')
        (options, args) = parser.parse_args()
        logging.basicConfig(level=logging.DEBUG)
        self._conf = ConfigParser()
        self._conf.read(options.config)
        # Collect and activate every page plugin from the system and local
        # plugin directories.
        self._plugins = PluginManager()
        self._plugins.setPluginPlaces([
            '/usr/lib/bonehead/pages',
            './pages/'
        ])
        self._plugins.collectPlugins()
        for plugin in self._plugins.getAllPlugins():
            self._plugins.activatePluginByName(plugin.name)
        self.__ui = KioskUI()
        # NOTE(review): ConfigParser.get() has no default parameter; the
        # third positional argument here is actually ``raw`` -- confirm
        # the ``[]`` was meant as a fallback value.
        pages = self._conf.get('general', 'pages', []).split(',')
        # Build one UI page per configured page section ("page:<name>").
        for pageName in pages:
            pageConfig = {}
            for k, v in self._conf.items("page:%s" % (pageName)):
                pageConfig[k] = v
            pagePlugin = self._plugins.getPluginByName(pageConfig['plugin'])
            page = pagePlugin.plugin_object.newPage(pageName, pageConfig, self.__ui)
            self.__ui.addPage(page)
        self.__ui.showFullScreen()

    def notify(self, object, event):
        # Reset the kiosk idle timeout on any key or mouse press before
        # normal Qt event dispatch.
        if event.type() == QtCore.QEvent.KeyPress or event.type() == QtCore.QEvent.MouseButtonPress:
            self.__ui.resetTimeout()
        return super(App, self).notify(object, event)
def save(message):
    """Write (or overwrite) the metadata and source files of a validator plugin.

    ``message`` must contain ``name`` (plugin name; spaces are removed for
    the file basename), ``file_info`` (base64-encoded .yapsy-plugin
    metadata) and ``data`` (base64-encoded plugin source).
    """
    pluginManager = PluginManager()
    pluginManager.setPluginPlaces(
        [current_app.config["PLUGINS_PATH"] + os.sep + "validators"])
    pluginManager.collectPlugins()
    for pluginInfo in pluginManager.getAllPlugins():
        pluginManager.activatePluginByName(pluginInfo.name)
    # (The original looked the plugin up by name here but never used the
    # result; that dead lookup was removed.)
    base_path = (current_app.config["PLUGINS_PATH"] + os.sep + "validators" +
                 os.sep + str(message["name"]).replace(" ", ""))
    # ``with`` guarantees the files are closed even if decoding fails.
    # NOTE: str.decode("base64") is Python 2 only.
    with open(base_path + ".yapsy-plugin", "w") as plugin_info_file:
        plugin_info_file.write(
            str(message["file_info"]).decode("base64", errors="strict"))
    with open(base_path + ".py", "w") as plugin_file:
        plugin_file.write(str(message["data"]).decode("base64"))
def generate(from_dir, output_dir):
    """Generate the site from ``from_dir`` into ``output_dir``.

    Copies static assets, activates every plugin with the site settings,
    then runs the plugin named by each API entry and echoes its output.
    """
    from_dir = os.path.abspath(from_dir)
    output_dir = os.path.abspath(output_dir)
    create_output_dir(output_dir)
    copy_static_dir(from_dir, output_dir)

    # Load the site description and enrich its settings with both paths.
    site = load_mypoint(from_dir)
    settings = site.pop('settings')
    # TODO: rename to from_dir
    settings['from_dir'] = from_dir
    settings['output_dir'] = output_dir

    plugin_mgr = PluginManager()
    plugin_mgr.setPluginPlaces(["plugins"])
    plugin_mgr.collectPlugins()
    for discovered in plugin_mgr.getAllPlugins():
        discovered.plugin_object.activate(settings)

    # Each API entry names its plugin; the rest of the entry becomes
    # keyword arguments to that plugin's generate().
    for api_spec in site.pop('apis'):
        handler = plugin_mgr.getPluginByName(api_spec.pop('plugin')).plugin_object
        click.echo(handler.generate(**api_spec))
class Nikola(object): """Class that handles site generation. Takes a site config as argument on creation. """ EXTRA_PLUGINS = ["planetoid", "ipynb", "local_search", "render_mustache"] def __init__(self, **config): """Setup proper environment for running tasks.""" self.global_data = {} self.posts_per_year = defaultdict(list) self.posts_per_month = defaultdict(list) self.posts_per_tag = defaultdict(list) self.timeline = [] self.pages = [] self._scanned = False if not config: self.configured = False else: self.configured = True # This is the default config self.config = { "ADD_THIS_BUTTONS": True, "ANALYTICS": "", "ARCHIVE_PATH": "", "ARCHIVE_FILENAME": "archive.html", "CACHE_FOLDER": "cache", "CODE_COLOR_SCHEME": "default", "COMMENTS_IN_GALLERIES": False, "COMMENTS_IN_STORIES": False, "CONTENT_FOOTER": "", "CREATE_MONTHLY_ARCHIVE": False, "DATE_FORMAT": "%Y-%m-%d %H:%M", "DEFAULT_LANG": "en", "DEPLOY_COMMANDS": [], "DISABLED_PLUGINS": (), "DISQUS_FORUM": "nikolademo", "ENABLED_EXTRAS": (), "EXTRA_HEAD_DATA": "", "FAVICONS": {}, "FILE_METADATA_REGEXP": None, "FILES_FOLDERS": {"files": ""}, "FILTERS": {}, "GALLERY_PATH": "galleries", "GZIP_FILES": False, "GZIP_EXTENSIONS": (".txt", ".htm", ".html", ".css", ".js", ".json"), "HIDE_UNTRANSLATED_POSTS": False, "INDEX_DISPLAY_POST_COUNT": 10, "INDEX_TEASERS": False, "INDEXES_TITLE": "", "INDEXES_PAGES": "", "INDEX_PATH": "", "LICENSE": "", "LISTINGS_FOLDER": "listings", "MAX_IMAGE_SIZE": 1280, "MATHJAX_CONFIG": "", "OLD_THEME_SUPPORT": True, "OUTPUT_FOLDER": "output", "post_compilers": { "rest": (".txt", ".rst"), "markdown": (".md", ".mdown", ".markdown"), "textile": (".textile",), "txt2tags": (".t2t",), "bbcode": (".bb",), "wiki": (".wiki",), "ipynb": (".ipynb",), "html": (".html", ".htm"), }, "POST_PAGES": ( ("posts/*.txt", "posts", "post.tmpl", True), ("stories/*.txt", "stories", "story.tmpl", False), ), "REDIRECTIONS": [], "RSS_LINK": None, "RSS_PATH": "", "RSS_TEASERS": True, "SEARCH_FORM": "", "SLUG_TAG_PATH": True, 
"STORY_INDEX": False, "STRIP_INDEX_HTML": False, "TAG_PATH": "categories", "TAG_PAGES_ARE_INDEXES": False, "THEME": "site", "THEME_REVEAL_CONGIF_SUBTHEME": "sky", "THEME_REVEAL_CONGIF_TRANSITION": "cube", "THUMBNAIL_SIZE": 180, "USE_BUNDLES": True, "USE_CDN": False, "USE_FILENAME_AS_TITLE": True, "TIMEZONE": None, } self.config.update(config) self.config["TRANSLATIONS"] = self.config.get("TRANSLATIONS", {self.config["DEFAULT_" "LANG"]: ""}) self.THEMES = utils.get_theme_chain(self.config["THEME"]) self.MESSAGES = utils.load_messages(self.THEMES, self.config["TRANSLATIONS"], self.config["DEFAULT_LANG"]) # SITE_URL is required, but if the deprecated BLOG_URL # is available, use it and warn if "SITE_URL" not in self.config: if "BLOG_URL" in self.config: print("WARNING: You should configure SITE_URL instead of BLOG_URL") self.config["SITE_URL"] = self.config["BLOG_URL"] self.default_lang = self.config["DEFAULT_LANG"] self.translations = self.config["TRANSLATIONS"] # BASE_URL defaults to SITE_URL if "BASE_URL" not in self.config: self.config["BASE_URL"] = self.config.get("SITE_URL") self.plugin_manager = PluginManager( categories_filter={ "Command": Command, "Task": Task, "LateTask": LateTask, "TemplateSystem": TemplateSystem, "PageCompiler": PageCompiler, } ) self.plugin_manager.setPluginInfoExtension("plugin") self.plugin_manager.setPluginPlaces( [str(os.path.join(os.path.dirname(__file__), "plugins")), str(os.path.join(os.getcwd(), "plugins"))] ) self.plugin_manager.collectPlugins() self.commands = {} # Activate all command plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("Command"): if plugin_info.name in self.config["DISABLED_PLUGINS"] or ( plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config["ENABLED_EXTRAS"] ): self.plugin_manager.removePluginFromCategory(plugin_info, "Command") continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) 
plugin_info.plugin_object.short_help = plugin_info.description self.commands[plugin_info.name] = plugin_info.plugin_object # Activate all task plugins for task_type in ["Task", "LateTask"]: for plugin_info in self.plugin_manager.getPluginsOfCategory(task_type): if plugin_info.name in self.config["DISABLED_PLUGINS"] or ( plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config["ENABLED_EXTRAS"] ): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # set global_context for template rendering self.GLOBAL_CONTEXT = {} self.GLOBAL_CONTEXT["messages"] = self.MESSAGES self.GLOBAL_CONTEXT["_link"] = self.link self.GLOBAL_CONTEXT["set_locale"] = s_l self.GLOBAL_CONTEXT["rel_link"] = self.rel_link self.GLOBAL_CONTEXT["abs_link"] = self.abs_link self.GLOBAL_CONTEXT["exists"] = self.file_exists self.GLOBAL_CONTEXT["SLUG_TAG_PATH"] = self.config["SLUG_TAG_PATH"] self.GLOBAL_CONTEXT["add_this_buttons"] = self.config["ADD_THIS_BUTTONS"] self.GLOBAL_CONTEXT["index_display_post_count"] = self.config["INDEX_DISPLAY_POST_COUNT"] self.GLOBAL_CONTEXT["use_bundles"] = self.config["USE_BUNDLES"] self.GLOBAL_CONTEXT["use_cdn"] = self.config.get("USE_CDN") self.GLOBAL_CONTEXT["favicons"] = self.config["FAVICONS"] self.GLOBAL_CONTEXT["date_format"] = self.config.get("DATE_FORMAT", "%Y-%m-%d %H:%M") self.GLOBAL_CONTEXT["blog_author"] = self.config.get("BLOG_AUTHOR") self.GLOBAL_CONTEXT["blog_title"] = self.config.get("BLOG_TITLE") self.GLOBAL_CONTEXT["blog_url"] = self.config.get("SITE_URL", self.config.get("BLOG_URL")) self.GLOBAL_CONTEXT["blog_desc"] = self.config.get("BLOG_DESCRIPTION") self.GLOBAL_CONTEXT["analytics"] = self.config.get("ANALYTICS") self.GLOBAL_CONTEXT["translations"] = self.config.get("TRANSLATIONS") self.GLOBAL_CONTEXT["license"] = self.config.get("LICENSE") self.GLOBAL_CONTEXT["search_form"] = 
self.config.get("SEARCH_FORM") self.GLOBAL_CONTEXT["disqus_forum"] = self.config.get("DISQUS_FORUM") self.GLOBAL_CONTEXT["mathjax_config"] = self.config.get("MATHJAX_CONFIG") self.GLOBAL_CONTEXT["subtheme"] = self.config.get("THEME_REVEAL_CONGIF_SUBTHEME") self.GLOBAL_CONTEXT["transition"] = self.config.get("THEME_REVEAL_CONGIF_TRANSITION") self.GLOBAL_CONTEXT["content_footer"] = self.config.get("CONTENT_FOOTER") self.GLOBAL_CONTEXT["rss_path"] = self.config.get("RSS_PATH") self.GLOBAL_CONTEXT["rss_link"] = self.config.get("RSS_LINK") self.GLOBAL_CONTEXT["sidebar_links"] = utils.Functionary(list, self.config["DEFAULT_LANG"]) for k, v in self.config.get("SIDEBAR_LINKS", {}).items(): self.GLOBAL_CONTEXT["sidebar_links"][k] = v self.GLOBAL_CONTEXT["twitter_card"] = self.config.get("TWITTER_CARD", {}) self.GLOBAL_CONTEXT["extra_head_data"] = self.config.get("EXTRA_HEAD_DATA") self.GLOBAL_CONTEXT.update(self.config.get("GLOBAL_CONTEXT", {})) # check if custom css exist and is not empty for files_path in list(self.config["FILES_FOLDERS"].keys()): custom_css_path = os.path.join(files_path, "assets/css/custom.css") if self.file_exists(custom_css_path, not_empty=True): self.GLOBAL_CONTEXT["has_custom_css"] = True break else: self.GLOBAL_CONTEXT["has_custom_css"] = False # Load template plugin template_sys_name = utils.get_template_engine(self.THEMES) pi = self.plugin_manager.getPluginByName(template_sys_name, "TemplateSystem") if pi is None: sys.stderr.write("Error loading {0} template system " "plugin\n".format(template_sys_name)) sys.exit(1) self.template_system = pi.plugin_object lookup_dirs = [os.path.join(utils.get_theme_path(name), "templates") for name in self.THEMES] self.template_system.set_directories(lookup_dirs, self.config["CACHE_FOLDER"]) # Check consistency of USE_CDN and the current THEME (Issue #386) if self.config["USE_CDN"]: bootstrap_path = utils.get_asset_path(os.path.join("assets", "css", "bootstrap.min.css"), self.THEMES) if 
bootstrap_path.split(os.sep)[-4] != "site": warnings.warn( "The USE_CDN option may be incompatible with your theme, because it uses a hosted version of bootstrap." ) # Load compiler plugins self.compilers = {} self.inverse_compilers = {} for plugin_info in self.plugin_manager.getPluginsOfCategory("PageCompiler"): self.compilers[plugin_info.name] = plugin_info.plugin_object.compile_html def get_compiler(self, source_name): """Get the correct compiler for a post from `conf.post_compilers` To make things easier for users, the mapping in conf.py is compiler->[extensions], although this is less convenient for us. The majority of this function is reversing that dictionary and error checking. """ ext = os.path.splitext(source_name)[1] try: compile_html = self.inverse_compilers[ext] except KeyError: # Find the correct compiler for this files extension langs = [lang for lang, exts in list(self.config["post_compilers"].items()) if ext in exts] if len(langs) != 1: if len(set(langs)) > 1: exit( "Your file extension->compiler definition is" "ambiguous.\nPlease remove one of the file extensions" "from 'post_compilers' in conf.py\n(The error is in" "one of {0})".format(", ".join(langs)) ) elif len(langs) > 1: langs = langs[:1] else: exit("post_compilers in conf.py does not tell me how to " "handle '{0}' extensions.".format(ext)) lang = langs[0] compile_html = self.compilers[lang] self.inverse_compilers[ext] = compile_html return compile_html def render_template(self, template_name, output_name, context): local_context = {} local_context["template_name"] = template_name local_context.update(self.GLOBAL_CONTEXT) local_context.update(context) data = self.template_system.render_template(template_name, None, local_context) assert output_name.startswith(self.config["OUTPUT_FOLDER"]) url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1 :] # Treat our site as if output/ is "/" and then make all URLs relative, # making the site "relocatable" src = os.sep + url_part src = 
os.path.normpath(src) # The os.sep is because normpath will change "/" to "\" on windows src = "/".join(src.split(os.sep)) parsed_src = urlsplit(src) src_elems = parsed_src.path.split("/")[1:] def replacer(dst): # Refuse to replace links that are full URLs. dst_url = urlparse(dst) if dst_url.netloc: if dst_url.scheme == "link": # Magic link dst = self.link(dst_url.netloc, dst_url.path.lstrip("/"), context["lang"]) else: return dst # Normalize dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute dst_elems = parsed_dst.path.split("/")[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break # Now i is the longest common prefix result = "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:]) if not result: result = "." # Don't forget the fragment (anchor) part of the link if parsed_dst.fragment: result += "#" + parsed_dst.fragment assert result, (src, dst, i, src_elems, dst_elems) return result try: os.makedirs(os.path.dirname(output_name)) except: pass doc = lxml.html.document_fromstring(data) doc.rewrite_links(replacer) data = b"<!DOCTYPE html>" + lxml.html.tostring(doc, encoding="utf8") with open(output_name, "wb+") as post_file: post_file.write(data) def current_lang(self): # FIXME: this is duplicated, turn into a mixin """Return the currently set locale, if it's one of the available translations, or default_lang.""" lang = utils.LocaleBorg().current_lang if lang: if lang in self.translations: return lang lang = lang.split("_")[0] if lang in self.translations: return lang # whatever return self.default_lang def path(self, kind, name, lang=None, is_link=False): """Build the path to a certain kind of page. 
kind is one of: * tag_index (name is ignored) * tag (and name is the tag name) * tag_rss (name is the tag name) * archive (and name is the year, or None for the main archive index) * index (name is the number in index-number) * rss (name is ignored) * gallery (name is the gallery name) * listing (name is the source code file name) * post_path (name is 1st element in a post_pages tuple) The returned value is always a path relative to output, like "categories/whatever.html" If is_link is True, the path is absolute and uses "/" as separator (ex: "/archive/index.html"). If is_link is False, the path is relative to output and uses the platform's separator. (ex: "archive\\index.html") """ if lang is None: lang = self.current_lang() path = [] if kind == "tag_index": path = [_f for _f in [self.config["TRANSLATIONS"][lang], self.config["TAG_PATH"], "index.html"] if _f] elif kind == "tag": if self.config["SLUG_TAG_PATH"]: name = utils.slugify(name) path = [_f for _f in [self.config["TRANSLATIONS"][lang], self.config["TAG_PATH"], name + ".html"] if _f] elif kind == "tag_rss": if self.config["SLUG_TAG_PATH"]: name = utils.slugify(name) path = [_f for _f in [self.config["TRANSLATIONS"][lang], self.config["TAG_PATH"], name + ".xml"] if _f] elif kind == "index": if name not in [None, 0]: path = [ _f for _f in [ self.config["TRANSLATIONS"][lang], self.config["INDEX_PATH"], "index-{0}.html".format(name), ] if _f ] else: path = [_f for _f in [self.config["TRANSLATIONS"][lang], self.config["INDEX_PATH"], "index.html"] if _f] elif kind == "post_path": path = [_f for _f in [self.config["TRANSLATIONS"][lang], os.path.dirname(name), "index.html"] if _f] elif kind == "rss": path = [_f for _f in [self.config["TRANSLATIONS"][lang], self.config["RSS_PATH"], "rss.xml"] if _f] elif kind == "archive": if name: path = [ _f for _f in [self.config["TRANSLATIONS"][lang], self.config["ARCHIVE_PATH"], name, "index.html"] if _f ] else: path = [ _f for _f in [ self.config["TRANSLATIONS"][lang], 
self.config["ARCHIVE_PATH"], self.config["ARCHIVE_FILENAME"], ] if _f ] elif kind == "gallery": path = [_f for _f in [self.config["GALLERY_PATH"], name, "index.html"] if _f] elif kind == "listing": path = [_f for _f in [self.config["LISTINGS_FOLDER"], name + ".html"] if _f] if is_link: link = "/" + ("/".join(path)) if self.config["STRIP_INDEX_HTML"] and link.endswith("/index.html"): return link[:-10] else: return link else: return os.path.join(*path) def link(self, *args): return self.path(*args, is_link=True) def abs_link(self, dst): # Normalize dst = urljoin(self.config["BASE_URL"], dst) return urlparse(dst).path def rel_link(self, src, dst): # Normalize src = urljoin(self.config["BASE_URL"], src) dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlsplit(src) parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split("/")[1:] dst_elems = parsed_dst.path.split("/")[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. 
If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def gen_tasks(self): def create_gzipped_copy(in_path, out_path): with gzip.GzipFile(out_path, "wb+") as outf: with open(in_path, "rb") as inf: outf.write(inf.read()) def flatten(task): if isinstance(task, dict): yield task else: for t in task: for ft in flatten(t): yield ft def add_gzipped_copies(task): if not self.config["GZIP_FILES"]: return None if task.get("name") is None: return None gzip_task = { "file_dep": [], "targets": [], "actions": [], "basename": "gzip", "name": task.get("name") + ".gz", "clean": True, } targets = task.get("targets", []) flag = False for target in targets: ext = os.path.splitext(target)[1] if ext.lower() in self.config["GZIP_EXTENSIONS"] and target.startswith(self.config["OUTPUT_FOLDER"]): flag = True gzipped = target + ".gz" gzip_task["file_dep"].append(target) gzip_task["targets"].append(gzipped) gzip_task["actions"].append((create_gzipped_copy, (target, gzipped))) if not flag: return None return gzip_task if self.config["GZIP_FILES"]: task_dep = ["gzip"] else: task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"): for task in flatten(pluginInfo.plugin_object.gen_tasks()): gztask = add_gzipped_copies(task) if gztask: yield gztask yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"): for task in pluginInfo.plugin_object.gen_tasks(): gztask = add_gzipped_copies(task) if gztask: yield gztask yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield {"name": b"all", "actions": None, "clean": True, "task_dep": task_dep} def scan_posts(self): """Scan all the posts.""" if self._scanned: return print("Scanning posts", end="") tzinfo = None if self.config["TIMEZONE"] is not None: tzinfo = 
pytz.timezone(self.config["TIMEZONE"]) targets = set([]) for wildcard, destination, template_name, use_in_feeds in self.config["post_pages"]: print(".", end="") dirname = os.path.dirname(wildcard) for dirpath, _, _ in os.walk(dirname): dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) dest_dir = os.path.normpath(os.path.join(destination, os.path.relpath(dirpath, dirname))) full_list = glob.glob(dir_glob) # Now let's look for things that are not in default_lang for lang in self.config["TRANSLATIONS"].keys(): lang_glob = dir_glob + "." + lang translated_list = glob.glob(lang_glob) for fname in translated_list: orig_name = os.path.splitext(fname)[0] if orig_name in full_list: continue full_list.append(orig_name) for base_path in full_list: post = Post( base_path, self.config["CACHE_FOLDER"], dest_dir, use_in_feeds, self.config["TRANSLATIONS"], self.config["DEFAULT_LANG"], self.config["BASE_URL"], self.MESSAGES, template_name, self.config["FILE_METADATA_REGEXP"], self.config["STRIP_INDEX_HTML"], tzinfo, self.config["HIDE_UNTRANSLATED_POSTS"], ) for lang, langpath in list(self.config["TRANSLATIONS"].items()): dest = (destination, langpath, dir_glob, post.meta[lang]["slug"]) if dest in targets: raise Exception( "Duplicated output path {0!r} " "in post {1!r}".format(post.meta[lang]["slug"], base_path) ) targets.add(dest) self.global_data[post.post_name] = post if post.use_in_feeds: self.posts_per_year[str(post.date.year)].append(post.post_name) self.posts_per_month["{0}/{1:02d}".format(post.date.year, post.date.month)].append( post.post_name ) for tag in post.tags: self.posts_per_tag[tag].append(post.post_name) else: self.pages.append(post) if self.config["OLD_THEME_SUPPORT"]: post._add_old_metadata() for name, post in list(self.global_data.items()): self.timeline.append(post) self.timeline.sort(key=lambda p: p.date) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = 
post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = post_timeline[i + 1] self._scanned = True print("done!") def generic_page_renderer(self, lang, post, filters): """Render post fragments to final HTML pages.""" context = {} deps = post.deps(lang) + self.template_system.template_deps(post.template_name) context["post"] = post context["lang"] = lang context["title"] = post.title(lang) context["description"] = post.description(lang) context["permalink"] = post.permalink(lang) context["page_list"] = self.pages if post.use_in_feeds: context["enable_comments"] = True else: context["enable_comments"] = self.config["COMMENTS_IN_STORIES"] output_name = os.path.join(self.config["OUTPUT_FOLDER"], post.destination_path(lang)) deps_dict = copy(context) deps_dict.pop("post") if post.prev_post: deps_dict["PREV_LINK"] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict["NEXT_LINK"] = [post.next_post.permalink(lang)] deps_dict["OUTPUT_FOLDER"] = self.config["OUTPUT_FOLDER"] deps_dict["TRANSLATIONS"] = self.config["TRANSLATIONS"] deps_dict["global"] = self.GLOBAL_CONTEXT deps_dict["comments"] = context["enable_comments"] task = { "name": os.path.normpath(output_name), "file_dep": deps, "targets": [output_name], "actions": [(self.render_template, [post.template_name, output_name, context])], "clean": True, "uptodate": [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" deps = self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config["BLOG_TITLE"] context["description"] = self.config["BLOG_DESCRIPTION"] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.meta[lang]["title"], 
p.permalink(lang)) for p in posts] deps_context["global"] = self.GLOBAL_CONTEXT task = { "name": os.path.normpath(output_name), "targets": [output_name], "file_dep": deps, "actions": [(self.render_template, [template_name, output_name, context])], "clean": True, "uptodate": [config_changed(deps_context)], } return utils.apply_filters(task, filters)
class PluginSystem:
    """Discover, load and activate yapsy plugins for the application."""

    def __init__(self, config=None, extra_plugin_paths=None):
        """Initialize a new PluginSystem. Needs no arguments.

        :param config: optional config object; when given, the list of plugins
            to load is negotiated through ``config.add_defaults``/``config.get``.
        :param extra_plugin_paths: optional extra directories to scan for
            plugins, in addition to the bundled ``plugins`` directory.
            (Default changed from a mutable ``[]`` literal to ``None`` -- same
            behavior, avoids the shared-mutable-default pitfall.)
        """
        if extra_plugin_paths is None:
            extra_plugin_paths = []
        # Build the manager
        self._mngr = PluginManager(plugin_info_ext='plugin')
        # Tell it the default place(s) where to find plugins
        plugin_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'plugins')
        self._mngr.setPluginPlaces([plugin_path] + extra_plugin_paths)
        # find the categories specified in moosecat/plugins/__init__.py
        self._categories = _get_interfaces_from_module()
        LOGGER.debug('Discovered following plugins categories: ' + ', '.join(self.list_categories()))
        # tell yapsy about our extra categories
        self._mngr.setCategoriesFilter(self._categories)
        try:
            self._mngr.collectPlugins()
        except SystemError:
            # best-effort: a broken plugin must not take the whole system down
            LOGGER.exception('Some plugin could not be loaded.')
        # Get a list of plugin names to load
        load_these = [pluginInfo.name for pluginInfo in self._mngr.getAllPlugins()]
        if config is not None:
            config.add_defaults({'plugins_to_load': load_these})
            load_these = config.get('plugins_to_load')
        # Actually load them
        for name in load_these:
            self._mngr.activatePluginByName(name)

    def list_plugin_info(self):
        '''
        Get a list of PluginInfo objects.

        These contain additionnal metadata found in the .plugin files.
        See here for more information:

            http://yapsy.sourceforge.net/PluginInfo.html

        Useful if you want to realize a list of Plugins.

        :returns: A list of PluginInfo objects
        '''
        return self._mngr.getAllPlugins()

    def list_plugin_info_by_category(self, name):
        """Return the PluginInfo objects registered under category *name*."""
        return self._mngr.getPluginsOfCategory(name)

    def list_categories(self):
        'Get a string list of categories.'
        # materialize: the original returned a dict keys view despite the
        # documented "list" contract
        return list(self._categories.keys())

    def by_name(self, plugin_name):
        'Access a Plugin by its actual name - discouraged'
        return self._mngr.getPluginByName(plugin_name)

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # every PluginSystem instance alive for the cache lifetime (ruff B019);
    # acceptable here only if the app creates a single long-lived instance.
    @functools.lru_cache(maxsize=25)
    def category(self, name):
        '''
        Get a list of all Plugins of a Category.

        :name: The name of a category.
        :returns: A list of instances of a certain Plugin Classes,
            sorted by descending priority.
        '''
        cat = [info.plugin_object for info in self.list_plugin_info_by_category(name)]
        cat.sort(key=lambda pobj: pobj.priority(), reverse=True)
        return cat

    def first(self, name):
        '''
        Shortcut for ``psys.categories(name)[0]``

        Will return None if no plugins for this category.

        :name: A Category Name, Same as in ``category()``
        :returns: An Instance of a certain Plugin Class.
        '''
        cat = self.category(name)
        if len(cat) > 0:
            return cat[0]
class GeneManager():
    """
    Merge data available in Elasticsearch into proper json objects

    plugin_paths is a collection of filesystem paths to search for potential
    plugins

    plugin_order is an ordered collection of plugin names which determines the
    order they are handled in
    """

    def __init__(self, loader, r_server, plugin_paths, plugin_order):
        self.loader = loader
        self.r_server = r_server
        self.genes = GeneSet()
        self._logger = logging.getLogger(__name__)
        self._logger.info("Preparing the plug in management system")
        # Build the manager
        self.simplePluginManager = PluginManager()
        # Tell it the default place(s) where to find plugins
        self.simplePluginManager.setPluginPlaces(plugin_paths)
        # renamed loop variable from `dir`, which shadowed the builtin
        for plugin_dir in plugin_paths:
            self._logger.debug("Looking for plugins in %s", plugin_dir)
        # Load all plugins
        self.simplePluginManager.collectPlugins()
        self.plugin_order = plugin_order

    def merge_all(self, data_config, dry_run=False):
        """Run every plugin named in self.plugin_order, in order, then store."""
        for plugin_name in self.plugin_order:
            plugin = self.simplePluginManager.getPluginByName(plugin_name)
            plugin.plugin_object.print_name()
            plugin.plugin_object.merge_data(genes=self.genes,
                                            loader=self.loader,
                                            r_server=self.r_server,
                                            data_config=data_config)
        self._store_data(dry_run=dry_run)

    def _store_data(self, dry_run=False):
        """Push every merged gene to Elasticsearch (all writes skipped when dry_run)."""
        if not dry_run:
            self.loader.create_new_index(Const.ELASTICSEARCH_GENE_NAME_INDEX_NAME)
            # need to directly get the versioned index name for this function
            self.loader.prepare_for_bulk_indexing(
                self.loader.get_versioned_index(Const.ELASTICSEARCH_GENE_NAME_INDEX_NAME))
        for geneid, gene in self.genes.iterate():
            gene.preprocess()
            if not dry_run:
                self.loader.put(Const.ELASTICSEARCH_GENE_NAME_INDEX_NAME,
                                Const.ELASTICSEARCH_GENE_NAME_DOC_NAME,
                                geneid, gene.to_json())
        if not dry_run:
            self.loader.flush_all_and_wait(Const.ELASTICSEARCH_GENE_NAME_INDEX_NAME)
            # restore old pre-load settings
            # note this automatically does all prepared indexes
            self.loader.restore_after_bulk_indexing()
        self._logger.info('all gene objects pushed to elasticsearch')

    def qc(self, esquery):
        """
        Run a series of QC tests on EFO elasticsearch index. Returns a dictionary
        of string test names and result objects

        (This docstring was a free-floating string before the def in the
        original; moved inside the method where it belongs.)
        """
        # number of gene entries -- count lazily instead of a manual += loop;
        # still a single pass, avoid doing this more than once!
        gene_count = sum(1 for _ in esquery.get_all_targets())
        # put the metrics into a single dict
        metrics = dict()
        metrics["gene.count"] = gene_count
        return metrics
class PluginsManagerService(object): """ Description: main class in order to manage all the plugin operations like loading and triggering """ def __init__(self): self.plugin_manager = PluginManager() self.loaded_plugin_list = [] self.loaded_plugin_objects = None self.plugin_db = Plugins.Plugins(mongodb) self.plugin_location = os.path.abspath( os.path.join(os.getcwd(), 'Plugins')) self.deleted_plugin_location = os.path.abspath( os.path.join(os.getcwd(), 'Plugins_deleted')) self.load_plugin() def load_plugin(self): """loads all plugins from folder and activates the plugins Returns: :dict: active_plugins_dict_from_db Raises: no_value no_active_plugins """ # Build the manager # Tell it the default place(s) where to find plugins print "###################" print "STARTED PLUGIN LOAD " print "###################" if os.path.isdir(self.plugin_location) is True: try: # set plugins from folder self.plugin_manager.setPluginPlaces([self.plugin_location]) # define interfaces to load you have defined self.plugin_manager.setCategoriesFilter({ "PreDeploymentPlugin": IPreDeploymentPlugin, "PostDeploymentPlugin": IPostDeploymentPlugin }) # Load all plugins from plugin folder self.plugin_manager.collectPlugins() loaded_plugin_from_folder = [ x.name for x in self.plugin_manager.getAllPlugins() ] # validate plugin if not valid deactivate for plugin in loaded_plugin_from_folder: if not self.is_valid_plugin(plugin): self.deactivate(plugin) print( "the following plugin is deactivate cause is not valid: " + plugin) # update DB with new plugins self.detect_new_plugins() # load all activated plugins from DB active_plugins_dict_from_db = self.plugin_db.get_all_plugin( status='active') active_plugins_in_db = [ x['name'] for x in self.plugin_db.get_all_plugin(status='active') ] # final active_plugins list if active_plugins_in_db is None: print "no plugins installed" elif loaded_plugin_from_folder is None: print "no plugins in the plugins folder" else: active_plugins = [ x for x in 
loaded_plugin_from_folder if x in active_plugins_in_db ] for plugin in active_plugins: # validate plugin if not valid deactivate if not self.is_valid_plugin(plugin): print "plugin is not valid: " + plugin + " deactivate" self.deactivate(plugin) else: # activate self.plugin_manager.activatePluginByName(plugin) print "loaded plugin name: " + plugin ValueError("loaded plugin name: " + plugin) print "###################" print "COMPLETED PLUGIN LOAD " print "###################" return active_plugins_dict_from_db except (Exception, ValueError) as error: print error raise ValueError("Unable to load plugins :", error) elif os.path.isdir(self.plugin_location) is False: raise ValueError("plugin folder is set to " + self.plugin_location + " missing or not configured well") else: raise ValueError("unknown err during plugin loading") def detect_new_plugins(self): """ updates new plugins in db :param loaded_plugin_from_folder: :return: """ # load all from db loaded_plugin_from_folder = self.plugin_manager.getAllPlugins() plugins_in_db = [ x.get('name') for x in self.plugin_db.get_all_plugin() ] if not plugins_in_db: new_plugins = loaded_plugin_from_folder else: new_plugins = [ x for x in loaded_plugin_from_folder if x.name not in plugins_in_db ] for plugin_object in new_plugins: # get plugin type parent_class = type(plugin_object.plugin_object).__bases__ parent_class_name = parent_class[0].__name__ plugin_dict = { 'name': plugin_object.name, 'category': parent_class_name[1:], 'author': plugin_object.author, 'version': str(plugin_object.version), 'description': plugin_object.description, 'status': 'inactive' } try: self.plugin_db.add_plugin(plugin_dict) except Exception as error: print error raise RuntimeError("unable to insert the plugin to DB") def install_plugin(self, plugin_name): ''' activates the plugin and triggers the install method of the plugin :param plugin_name: string :return: :raises RuntimeError "unable to insert the plugin to DB" ''' # set plugins from folder # 
self.plugin_location = os.path.abspath(os.path.join(os.getcwd(), 'Plugins')) # self.load_plugin() plugin_details = self.plugin_db.get_plugin_by_name(plugin_name) plugin_object = self.plugin_manager.getPluginByName( plugin_name, plugin_details.get('category')) if plugin_object: try: self.activate(plugin_name) return plugin_object.plugin_object.install() except Exception as error: print error raise RuntimeError("unable to insert the plugin to DB: ", error) else: raise ValueError("no active plugin with this name") def is_valid_plugin(self, plugin): ''' validate that plugin has all the required methods and attributes :param plugin: :return: True or False ''' # mandatory element lists plugin_mandatory_attributes = [ 'author', 'name', 'version', 'description' ] plugin_mandatory_methods = [ 'preform', 'activate', 'deactivate', 'is_activated' ] # get plugin plugin = self.plugin_manager.getPluginByName(plugin, 'PreDeploymentPlugin') # check plugin manifest try: for attribute in plugin_mandatory_attributes: if str(getattr(plugin, attribute, None)) == "": raise ValueError("plugin is missing value in " + attribute) except (ValueError, AttributeError) as error: print error return False # check plugin methods try: methods = [ x for x in plugin_mandatory_methods if x not in dir(plugin.plugin_object) ] if len(methods) > 0: raise ValueError("folowwing methods are missing " + str(methods)) except (ValueError, AttributeError) as error: print error return False return True def uninstall_plugin(self, plugin): ''' triggers the uninstall method of the plugin by name remove the db details rename the plugin folder :param plugin_name: ''' try: # get category form DB plugin_details = self.plugin_db.get_plugin_by_name(plugin) if plugin_details: plugin_object = self.plugin_manager.getPluginByName( plugin, plugin_details.get('category')) # uninstal plugin_object.plugin_object.uninstall() # delete from db self.plugin_db.delete_plugin_by_name(plugin) # check if deleted folder exists if 
os.path.isdir(self.deleted_plugin_location) is not True: os.mkdir(self.deleted_plugin_location) # delete folder os.rename( self.plugin_location + "/" + plugin, self.deleted_plugin_location + "/" + plugin + ".deleted") else: raise ValueError('couldnt find plugin') except (ValueError, Exception) as error: print error raise RuntimeError(" could not uninstall plugin " + plugin + " " + error) def activate(self, plugin_name): ''' activates plugin by name :param plugin_name: :raise RuntimeError "could not activate plugin" ''' try: # get category form DB plugin_details = self.plugin_db.get_plugin_by_name(plugin_name) # activate self.plugin_manager.activatePluginByName( plugin_name, plugin_details.get('category')) # updateDB self.plugin_db.update_plugin_status(plugin_name, 'active') except Exception as error: print error raise RuntimeError(" could not activate plugin " + plugin_name) def deactivate(self, plugin_name): ''' deactivates plugin by name :param plugin_name: string :raise RuntimeError "could not deactivate plugin" ''' try: # get category form DB plugin_details = self.plugin_db.get_plugin_by_name(plugin_name) # deactivate self.plugin_manager.deactivatePluginByName( plugin_name, plugin_details.get('category')) # updateDB self.plugin_db.update_plugin_status(plugin_name, 'inactive') except Exception as error: print error raise RuntimeError(" could not deactivate plugin " + plugin_name) def preform(self, category, plugin_name): """ execute the preform method of plugin in category :param plugin_name: string :param category: string :raises RuntimeError unable to trigger operation of plugin: """ try: plugin = self.plugin_manager.getPluginByName(plugin_name, category) plugin.plugin_object.preform() except Exception as error: print error raise RuntimeError(" unable to trigger operation of plugin:" + plugin_name) def preform_all_in_category(self, category, **keyargs): ''' triggers all preform methods of plugin in the provided category :param category: :return: :raises 
RuntimeError "unable to execute the plugin logic" ''' for (key, value) in keyargs.iteritems(): print(key, value) try: for plugin in self.plugin_manager.getPluginsOfCategory(category): print("preforming action of plugin " + plugin.name) plugin.plugin_object.preform() except (ValueError, Exception) as error: print error raise RuntimeError("unable to execute the plugin logic: " + error) # ## test def test(self): # just for test for p in self.plugin_manager.getPluginsOfCategory( 'PreDeploymentPlugin'): self.install_plugin('TestPlugin')
class Controller(object):
    """
    The controller is responsible for handling all the preparations of the
    reader, and the reading from the model. The model is in the bci board,
    given as input to the class.
    """

    def __init__(self, gui, bd, sets):
        # Initialising the plugin manager #
        self.manager = PluginManager()  # Yapsy pluginmanager is used for plugins.
        self.plugins_paths = ["plugins"]  # Make sure the path for the plugins works
        self.manager.setPluginPlaces(self.plugins_paths)
        self.manager.collectPlugins()
        #
        # Setting need-to-know variables. #
        self.gui = gui
        # Here is the instantiation of the board. #
        self.board = bd
        self.settings = sets
        # Set of callback functions (plug-ins) that have been activated. #
        self.callback_list = []
        self.plug_list = []

    # ============================================================
    # Internal functions for the controller. Most of these will be activated
    # from the GUI. These are marked by the type of initialisation, e.g. <MENU>

    # Initialisation for the board, and setting it up for commands. #
    # <MENU, BUTTON> #
    def connect_board(self):
        self.model = self.board.OpenBCIBoard(self, self.gui, self.settings)
        self.activate_plugins()

    # <MENU> #
    def start_streaming(self):
        self.model.start_streaming_thread(self.callback_list)

    # <MENU> #
    def set_channels(self):
        # Initialising the active channels -- not implemented yet.
        pass

    def stop(self):
        self.model.stop()

    # Method used to activate the plugins. If any plugin is not responding
    # properly, an error message is generated. This function is used initially,
    # but can also be used during a run, for example to add new plugins, if
    # something interesting occurs. #
    def activate_plugins(self):
        self.plug_list = []
        self.callback_list = []
        # Fetch selected plugins from settings, try to activate them, add to the list if OK #
        plugs = self.settings.get_plugins()
        for plug_candidate in plugs:
            # first value: plugin name, then optional arguments #
            plug_name = plug_candidate[0]
            plug_args = plug_candidate[1:]
            # Try to find name #
            plug = self.manager.getPluginByName(plug_name)
            # `is None` instead of the original `== None` identity-vs-equality mixup
            if plug is None:
                # eg: if an import fails inside a plugin, yapsy will skip it #
                print("Error: [ " + plug_name + " ] not found or could not be loaded. Check name and requirements.")
            else:
                print("\nActivating [ " + plug_name + " ] plugin...")
                if not plug.plugin_object.pre_activate(
                        plug_args,
                        sample_rate=self.model.get_sample_rate(),
                        eeg_channels=self.model.get_nb_eeg_channels(),
                        aux_channels=self.model.get_nb_aux_channels(),
                        imp_channels=self.model.get_nb_imp_channels()):
                    print("Error while activating [ " + plug_name + " ], check output for more info.")
                else:
                    print("Plugin [ " + plug_name + "] added to the list")
                    self.plug_list.append(plug.plugin_object)
                    self.callback_list.append(plug.plugin_object)
        print(self.callback_list)

    # The deactivation of plugins is necessary for a clean closing of files, etc. #
    def clean_up(self):
        self.model.disconnect()
        # NOTE(review): `dict` here shadows the builtin -- presumably a
        # project-level localisation module imported elsewhere; confirm.
        print(dict.get_string('deactivate_plug'))
        for plug in self.plug_list:
            plug.deactivate()
        print(dict.get_string('exiting'))

    # =================================================
    # =================================================
class Nikola(object): """Class that handles site generation. Takes a site config as argument on creation. """ EXTRA_PLUGINS = [ 'planetoid', 'ipynb', 'local_search', 'render_mustache', ] def __init__(self, **config): """Setup proper environment for running tasks.""" # Register our own path handlers self.path_handlers = { 'slug': self.slug_path, 'post_path': self.post_path, } self.strict = False self.global_data = {} self.posts = [] self.posts_per_year = defaultdict(list) self.posts_per_month = defaultdict(list) self.posts_per_tag = defaultdict(list) self.posts_per_category = defaultdict(list) self.post_per_file = {} self.timeline = [] self.pages = [] self._scanned = False self._template_system = None self._THEMES = None self.debug = DEBUG self.loghandlers = [] if not config: self.configured = False else: self.configured = True # This is the default config self.config = { 'ADD_THIS_BUTTONS': True, 'ANNOTATIONS': False, 'ARCHIVE_PATH': "", 'ARCHIVE_FILENAME': "archive.html", 'BLOG_TITLE': 'Default Title', 'BLOG_DESCRIPTION': 'Default Description', 'BODY_END': "", 'CACHE_FOLDER': 'cache', 'CODE_COLOR_SCHEME': 'default', 'COMMENT_SYSTEM': 'disqus', 'COMMENTS_IN_GALLERIES': False, 'COMMENTS_IN_STORIES': False, 'COMPILERS': { "rest": ('.txt', '.rst'), "markdown": ('.md', '.mdown', '.markdown'), "textile": ('.textile',), "txt2tags": ('.t2t',), "bbcode": ('.bb',), "wiki": ('.wiki',), "ipynb": ('.ipynb',), "html": ('.html', '.htm') }, 'CONTENT_FOOTER': '', 'COPY_SOURCES': True, 'CREATE_MONTHLY_ARCHIVE': False, 'CREATE_SINGLE_ARCHIVE': False, 'DATE_FORMAT': '%Y-%m-%d %H:%M', 'DEFAULT_LANG': "en", 'DEPLOY_COMMANDS': [], 'DISABLED_PLUGINS': (), 'EXTRA_PLUGINS_DIRS': [], 'COMMENT_SYSTEM_ID': 'nikolademo', 'ENABLED_EXTRAS': (), 'EXTRA_HEAD_DATA': '', 'FAVICONS': {}, 'FEED_LENGTH': 10, 'FILE_METADATA_REGEXP': None, 'ADDITIONAL_METADATA': {}, 'FILES_FOLDERS': {'files': ''}, 'FILTERS': {}, 'GALLERY_PATH': 'galleries', 'GALLERY_SORT_BY_DATE': True, 'GZIP_COMMAND': None, 
'GZIP_FILES': False, 'GZIP_EXTENSIONS': ('.txt', '.htm', '.html', '.css', '.js', '.json', '.xml'), 'HIDE_SOURCELINK': False, 'HIDE_UNTRANSLATED_POSTS': False, 'HYPHENATE': False, 'INDEX_DISPLAY_POST_COUNT': 10, 'INDEX_FILE': 'index.html', 'INDEX_TEASERS': False, 'INDEXES_TITLE': "", 'INDEXES_PAGES': "", 'INDEXES_PAGES_MAIN': False, 'INDEX_PATH': '', 'IPYNB_CONFIG': {}, 'LESS_COMPILER': 'lessc', 'LICENSE': '', 'LINK_CHECK_WHITELIST': [], 'LISTINGS_FOLDER': 'listings', 'NAVIGATION_LINKS': None, 'MARKDOWN_EXTENSIONS': ['fenced_code', 'codehilite'], 'MAX_IMAGE_SIZE': 1280, 'MATHJAX_CONFIG': '', 'OLD_THEME_SUPPORT': True, 'OUTPUT_FOLDER': 'output', 'POSTS': (("posts/*.txt", "posts", "post.tmpl"),), 'PAGES': (("stories/*.txt", "stories", "story.tmpl"),), 'PRETTY_URLS': False, 'FUTURE_IS_NOW': False, 'READ_MORE_LINK': '<p class="more"><a href="{link}">{read_more}…</a></p>', 'REDIRECTIONS': [], 'RSS_LINK': None, 'RSS_PATH': '', 'RSS_TEASERS': True, 'SASS_COMPILER': 'sass', 'SEARCH_FORM': '', 'SLUG_TAG_PATH': True, 'SOCIAL_BUTTONS_CODE': SOCIAL_BUTTONS_CODE, 'SITE_URL': 'http://getnikola.com/', 'STORY_INDEX': False, 'STRIP_INDEXES': False, 'SITEMAP_INCLUDE_FILELESS_DIRS': True, 'TAG_PATH': 'categories', 'TAG_PAGES_ARE_INDEXES': False, 'THEME': 'bootstrap', 'THEME_REVEAL_CONFIG_SUBTHEME': 'sky', 'THEME_REVEAL_CONFIG_TRANSITION': 'cube', 'THUMBNAIL_SIZE': 180, 'URL_TYPE': 'rel_path', 'USE_BUNDLES': True, 'USE_CDN': False, 'USE_FILENAME_AS_TITLE': True, 'TIMEZONE': 'UTC', 'DEPLOY_DRAFTS': True, 'DEPLOY_FUTURE': False, 'SCHEDULE_ALL': False, 'SCHEDULE_RULE': '', 'SCHEDULE_FORCE_TODAY': False, 'LOGGING_HANDLERS': {'stderr': {'loglevel': 'WARNING', 'bubble': True}}, 'DEMOTE_HEADERS': 1, } self.config.update(config) # Make sure we have pyphen installed if we are using it if self.config.get('HYPHENATE') and pyphen is None: utils.LOGGER.warn('To use the hyphenation, you have to install ' 'the "pyphen" package.') utils.LOGGER.warn('Setting HYPHENATE to False.') 
self.config['HYPHENATE'] = False # Deprecating post_compilers # TODO: remove on v7 if 'post_compilers' in config: utils.LOGGER.warn('The post_compilers option is deprecated, use COMPILERS instead.') if 'COMPILERS' in config: utils.LOGGER.warn('COMPILERS conflicts with post_compilers, ignoring post_compilers.') else: self.config['COMPILERS'] = config['post_compilers'] # Deprecating post_pages # TODO: remove on v7 if 'post_pages' in config: utils.LOGGER.warn('The post_pages option is deprecated, use POSTS and PAGES instead.') if 'POSTS' in config or 'PAGES' in config: utils.LOGGER.warn('POSTS and PAGES conflict with post_pages, ignoring post_pages.') else: self.config['POSTS'] = [item[:3] for item in config['post_pages'] if item[-1]] self.config['PAGES'] = [item[:3] for item in config['post_pages'] if not item[-1]] # FIXME: Internally, we still use post_pages because it's a pain to change it self.config['post_pages'] = [] for i1, i2, i3 in self.config['POSTS']: self.config['post_pages'].append([i1, i2, i3, True]) for i1, i2, i3 in self.config['PAGES']: self.config['post_pages'].append([i1, i2, i3, False]) # Deprecating DISQUS_FORUM # TODO: remove on v7 if 'DISQUS_FORUM' in config: utils.LOGGER.warn('The DISQUS_FORUM option is deprecated, use COMMENT_SYSTEM_ID instead.') if 'COMMENT_SYSTEM_ID' in config: utils.LOGGER.warn('DISQUS_FORUM conflicts with COMMENT_SYSTEM_ID, ignoring DISQUS_FORUM.') else: self.config['COMMENT_SYSTEM_ID'] = config['DISQUS_FORUM'] # Deprecating the ANALYTICS option # TODO: remove on v7 if 'ANALYTICS' in config: utils.LOGGER.warn('The ANALYTICS option is deprecated, use BODY_END instead.') if 'BODY_END' in config: utils.LOGGER.warn('ANALYTICS conflicts with BODY_END, ignoring ANALYTICS.') else: self.config['BODY_END'] = config['ANALYTICS'] # Deprecating the SIDEBAR_LINKS option # TODO: remove on v7 if 'SIDEBAR_LINKS' in config: utils.LOGGER.warn('The SIDEBAR_LINKS option is deprecated, use NAVIGATION_LINKS instead.') if 'NAVIGATION_LINKS' in 
config: utils.LOGGER.warn('The SIDEBAR_LINKS conflicts with NAVIGATION_LINKS, ignoring SIDEBAR_LINKS.') else: self.config['NAVIGATION_LINKS'] = config['SIDEBAR_LINKS'] # Compatibility alias self.config['SIDEBAR_LINKS'] = self.config['NAVIGATION_LINKS'] if self.config['NAVIGATION_LINKS'] in (None, {}): self.config['NAVIGATION_LINKS'] = {self.config['DEFAULT_LANG']: ()} # Deprecating the ADD_THIS_BUTTONS option # TODO: remove on v7 if 'ADD_THIS_BUTTONS' in config: utils.LOGGER.warn('The ADD_THIS_BUTTONS option is deprecated, use SOCIAL_BUTTONS_CODE instead.') if not config['ADD_THIS_BUTTONS']: utils.LOGGER.warn('Setting SOCIAL_BUTTONS_CODE to empty because ADD_THIS_BUTTONS is False.') self.config['SOCIAL_BUTTONS_CODE'] = '' # STRIP_INDEX_HTML config has been replaces with STRIP_INDEXES # Port it if only the oldef form is there # TODO: remove on v7 if 'STRIP_INDEX_HTML' in config and 'STRIP_INDEXES' not in config: utils.LOGGER.warn('You should configure STRIP_INDEXES instead of STRIP_INDEX_HTML') self.config['STRIP_INDEXES'] = config['STRIP_INDEX_HTML'] # PRETTY_URLS defaults to enabling STRIP_INDEXES unless explicitly disabled if config.get('PRETTY_URLS', False) and 'STRIP_INDEXES' not in config: self.config['STRIP_INDEXES'] = True if config.get('COPY_SOURCES') and not self.config['HIDE_SOURCELINK']: self.config['HIDE_SOURCELINK'] = True self.config['TRANSLATIONS'] = self.config.get('TRANSLATIONS', {self.config['DEFAULT_LANG']: ''}) # SITE_URL is required, but if the deprecated BLOG_URL # is available, use it and warn # TODO: remove on v7 if 'SITE_URL' not in self.config: if 'BLOG_URL' in self.config: utils.LOGGER.warn('You should configure SITE_URL instead of BLOG_URL') self.config['SITE_URL'] = self.config['BLOG_URL'] self.default_lang = self.config['DEFAULT_LANG'] self.translations = self.config['TRANSLATIONS'] locale_fallback, locale_default, locales = sanitized_locales( self.config.get('LOCALE_FALLBACK', None), self.config.get('LOCALE_DEFAULT', None), 
# NOTE(review): this region is a line-mangled dump of the tail of Nikola.__init__
# plus the property getters (_get_themes / _get_messages / _get_global_context /
# _get_template_system), get_compiler, and the head of render_template from the
# Nikola static-site generator. The original newlines were lost, so mid-line '#'
# comments (e.g. '# NOQA') now comment out the remainder of each physical line;
# the code must be re-flowed to its original multi-line form before it can run.
# Left byte-identical here.
# BUG(review): in the TaskMultiplier activation loop,
# removePluginFromCategory(plugin_info, task_type) reuses the stale `task_type`
# loop variable left over from the preceding Task/LateTask loop; it should pass
# "TaskMultiplier" -- confirm against upstream Nikola before fixing.
self.config.get('LOCALES', {}), self.translations) # NOQA utils.LocaleBorg.initialize(locales, self.default_lang) # BASE_URL defaults to SITE_URL if 'BASE_URL' not in self.config: self.config['BASE_URL'] = self.config.get('SITE_URL') # BASE_URL should *always* end in / if self.config['BASE_URL'] and self.config['BASE_URL'][-1] != '/': utils.LOGGER.warn("Your BASE_URL doesn't end in / -- adding it.") self.plugin_manager = PluginManager(categories_filter={ "Command": Command, "Task": Task, "LateTask": LateTask, "TemplateSystem": TemplateSystem, "PageCompiler": PageCompiler, "TaskMultiplier": TaskMultiplier, "RestExtension": RestExtension, "SignalHandler": SignalHandler, }) self.plugin_manager.setPluginInfoExtension('plugin') extra_plugins_dirs = self.config['EXTRA_PLUGINS_DIRS'] if sys.version_info[0] == 3: places = [ os.path.join(os.path.dirname(__file__), 'plugins'), os.path.join(os.getcwd(), 'plugins'), ] + [path for path in extra_plugins_dirs if path] else: places = [ os.path.join(os.path.dirname(__file__), utils.sys_encode('plugins')), os.path.join(os.getcwd(), utils.sys_encode('plugins')), ] + [utils.sys_encode(path) for path in extra_plugins_dirs if path] self.plugin_manager.setPluginPlaces(places) self.plugin_manager.collectPlugins() # Activate all required SignalHandler plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("SignalHandler"): if plugin_info.name in self.config.get('DISABLED_PLUGINS'): self.plugin_manager.removePluginFromCategory(plugin_info, "SignalHandler") else: self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Emit signal for SignalHandlers which need to start running immediately. 
signal('sighandlers_loaded').send(self) self.commands = {} # Activate all command plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("Command"): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, "Command") continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) plugin_info.plugin_object.short_help = plugin_info.description self.commands[plugin_info.name] = plugin_info.plugin_object # Activate all task plugins for task_type in ["Task", "LateTask"]: for plugin_info in self.plugin_manager.getPluginsOfCategory(task_type): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all multiplier plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all required compiler plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("PageCompiler"): if plugin_info.name in self.config["COMPILERS"].keys(): self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # set global_context for template rendering self._GLOBAL_CONTEXT = {} self._GLOBAL_CONTEXT['_link'] = self.link self._GLOBAL_CONTEXT['set_locale'] = 
utils.LocaleBorg().set_locale self._GLOBAL_CONTEXT['rel_link'] = self.rel_link self._GLOBAL_CONTEXT['abs_link'] = self.abs_link self._GLOBAL_CONTEXT['exists'] = self.file_exists self._GLOBAL_CONTEXT['SLUG_TAG_PATH'] = self.config['SLUG_TAG_PATH'] self._GLOBAL_CONTEXT['annotations'] = self.config['ANNOTATIONS'] self._GLOBAL_CONTEXT['index_display_post_count'] = self.config[ 'INDEX_DISPLAY_POST_COUNT'] self._GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES'] self._GLOBAL_CONTEXT['use_cdn'] = self.config.get("USE_CDN") self._GLOBAL_CONTEXT['favicons'] = self.config['FAVICONS'] self._GLOBAL_CONTEXT['date_format'] = self.config.get( 'DATE_FORMAT', '%Y-%m-%d %H:%M') self._GLOBAL_CONTEXT['blog_author'] = self.config.get('BLOG_AUTHOR') self._GLOBAL_CONTEXT['blog_title'] = self.config.get('BLOG_TITLE') # TODO: remove fallback in v7 self._GLOBAL_CONTEXT['blog_url'] = self.config.get('SITE_URL', self.config.get('BLOG_URL')) self._GLOBAL_CONTEXT['blog_desc'] = self.config.get('BLOG_DESCRIPTION') self._GLOBAL_CONTEXT['body_end'] = self.config.get('BODY_END') # TODO: remove in v7 self._GLOBAL_CONTEXT['analytics'] = self.config.get('BODY_END') # TODO: remove in v7 self._GLOBAL_CONTEXT['add_this_buttons'] = self.config.get('SOCIAL_BUTTONS_CODE') self._GLOBAL_CONTEXT['social_buttons_code'] = self.config.get('SOCIAL_BUTTONS_CODE') self._GLOBAL_CONTEXT['translations'] = self.config.get('TRANSLATIONS') self._GLOBAL_CONTEXT['license'] = self.config.get('LICENSE') self._GLOBAL_CONTEXT['search_form'] = self.config.get('SEARCH_FORM') self._GLOBAL_CONTEXT['comment_system'] = self.config.get('COMMENT_SYSTEM') self._GLOBAL_CONTEXT['comment_system_id'] = self.config.get('COMMENT_SYSTEM_ID') # TODO: remove in v7 self._GLOBAL_CONTEXT['disqus_forum'] = self.config.get('COMMENT_SYSTEM_ID') self._GLOBAL_CONTEXT['mathjax_config'] = self.config.get( 'MATHJAX_CONFIG') self._GLOBAL_CONTEXT['subtheme'] = self.config.get('THEME_REVEAL_CONFIG_SUBTHEME') self._GLOBAL_CONTEXT['transition'] = 
self.config.get('THEME_REVEAL_CONFIG_TRANSITION') self._GLOBAL_CONTEXT['content_footer'] = self.config.get( 'CONTENT_FOOTER') self._GLOBAL_CONTEXT['rss_path'] = self.config.get('RSS_PATH') self._GLOBAL_CONTEXT['rss_link'] = self.config.get('RSS_LINK') self._GLOBAL_CONTEXT['navigation_links'] = utils.Functionary(list, self.config['DEFAULT_LANG']) for k, v in self.config.get('NAVIGATION_LINKS', {}).items(): self._GLOBAL_CONTEXT['navigation_links'][k] = v # TODO: remove on v7 # Compatibility alias self._GLOBAL_CONTEXT['sidebar_links'] = self._GLOBAL_CONTEXT['navigation_links'] self._GLOBAL_CONTEXT['twitter_card'] = self.config.get( 'TWITTER_CARD', {}) self._GLOBAL_CONTEXT['hide_sourcelink'] = self.config.get( 'HIDE_SOURCELINK') self._GLOBAL_CONTEXT['extra_head_data'] = self.config.get('EXTRA_HEAD_DATA') self._GLOBAL_CONTEXT.update(self.config.get('GLOBAL_CONTEXT', {})) # Load compiler plugins self.compilers = {} self.inverse_compilers = {} for plugin_info in self.plugin_manager.getPluginsOfCategory( "PageCompiler"): self.compilers[plugin_info.name] = \ plugin_info.plugin_object signal('configured').send(self) def _get_themes(self): if self._THEMES is None: # Check for old theme names (Issue #650) TODO: remove in v7 theme_replacements = { 'site': 'bootstrap', 'orphan': 'base', 'default': 'oldfashioned', } if self.config['THEME'] in theme_replacements: utils.LOGGER.warn('You are using the old theme "{0}", using "{1}" instead.'.format( self.config['THEME'], theme_replacements[self.config['THEME']])) self.config['THEME'] = theme_replacements[self.config['THEME']] if self.config['THEME'] == 'oldfashioned': utils.LOGGER.warn('''You may need to install the "oldfashioned" theme ''' '''from themes.nikola.ralsina.com.ar because it's not ''' '''shipped by default anymore.''') utils.LOGGER.warn('Please change your THEME setting.') try: self._THEMES = utils.get_theme_chain(self.config['THEME']) except Exception: utils.LOGGER.warn('''Can't load theme "{0}", using 'bootstrap' 
instead.'''.format(self.config['THEME'])) self.config['THEME'] = 'bootstrap' return self._get_themes() # Check consistency of USE_CDN and the current THEME (Issue #386) if self.config['USE_CDN']: bootstrap_path = utils.get_asset_path(os.path.join( 'assets', 'css', 'bootstrap.min.css'), self._THEMES) if bootstrap_path and bootstrap_path.split(os.sep)[-4] not in ['bootstrap', 'bootstrap3']: utils.LOGGER.warn('The USE_CDN option may be incompatible with your theme, because it uses a hosted version of bootstrap.') return self._THEMES THEMES = property(_get_themes) def _get_messages(self): return utils.load_messages(self.THEMES, self.translations, self.default_lang) MESSAGES = property(_get_messages) def _get_global_context(self): """Initialize some parts of GLOBAL_CONTEXT only when it's queried.""" if 'messages' not in self._GLOBAL_CONTEXT: self._GLOBAL_CONTEXT['messages'] = self.MESSAGES if 'has_custom_css' not in self._GLOBAL_CONTEXT: # check if custom css exist and is not empty custom_css_path = utils.get_asset_path( 'assets/css/custom.css', self.THEMES, self.config['FILES_FOLDERS'] ) if custom_css_path and self.file_exists(custom_css_path, not_empty=True): self._GLOBAL_CONTEXT['has_custom_css'] = True else: self._GLOBAL_CONTEXT['has_custom_css'] = False return self._GLOBAL_CONTEXT GLOBAL_CONTEXT = property(_get_global_context) def _get_template_system(self): if self._template_system is None: # Load template plugin template_sys_name = utils.get_template_engine(self.THEMES) pi = self.plugin_manager.getPluginByName( template_sys_name, "TemplateSystem") if pi is None: sys.stderr.write("Error loading {0} template system " "plugin\n".format(template_sys_name)) sys.exit(1) self._template_system = pi.plugin_object lookup_dirs = ['templates'] + [os.path.join(utils.get_theme_path(name), "templates") for name in self.THEMES] self._template_system.set_directories(lookup_dirs, self.config['CACHE_FOLDER']) return self._template_system template_system = 
property(_get_template_system) def get_compiler(self, source_name): """Get the correct compiler for a post from `conf.COMPILERS` To make things easier for users, the mapping in conf.py is compiler->[extensions], although this is less convenient for us. The majority of this function is reversing that dictionary and error checking. """ ext = os.path.splitext(source_name)[1] try: compile_html = self.inverse_compilers[ext] except KeyError: # Find the correct compiler for this files extension langs = [lang for lang, exts in list(self.config['COMPILERS'].items()) if ext in exts] if len(langs) != 1: if len(set(langs)) > 1: exit("Your file extension->compiler definition is" "ambiguous.\nPlease remove one of the file extensions" "from 'COMPILERS' in conf.py\n(The error is in" "one of {0})".format(', '.join(langs))) elif len(langs) > 1: langs = langs[:1] else: exit("COMPILERS in conf.py does not tell me how to " "handle '{0}' extensions.".format(ext)) lang = langs[0] compile_html = self.compilers[lang] self.inverse_compilers[ext] = compile_html return compile_html def render_template(self, template_name, output_name, context): local_context = {} local_context["template_name"] = template_name local_context.update(self.GLOBAL_CONTEXT) local_context.update(context) # string, arguments local_context["formatmsg"] = lambda s, *a: s % a data = self.template_system.render_template( template_name, None, local_context) assert output_name.startswith( self.config["OUTPUT_FOLDER"]) url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1:] # Treat our site as if output/ is "/" and then make all URLs relative, # making the site "relocatable" src = os.sep + url_part src = os.path.normpath(src) # The os.sep is because normpath will change "/" to "\" on windows src = "/".join(src.split(os.sep)) parsed_src = urlsplit(src) src_elems = parsed_src.path.split('/')[1:] def replacer(dst): # Refuse to replace links that are full URLs. 
# NOTE(review): continuation of the mangled Nikola class body: the tail of the
# render_template link `replacer` closure, the path()/post_path()/slug_path()
# path handlers, register_path_handler, the link/abs_link/rel_link helpers,
# file_exists, clean_task_paths, gen_tasks, and the head of scan_posts.
# Newlines were stripped, so statements such as `if kind in` /
# `self.path_handlers:` are split across physical lines and must be re-joined
# before this parses. Left byte-identical.
dst_url = urlparse(dst) if dst_url.netloc: if dst_url.scheme == 'link': # Magic link dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'), context['lang']) else: return dst # Refuse to replace links that consist of a fragment only if ((not dst_url.scheme) and (not dst_url.netloc) and (not dst_url.path) and (not dst_url.params) and (not dst_url.query) and dst_url.fragment): return dst # Normalize dst = urljoin(src, dst.lstrip('/')) # Avoid empty links. if src == dst: if self.config.get('URL_TYPE') == 'absolute': dst = urljoin(self.config['BASE_URL'], dst.lstrip('/')) return dst elif self.config.get('URL_TYPE') == 'full_path': return dst else: return "#" # Check that link can be made relative, otherwise return dest parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: if self.config.get('URL_TYPE') == 'absolute': dst = urljoin(self.config['BASE_URL'], dst.lstrip('/')) return dst if self.config.get('URL_TYPE') in ('full_path', 'absolute'): if self.config.get('URL_TYPE') == 'absolute': dst = urljoin(self.config['BASE_URL'], dst.lstrip('/')) return dst # Now both paths are on the same site and absolute dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break # Now i is the longest common prefix result = '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) if not result: result = "." # Don't forget the fragment (anchor) part of the link if parsed_dst.fragment: result += "#" + parsed_dst.fragment assert result, (src, dst, i, src_elems, dst_elems) return result utils.makedirs(os.path.dirname(output_name)) doc = lxml.html.document_fromstring(data) doc.rewrite_links(replacer) data = b'<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8') with open(output_name, "wb+") as post_file: post_file.write(data) def path(self, kind, name, lang=None, is_link=False): """Build the path to a certain kind of page. 
These are mostly defined by plugins by registering via the register_path_handler method, except for slug and post_path which are defined in this class' init method. Here's some of the others, for historical reasons: * tag_index (name is ignored) * tag (and name is the tag name) * tag_rss (name is the tag name) * category (and name is the category name) * category_rss (and name is the category name) * archive (and name is the year, or None for the main archive index) * index (name is the number in index-number) * rss (name is ignored) * gallery (name is the gallery name) * listing (name is the source code file name) * post_path (name is 1st element in a POSTS/PAGES tuple) * slug (name is the slug of a post or story) The returned value is always a path relative to output, like "categories/whatever.html" If is_link is True, the path is absolute and uses "/" as separator (ex: "/archive/index.html"). If is_link is False, the path is relative to output and uses the platform's separator. (ex: "archive\\index.html") """ if lang is None: lang = utils.LocaleBorg().current_lang path = self.path_handlers[kind](name, lang) if is_link: link = '/' + ('/'.join(path)) index_len = len(self.config['INDEX_FILE']) if self.config['STRIP_INDEXES'] and \ link[-(1 + index_len):] == '/' + self.config['INDEX_FILE']: return link[:-index_len] else: return link else: return os.path.join(*path) def post_path(self, name, lang): """post_path path handler""" return [_f for _f in [self.config['TRANSLATIONS'][lang], os.path.dirname(name), self.config['INDEX_FILE']] if _f] def slug_path(self, name, lang): """slug path handler""" results = [p for p in self.timeline if p.meta('slug') == name] if not results: utils.LOGGER.warning("Can't resolve path request for slug: {0}".format(name)) else: if len(results) > 1: utils.LOGGER.warning('Ambiguous path request for slug: {0}'.format(name)) return [_f for _f in results[0].permalink(lang).split('/') if _f] def register_path_handler(self, kind, f): if kind in 
self.path_handlers: utils.LOGGER.warning('Conflicting path handlers for kind: {0}'.format(kind)) else: self.path_handlers[kind] = f def link(self, *args): return self.path(*args, is_link=True) def abs_link(self, dst): # Normalize dst = urljoin(self.config['BASE_URL'], dst.lstrip('/')) return urlparse(dst).geturl() def rel_link(self, src, dst): # Normalize try: src = urljoin(self.config['BASE_URL'], src.lstrip('/')) except AttributeError: # sometimes, it’s an Undefined object. src = urljoin(self.config['BASE_URL'], src) try: dst = urljoin(src, dst.lstrip('/')) except AttributeError: dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlsplit(src) parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split('/')[1:] dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. 
If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def clean_task_paths(self, task): """Normalize target paths in the task.""" targets = task.get('targets', None) if targets is not None: task['targets'] = [os.path.normpath(t) for t in targets] return task def gen_tasks(self, name, plugin_category, doc=''): def flatten(task): if isinstance(task, dict): yield task else: for t in task: for ft in flatten(t): yield ft task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory(plugin_category): for task in flatten(pluginInfo.plugin_object.gen_tasks()): assert 'basename' in task task = self.clean_task_paths(task) yield task for multi in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): flag = False for task in multi.plugin_object.process(task, name): flag = True yield self.clean_task_paths(task) if flag: task_dep.append('{0}_{1}'.format(name, multi.plugin_object.name)) if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield { 'basename': name, 'doc': doc, 'actions': None, 'clean': True, 'task_dep': task_dep } def scan_posts(self): """Scan all the posts.""" if self._scanned: return seen = set([]) print("Scanning posts", end='', file=sys.stderr) lower_case_tags = set([]) for wildcard, destination, template_name, use_in_feeds in \ self.config['post_pages']: print(".", end='', file=sys.stderr) dirname = os.path.dirname(wildcard) for dirpath, _, _ in os.walk(dirname): dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) dest_dir = os.path.normpath(os.path.join(destination, os.path.relpath(dirpath, dirname))) full_list = glob.glob(dir_glob) # Now let's look for things that are not in default_lang for lang in self.config['TRANSLATIONS'].keys(): lang_glob = dir_glob + "." 
# NOTE(review): tail of scan_posts plus generic_page_renderer and
# generic_post_list_renderer (the modern, Python-3-compatible variants of the
# Nikola class). Left byte-identical -- line mangling must be undone first.
# NOTE(review): scan_posts indexes self.post_per_file using `lang`, which at
# that point holds whatever value the earlier TRANSLATIONS loop left behind;
# this looks unintentional -- verify against upstream Nikola before relying
# on it.
+ lang translated_list = glob.glob(lang_glob) for fname in translated_list: orig_name = os.path.splitext(fname)[0] if orig_name in full_list: continue full_list.append(orig_name) # We eliminate from the list the files inside any .ipynb folder full_list = [p for p in full_list if not any([x.startswith('.') for x in p.split(os.sep)])] for base_path in full_list: if base_path in seen: continue else: seen.add(base_path) post = Post( base_path, self.config, dest_dir, use_in_feeds, self.MESSAGES, template_name, self.get_compiler(base_path) ) self.global_data[post.source_path] = post if post.use_in_feeds: self.posts.append(post.source_path) self.posts_per_year[ str(post.date.year)].append(post.source_path) self.posts_per_month[ '{0}/{1:02d}'.format(post.date.year, post.date.month)].append(post.source_path) for tag in post.alltags: if tag.lower() in lower_case_tags: if tag not in self.posts_per_tag: # Tags that differ only in case other_tag = [k for k in self.posts_per_tag.keys() if k.lower() == tag.lower()][0] utils.LOGGER.error('You have cases that differ only in upper/lower case: {0} and {1}'.format(tag, other_tag)) utils.LOGGER.error('Tag {0} is used in: {1}'.format(tag, post.source_path)) utils.LOGGER.error('Tag {0} is used in: {1}'.format(other_tag, ', '.join(self.posts_per_tag[other_tag]))) sys.exit(1) else: lower_case_tags.add(tag.lower()) self.posts_per_tag[tag].append(post.source_path) self.posts_per_category[post.meta('category')].append(post.source_path) else: self.pages.append(post) self.post_per_file[post.destination_path(lang=lang)] = post self.post_per_file[post.destination_path(lang=lang, extension=post.source_ext())] = post for name, post in list(self.global_data.items()): self.timeline.append(post) self.timeline.sort(key=lambda p: p.date) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = 
post_timeline[i + 1] self._scanned = True print("done!", file=sys.stderr) def generic_page_renderer(self, lang, post, filters): """Render post fragments to final HTML pages.""" context = {} deps = post.deps(lang) + \ self.template_system.template_deps(post.template_name) deps.extend(utils.get_asset_path(x, self.THEMES) for x in ('bundles', 'parent', 'engine')) deps = list(filter(None, deps)) context['post'] = post context['lang'] = lang context['title'] = post.title(lang) context['description'] = post.description(lang) context['permalink'] = post.permalink(lang) context['page_list'] = self.pages if post.use_in_feeds: context['enable_comments'] = True else: context['enable_comments'] = self.config['COMMENTS_IN_STORIES'] extension = self.get_compiler(post.source_path).extension() output_name = os.path.join(self.config['OUTPUT_FOLDER'], post.destination_path(lang, extension)) deps_dict = copy(context) deps_dict.pop('post') if post.prev_post: deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)] deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER'] deps_dict['TRANSLATIONS'] = self.config['TRANSLATIONS'] deps_dict['global'] = self.GLOBAL_CONTEXT deps_dict['comments'] = context['enable_comments'] if post: deps_dict['post_translations'] = post.translated_to task = { 'name': os.path.normpath(output_name), 'file_dep': deps, 'targets': [output_name], 'actions': [(self.render_template, [post.template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" deps = self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config['BLOG_TITLE'] context["description"] = 
self.config['BLOG_DESCRIPTION'] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.meta[lang]['title'], p.permalink(lang)) for p in posts] deps_context["global"] = self.GLOBAL_CONTEXT task = { 'name': os.path.normpath(output_name), 'targets': [output_name], 'file_dep': deps, 'actions': [(self.render_template, [template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_context)] } return utils.apply_filters(task, filters)
# NOTE(review): a second, older copy of the Nikola class. It duplicates the
# class above but is Python-2-only: `print` statements, the `urlparse` module
# (renamed urllib.parse in Py3), a `cmp=`-based sort, `str.decode`-era string
# handling, and a bare `except:` around os.makedirs. It cannot run under
# Python 3 and appears to be dead/legacy code concatenated into the same file;
# consider keeping only one of the two copies. Left byte-identical here --
# like the rest of the file, the original newlines were stripped and must be
# restored before this parses.
class Nikola(object): """Class that handles site generation. Takes a site config as argument on creation. """ def __init__(self, **config): """Setup proper environment for running tasks.""" self.global_data = {} self.posts_per_year = defaultdict(list) self.posts_per_tag = defaultdict(list) self.timeline = [] self.pages = [] self._scanned = False # This is the default config # TODO: fill it self.config = { 'ARCHIVE_PATH': "", 'ARCHIVE_FILENAME': "archive.html", 'DEFAULT_LANG': "en", 'OUTPUT_FOLDER': 'output', 'FILES_FOLDERS': {'files': ''}, 'LISTINGS_FOLDER': 'listings', 'ADD_THIS_BUTTONS': True, 'INDEX_DISPLAY_POST_COUNT': 10, 'INDEX_TEASERS': False, 'MAX_IMAGE_SIZE': 1280, 'USE_FILENAME_AS_TITLE': True, 'SLUG_TAG_PATH': False, 'INDEXES_TITLE': "", 'INDEXES_PAGES': "", 'FILTERS': {}, 'USE_BUNDLES': True, 'TAG_PAGES_ARE_INDEXES': False, 'THEME': 'default', 'post_compilers': { "rest": ['.txt', '.rst'], "markdown": ['.md', '.mdown', '.markdown'], "html": ['.html', '.htm'], }, } self.config.update(config) self.config['TRANSLATIONS'] = self.config.get('TRANSLATIONS', {self.config['DEFAULT_LANG']: ''}) # FIXME: find way to achieve this with the plugins #if self.config['USE_BUNDLES'] and not webassets: #self.config['USE_BUNDLES'] = False self.GLOBAL_CONTEXT = self.config.get('GLOBAL_CONTEXT', {}) self.THEMES = utils.get_theme_chain(self.config['THEME']) self.MESSAGES = utils.load_messages(self.THEMES, self.config['TRANSLATIONS']) self.GLOBAL_CONTEXT['messages'] = self.MESSAGES self.GLOBAL_CONTEXT['_link'] = self.link self.GLOBAL_CONTEXT['rel_link'] = self.rel_link self.GLOBAL_CONTEXT['abs_link'] = self.abs_link self.GLOBAL_CONTEXT['exists'] = self.file_exists self.GLOBAL_CONTEXT['add_this_buttons'] = self.config[ 'ADD_THIS_BUTTONS'] self.GLOBAL_CONTEXT['index_display_post_count'] = self.config[ 'INDEX_DISPLAY_POST_COUNT'] self.GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES'] self.plugin_manager = PluginManager(categories_filter={ "Command": Command, "Task": 
Task, "LateTask": LateTask, "TemplateSystem": TemplateSystem, "PageCompiler": PageCompiler, }) self.plugin_manager.setPluginInfoExtension('plugin') self.plugin_manager.setPluginPlaces([ os.path.join(os.path.dirname(__file__), 'plugins'), os.path.join(os.getcwd(), 'plugins'), ]) self.plugin_manager.collectPlugins() self.commands = {} # Activate all command plugins for pluginInfo in self.plugin_manager.getPluginsOfCategory("Command"): self.plugin_manager.activatePluginByName(pluginInfo.name) pluginInfo.plugin_object.set_site(self) pluginInfo.plugin_object.short_help = pluginInfo.description self.commands[pluginInfo.name] = pluginInfo.plugin_object # Activate all task plugins for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"): self.plugin_manager.activatePluginByName(pluginInfo.name) pluginInfo.plugin_object.set_site(self) for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"): self.plugin_manager.activatePluginByName(pluginInfo.name) pluginInfo.plugin_object.set_site(self) # Load template plugin template_sys_name = utils.get_template_engine(self.THEMES) pi = self.plugin_manager.getPluginByName( template_sys_name, "TemplateSystem") if pi is None: sys.stderr.write("Error loading %s template system plugin\n" % template_sys_name) sys.exit(1) self.template_system = pi.plugin_object self.template_system.set_directories( [os.path.join(utils.get_theme_path(name), "templates") for name in self.THEMES]) # Load compiler plugins self.compilers = {} self.inverse_compilers = {} for pluginInfo in self.plugin_manager.getPluginsOfCategory( "PageCompiler"): self.compilers[pluginInfo.name] = \ pluginInfo.plugin_object.compile_html def get_compiler(self, source_name): """Get the correct compiler for a post from `conf.post_compilers` To make things easier for users, the mapping in conf.py is compiler->[extensions], although this is less convenient for us. The majority of this function is reversing that dictionary and error checking. 
""" ext = os.path.splitext(source_name)[1] try: compile_html = self.inverse_compilers[ext] except KeyError: # Find the correct compiler for this files extension langs = [lang for lang, exts in self.config['post_compilers'].items() if ext in exts] if len(langs) != 1: if len(set(langs)) > 1: exit("Your file extension->compiler definition is" "ambiguous.\nPlease remove one of the file extensions" "from 'post_compilers' in conf.py\n(The error is in" "one of %s)" % ', '.join(langs)) elif len(langs) > 1: langs = langs[:1] else: exit("post_compilers in conf.py does not tell me how to " "handle '%s' extensions." % ext) lang = langs[0] compile_html = self.compilers[lang] self.inverse_compilers[ext] = compile_html return compile_html def render_template(self, template_name, output_name, context): data = self.template_system.render_template( template_name, None, context, self.GLOBAL_CONTEXT) assert output_name.startswith(self.config["OUTPUT_FOLDER"]) url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1:] # This is to support windows paths url_part = "/".join(url_part.split(os.sep)) src = urlparse.urljoin(self.config["BLOG_URL"], url_part) parsed_src = urlparse.urlsplit(src) src_elems = parsed_src.path.split('/')[1:] def replacer(dst): # Refuse to replace links that are full URLs. dst_url = urlparse.urlparse(dst) if dst_url.netloc: if dst_url.scheme == 'link': # Magic link dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'), context['lang']) else: return dst # Normalize dst = urlparse.urljoin(src, dst) # Avoid empty links. 
if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_dst = urlparse.urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break # Now i is the longest common prefix result = '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) if not result: result = "." # Don't forget the fragment (anchor) part of the link if parsed_dst.fragment: result += "#" + parsed_dst.fragment assert result, (src, dst, i, src_elems, dst_elems) return result try: os.makedirs(os.path.dirname(output_name)) except: pass doc = lxml.html.document_fromstring(data) doc.rewrite_links(replacer) data = '<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8') with open(output_name, "w+") as post_file: post_file.write(data) def path(self, kind, name, lang, is_link=False): """Build the path to a certain kind of page. kind is one of: * tag_index (name is ignored) * tag (and name is the tag name) * tag_rss (name is the tag name) * archive (and name is the year, or None for the main archive index) * index (name is the number in index-number) * rss (name is ignored) * gallery (name is the gallery name) * listing (name is the source code file name) The returned value is always a path relative to output, like "categories/whatever.html" If is_link is True, the path is absolute and uses "/" as separator (ex: "/archive/index.html"). If is_link is False, the path is relative to output and uses the platform's separator. 
(ex: "archive\\index.html") """ path = [] if kind == "tag_index": path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], 'index.html']) elif kind == "tag": if self.config['SLUG_TAG_PATH']: name = utils.slugify(name) path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], name + ".html"]) elif kind == "tag_rss": if self.config['SLUG_TAG_PATH']: name = utils.slugify(name) path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], name + ".xml"]) elif kind == "index": if name > 0: path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['INDEX_PATH'], 'index-%s.html' % name]) else: path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['INDEX_PATH'], 'index.html']) elif kind == "rss": path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['RSS_PATH'], 'rss.xml']) elif kind == "archive": if name: path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['ARCHIVE_PATH'], name, 'index.html']) else: path = filter(None, [self.config['TRANSLATIONS'][lang], self.config['ARCHIVE_PATH'], self.config['ARCHIVE_FILENAME']]) elif kind == "gallery": path = filter(None, [self.config['GALLERY_PATH'], name, 'index.html']) elif kind == "listing": path = filter(None, [self.config['LISTINGS_FOLDER'], name + '.html']) if is_link: return '/' + ('/'.join(path)) else: return os.path.join(*path) def link(self, *args): return self.path(*args, is_link=True) def abs_link(self, dst): # Normalize dst = urlparse.urljoin(self.config['BLOG_URL'], dst) return urlparse.urlparse(dst).path def rel_link(self, src, dst): # Normalize src = urlparse.urljoin(self.config['BLOG_URL'], src) dst = urlparse.urljoin(src, dst) # Avoid empty links. 
if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlparse.urlsplit(src) parsed_dst = urlparse.urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split('/')[1:] dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def gen_tasks(self): task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"): for task in pluginInfo.plugin_object.gen_tasks(): yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"): for task in pluginInfo.plugin_object.gen_tasks(): yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield { 'name': 'all', 'actions': None, 'clean': True, 'task_dep': task_dep } def scan_posts(self): """Scan all the posts.""" if not self._scanned: print "Scanning posts ", targets = set([]) for wildcard, destination, _, use_in_feeds in \ self.config['post_pages']: print ".", for base_path in glob.glob(wildcard): post = Post(base_path, destination, use_in_feeds, self.config['TRANSLATIONS'], self.config['DEFAULT_LANG'], self.config['BLOG_URL'], self.MESSAGES) for lang, langpath in self.config['TRANSLATIONS'].items(): dest = (destination, langpath, post.pagenames[lang]) if dest in targets: raise Exception( 'Duplicated output path %r in post %r' % (post.pagenames[lang], base_path)) targets.add(dest) self.global_data[post.post_name] = post if 
post.use_in_feeds: self.posts_per_year[ str(post.date.year)].append(post.post_name) for tag in post.tags: self.posts_per_tag[tag].append(post.post_name) else: self.pages.append(post) for name, post in self.global_data.items(): self.timeline.append(post) self.timeline.sort(cmp=lambda a, b: cmp(a.date, b.date)) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = post_timeline[i + 1] self._scanned = True print "done!" def generic_page_renderer(self, lang, wildcard, template_name, destination, filters): """Render post fragments to final HTML pages.""" for post in glob.glob(wildcard): post_name = os.path.splitext(post)[0] context = {} post = self.global_data[post_name] deps = post.deps(lang) + \ self.template_system.template_deps(template_name) context['post'] = post context['lang'] = lang context['title'] = post.title(lang) context['description'] = post.description(lang) context['permalink'] = post.permalink(lang) context['page_list'] = self.pages output_name = os.path.join( self.config['OUTPUT_FOLDER'], self.config['TRANSLATIONS'][lang], destination, post.pagenames[lang] + ".html") deps_dict = copy(context) deps_dict.pop('post') if post.prev_post: deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)] deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER'] deps_dict['TRANSLATIONS'] = self.config['TRANSLATIONS'] task = { 'name': output_name.encode('utf-8'), 'file_dep': deps, 'targets': [output_name], 'actions': [(self.render_template, [template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" deps = 
self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config['BLOG_TITLE'] context["description"] = self.config['BLOG_DESCRIPTION'] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.titles[lang], p.permalink(lang)) for p in posts] task = { 'name': output_name.encode('utf8'), 'targets': [output_name], 'file_dep': deps, 'actions': [(self.render_template, [template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_context)] } yield utils.apply_filters(task, filters)
# NOTE(review): unrelated script fragment (an OpenBCI OSC-streamer demo). It
# starts mid-statement -- `level=logging.DEBUG)` is the tail of a logging
# configuration call begun outside this view -- and references a `manager`
# PluginManager defined elsewhere, so it is not executable as-is. It collects
# yapsy plugins from "openbci/plugins", prints their names, then pre-activates
# the "streamer_osc" plugin with hard-coded host/port/address and channel
# counts. Left byte-identical.
level=logging.DEBUG) logging.info('---------LOG START-------------') # yapsy plugins_paths = ["openbci/plugins"] # if args.plugins_path: # plugins_paths += args.plugins_path manager.setPluginPlaces(plugins_paths) manager.collectPlugins() print("Found plugins:") for plugin in manager.getAllPlugins(): print("[ " + plugin.name + " ]") print("\n") osc = manager.getPluginByName("streamer_osc") # osc.plugin_object.pre_activate(['localhost', 57120, "/obci0"], sample_rate=250, eeg_channels=8, aux_channels=3, imp_channels=0) osc.plugin_object.pre_activate(['localhost', 57120, "/obci0"], sample_rate=250, eeg_channels=8, aux_channels=3, imp_channels=0) # osc.plugin_object.ip='localhost' # osc.plugin_object.port=57120 # osc.plugin_object.address="/obci0" # print("osc.plugin_object.port", osc.plugin_object.port)
class Evidencer:
    """Run data-extraction ("extractor") plugins against user configurations.

    Extractor plugins are discovered via yapsy's PluginManager.  Optional
    "pre configurations" (JSON files found under the configured directories)
    provide default parameters that each user configuration is merged onto
    with jsonmerge before the extractor runs.
    """

    def __init__(self):
        # Default search paths; callers may append more before extracting.
        self.extractors_plugins_directories = [
            DefaultDirectory.extractors_plugins()
        ]
        self.extractors_pre_configurations_directories = [
            DefaultDirectory.extractors_pre_configurations()
        ]
        self._plugin_manager = PluginManager()
        # pre_configuration_name -> configuration dict; rebuilt on _import()
        self._extractors_pre_configurations = {}

    def extract_by_file(self, user_extractors_configurations_file):
        """Run every extraction described in a JSON configuration file.

        The directory containing the file becomes the working directory
        passed to each extractor.
        """
        working_directory = os.path.abspath(
            os.path.dirname(user_extractors_configurations_file))
        user_extractors_configurations = self._read_json_file(
            user_extractors_configurations_file)
        return self.extract_all(user_extractors_configurations,
                                working_directory)

    def extract_all(self, user_extractors_configurations, working_directory):
        """Run all configured extractions.

        :return: dict mapping extraction name -> Extraction result
        """
        self._import()
        results = {}
        for user_extractor_configuration in user_extractors_configurations[
                ConfigurationKeys.configurations]:
            result = self._extract_one(user_extractor_configuration,
                                       working_directory)
            extraction_name = user_extractor_configuration[
                ConfigurationKeys.extraction_name]
            results[extraction_name] = result
        return results

    def append_extractors_plugin_directory(self, path):
        """Add an extra directory to search for extractor plugins."""
        self.extractors_plugins_directories.append(path)

    def append_extractors_pre_configurations_directory(self, path):
        """Add an extra directory to search for pre-configuration JSON files."""
        self.extractors_pre_configurations_directories.append(path)

    def _import(self):
        # Reload plugins and rebuild the pre-configuration index from scratch
        # so a second extract_all() call picks up directory changes.
        self._import_extractors()
        self._extractors_pre_configurations = {}
        self._import_extractor_pre_configurations()

    def _import_extractor_pre_configurations(self):
        for directory in self.extractors_pre_configurations_directories:
            # Recursively collect every *.json file.  (The original wrapped
            # os.path.join in a second, redundant os.path.join.)
            for file_configuration in Path(directory).glob(
                    os.path.join("**", "*.json")):
                self._import_extractor_pre_configuration_file(
                    file_configuration)

    def _import_extractor_pre_configuration_file(self, file_configuration):
        configuration = self._read_json_file(file_configuration)
        # TODO: check json schema
        self._extractors_pre_configurations[configuration[
            ConfigurationKeys.pre_configuration_name]] = configuration

    def _read_json_file(self, file_path):
        """Parse a JSON file, wrapping any failure in EvidencerException."""
        try:
            with open(file_path) as json_file:
                return json.load(json_file)
        except Exception as e:
            # Chain the original exception so the full traceback is kept.
            raise EvidencerException(
                "Error during parsing json file '%s'.\n%s"
                % (file_path, str(e))) from e

    def _import_extractors(self):
        self._plugin_manager.setPluginPlaces(
            self.extractors_plugins_directories)
        self._plugin_manager.collectPlugins()

    def _extract_one(self, user_extractor_configuration, working_directory):
        extractor_configuration = self._prepare_extractor_configuration(
            user_extractor_configuration, working_directory)
        result = self._protected_extraction(extractor_configuration)
        return Extraction(extractor_configuration, result)

    def _protected_extraction(self, extractor_configuration):
        """Look up the extractor plugin and run it, wrapping any failure."""
        try:
            extractor_name = extractor_configuration[
                ConfigurationKeys.extractor]
            extractor = self._plugin_manager.getPluginByName(extractor_name)
            return extractor.plugin_object.extract(extractor_configuration)
        except Exception as e:
            raise EvidencerException(
                "Error during extraction. Possible reasons are non-existent extractor or error in extractor.\n%s"
                % (str(e))) from e

    def _prepare_extractor_configuration(self, user_extractor_configuration,
                                         working_directory):
        """Merge user parameters onto the named pre-configuration defaults."""
        pre_configuration_parameters = self._pre_configuration_parameters(
            user_extractor_configuration)
        parameters_merge = jsonmerge.merge(
            pre_configuration_parameters,
            user_extractor_configuration[ConfigurationKeys.parameters])
        return {
            ConfigurationKeys.extractor:
                user_extractor_configuration[ConfigurationKeys.extractor],
            ConfigurationKeys.extraction_name:
                user_extractor_configuration[ConfigurationKeys.extraction_name],
            ConfigurationKeys.parameters: parameters_merge,
            ConfigurationKeys.working_directory: working_directory
        }

    def _pre_configuration_parameters(self, user_extractor_configuration):
        """Return the parameters of the referenced pre-configuration, or {}."""
        if ConfigurationKeys.pre_configuration in user_extractor_configuration:
            pre_configuration_name = user_extractor_configuration[
                ConfigurationKeys.pre_configuration]
            try:
                return self._extractors_pre_configurations[
                    pre_configuration_name][ConfigurationKeys.parameters]
            except Exception as e:
                raise EvidencerException(
                    "Non-existent pre configuration '%s'.\n%s"
                    % (pre_configuration_name, str(e))) from e
        else:
            return {}
class EfetchPluginManager(object):
    """This class manages and creates plugin objects.

    Plugins come from two sources: yapsy plugins discovered under
    ``<curr_directory>/plugins/`` and plugin definitions read from a YAML
    configuration file.
    """

    def __init__(self, plugins_file, curr_directory):
        # Plugin Manager Setup
        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces([curr_directory + u'/plugins/'])
        self.plugins_file = plugins_file
        self.reload_plugins()

    def reload_plugins_file(self):
        """Reloads all plugins from the YAML file"""
        self.config_file_plugins = self.load_plugin_config(self.plugins_file)

    def reload_plugins(self):
        """Reloads all Yapsy and YAML file plugins"""
        self.plugin_manager.collectPlugins()
        for plugin in self.plugin_manager.getAllPlugins():
            self.plugin_manager.activatePluginByName(plugin.name)
        self.reload_plugins_file()

    def load_plugin_config(self, plugins_file):
        """Loads the plugin config file; returns {} on any failure."""
        if not os.path.isfile(plugins_file):
            # logging.warn is a deprecated alias of logging.warning
            logging.warning(u'Could not find Plugin Configuration File "'
                            + plugins_file + u'"')
            return {}
        with open(plugins_file, 'r') as stream:
            try:
                # safe_load: never construct arbitrary Python objects from
                # the (potentially untrusted) config file; plain yaml.load
                # without a Loader is unsafe and deprecated.
                return yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                logging.error(u'Failed to parse Plugin Configuration File')
                logging.error(exc)
                return {}

    def get_all_plugins(self):
        """Gets a list of all the plugins"""
        plugins = []
        for plugin in self.plugin_manager.getAllPlugins():
            plugins.append(plugin.name)
        for key in self.config_file_plugins:
            plugins.append(key)
        return plugins

    def get_plugin_by_name(self, name):
        """Gets an Efetch plugin by name.

        Yapsy plugins take precedence; otherwise the YAML-defined plugin is
        wrapped in a Plugin object.  Aborts the request with 404 when the
        name is unknown.
        """
        plugin = self.plugin_manager.getPluginByName(str(name).lower())
        if not plugin and name not in self.config_file_plugins:
            logging.warning(u'Request made for unknown plugin "' + name + u'"')
            abort(404, u'Could not find plugin "' + name + u'"')
        elif not plugin:
            plugin = self.config_file_plugins[name]
            # List comprehensions instead of map(): on Python 3 map() returns
            # a one-shot iterator, which would break repeated membership tests.
            return Plugin(plugin.get('name', 'None'),
                          plugin.get('description', 'None'),
                          plugin.get('cache', True),
                          plugin.get('popularity', 5),
                          plugin.get('fast', False),
                          plugin.get('store', False),
                          [m.lower() for m in plugin.get('mimetypes', [])],
                          [e.lower() for e in plugin.get('extensions', [])],
                          [o.lower() for o in plugin.get('os', [])],
                          plugin.get('command', False),
                          plugin.get('format', 'Text'),
                          plugin.get('file', False),
                          plugin.get('openwith', False),
                          plugin.get('icon', 'fa-file-o'))
        else:
            return plugin.plugin_object
class TestDef(object):
    """Holds the state of one MTT run: CLI options, the stage/tool/utility
    plugin managers, the parsed test .ini configuration, and the run log."""

    def __init__(self):
        # set aside storage for options and cmd line args
        self.options = {}
        self.args = []
        # record if we have loaded the plugins or
        # not - this is just a bozo check to ensure
        # someone doesn't tell us to do it twice
        self.loaded = False
        # set aside a spot for a logger object, and
        # note that it hasn't yet been defined
        self.logger = None
        self.modcmd = None
        self.execmd = None
        self.config = None
        self.stages = None
        self.tools = None
        self.utilities = None
        self.log = {}

    def setOptions(self, args):
        """Record the parsed command-line args and prepare the scratch dir."""
        self.options = vars(args)
        self.args = args
        # if they want us to clear the scratch, then do so - but only if it
        # actually exists, otherwise rmtree raises on a fresh checkout
        if self.options['clean'] and os.path.isdir(self.options['scratchdir']):
            shutil.rmtree(self.options['scratchdir'])
        # setup the scratch directory
        _mkdir_recursive(self.options['scratchdir'])

    # scan the key-value pairs obtained from the configuration
    # parser and compare them with the options defined for a
    # given plugin. Generate an output dictionary that contains
    # the updated set of option values, the default value for
    # any option that wasn't included in the configuration file,
    # and return an error status plus output identifying any
    # keys in the configuration file that are not supported
    # by the list of options
    #
    # @log [INPUT]
    #          - a dictionary that will return the status plus
    #            stderr containing strings identifying any
    #            provided keyvals that don't have a corresponding
    #            supported option
    # @options [INPUT]
    #          - a dictionary of tuples, each consisting of three
    #            entries:
    #               (a) the default value
    #               (b) data type
    #               (c) a help-like description
    # @keyvals [INPUT]
    #          - a dictionary of key-value pairs obtained from
    #            the configuration parser
    # @target [OUTPUT]
    #          - the resulting dictionary of key-value pairs
    def parseOptions(self, log, options, keyvals, target):
        # parse the incoming keyvals dictionary against the source
        # options. If a source option isn't provided, then
        # copy it across to the target.
        opts = list(options.keys())
        kvkeys = list(keyvals.keys())
        for opt in opts:
            found = False
            for kvkey in kvkeys:
                if kvkey == opt:
                    # they provided us with an update, so
                    # pass this value into the target - expand
                    # any provided lists
                    if keyvals[kvkey] is None:
                        continue
                    if type(keyvals[kvkey]) is bool:
                        target[opt] = keyvals[kvkey]
                    else:
                        if len(keyvals[kvkey]) == 0:
                            # this indicates they do not want this option
                            found = True
                            break
                        if keyvals[kvkey][0][0] == "[":
                            # they provided a list - remove the brackets
                            val = keyvals[kvkey].replace('[', '')
                            val = val.replace(']', '')
                            # split the input to pickup sets of options
                            newvals = list(val)
                            # convert the values to specified type; the
                            # declared default (options[opt][0]) tells us
                            # the expected type - the original mistakenly
                            # inspected opt[0], the option NAME's first char
                            i = 0
                            for val in newvals:
                                if type(options[opt][0]) is bool:
                                    # note: lower() must be CALLED - the bare
                                    # method object was never in the list
                                    if val.lower() in ['true', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly', 'uh-huh']:
                                        newvals[i] = True
                                    else:
                                        newvals[i] = False
                                i = i + 1
                            target[opt] = newvals
                        else:
                            val = keyvals[kvkey]
                            if type(options[opt][0]) is bool:
                                if val.lower() in ['true', '1', 't', 'y', 'yes', 'yeah', 'yup', 'certainly', 'uh-huh']:
                                    val = True
                                else:
                                    val = False
                            target[opt] = val
                    found = True
                    break
            if not found:
                # they didn't provide this one, so
                # transfer only the value across
                target[opt] = options[opt][0]
        # now go thru in the reverse direction to see
        # if any keyvals they provided aren't supported
        # as this would be an error
        stderr = []
        for kvkey in kvkeys:
            # ignore some standard keys
            if kvkey in ['section', 'plugin']:
                continue
            try:
                if target[kvkey] is not None:
                    pass
            except KeyError:
                # some always need to be passed
                if kvkey in ['parent', 'asis']:
                    target[kvkey] = keyvals[kvkey]
                else:
                    stderr.append("Option " + kvkey + " is not supported")
        if stderr:
            # mark the log with an error status
            log['status'] = 1
            # pass the errors back
            log['stderr'] = stderr
        else:
            log['status'] = 0
        log['options'] = target
        return

    def loadPlugins(self, basedir, topdir):
        """Discover and load all stage, tool, and utility plugins."""
        if self.loaded:
            print("Cannot load plugins multiple times")
            exit(1)
        self.loaded = True
        # find the loader utility so we can bootstrap ourselves
        try:
            m = imp.load_source("LoadClasses",
                                os.path.join(basedir, "LoadClasses.py"))
        except ImportError:
            print("ERROR: unable to load LoadClasses that must contain the class loader object")
            exit(1)
        cls = getattr(m, "LoadClasses")
        a = cls()
        # setup the loader object
        self.loader = a.__class__()
        # Setup the array of directories we will search for plugins
        # Note that we always look at the topdir location by default
        plugindirs = []
        plugindirs.append(topdir)
        if self.options['plugindir']:
            # could be a comma-delimited list, so split on commas
            x = self.options['plugindir'].split(',')
            for y in x:
                # prepend so we always look at the given
                # location first in case the user wants
                # to "overload/replace" a default MTT
                # class definition (lists have no .prepend -
                # insert(0, ...) is the correct call)
                plugindirs.insert(0, y)
        # Traverse the plugin directory tree and add all
        # the class definitions we can find
        for dirPath in plugindirs:
            filez = os.listdir(dirPath)
            for file in filez:
                file = os.path.join(dirPath, file)
                if os.path.isdir(file):
                    self.loader.load(file)
        # Build the stages plugin manager
        self.stages = PluginManager()
        # set the location
        self.stages.setPluginPlaces(plugindirs)
        # Get a list of all the categories - this corresponds to
        # the MTT stages that have been defined. Note that we
        # don't need to formally define the stages here - anyone
        # can add a new stage, or delete an old one, by simply
        # adding or removing a plugin directory.
        self.stages.setCategoriesFilter(self.loader.stages)
        # Load all plugins we find there
        self.stages.collectPlugins()
        # Build the tools plugin manager - tools differ from sections
        # in that they are plugins we will use to execute the various
        # sections. For example, the TestRun section clearly needs the
        # ability to launch jobs. There are many ways to launch jobs
        # depending on the environment, and sometimes several ways to
        # start jobs even within one environment (e.g., mpirun vs
        # direct launch).
        self.tools = PluginManager()
        # location is the same
        self.tools.setPluginPlaces(plugindirs)
        # Get the list of tools - not every tool will be capable
        # of executing. For example, a tool that supports direct launch
        # against a specific resource manager cannot be used on a
        # system being managed by a different RM.
        self.tools.setCategoriesFilter(self.loader.tools)
        # Load all the tool plugins
        self.tools.collectPlugins()
        # Tool plugins are required to provide a function we can
        # probe to determine if they are capable of operating - check
        # those now and prune those tools that cannot support this
        # environment
        # Build the utilities plugins
        self.utilities = PluginManager()
        # set the location
        self.utilities.setPluginPlaces(plugindirs)
        # Get the list of available utilities.
        self.utilities.setCategoriesFilter(self.loader.utilities)
        # Load all the utility plugins
        self.utilities.collectPlugins()
        # since we use these all over the place, find the
        # ExecuteCmd and ModuleCmd plugins and record them
        availUtil = list(self.loader.utilities.keys())
        for util in availUtil:
            for pluginInfo in self.utilities.getPluginsOfCategory(util):
                if "ExecuteCmd" == pluginInfo.plugin_object.print_name():
                    self.execmd = pluginInfo.plugin_object
                elif "ModuleCmd" == pluginInfo.plugin_object.print_name():
                    self.modcmd = pluginInfo.plugin_object
                    # initialize this module
                    self.modcmd.setCommand(self.options)
            if self.execmd is not None and self.modcmd is not None:
                break
        if self.execmd is None:
            print("ExecuteCmd plugin was not found")
            print("This is a basic capability required")
            print("for MTT operations - cannot continue")
            sys.exit(1)
        return

    def printInfo(self):
        """Handle the various --list* informational options and exit."""
        # Print the available MTT sections out, if requested
        if self.options['listsections']:
            print("Supported MTT stages:")
            # print them in the default order of execution
            for stage in self.loader.stageOrder:
                print(" " + stage)
            exit(0)
        # Print the detected plugins for a given stage
        if self.options['listplugins']:
            # if the list is '*', print the plugins for every stage
            if self.options['listplugins'] == "*":
                sections = self.loader.stageOrder
            else:
                sections = self.options['listplugins'].split(',')
            print()
            for section in sections:
                print(section + ":")
                try:
                    for pluginInfo in self.stages.getPluginsOfCategory(section):
                        print(" " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print(" Invalid stage name " + section)
                print()
            exit(1)
        # Print the options for a given plugin
        if self.options['liststageoptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['liststageoptions'] == "*":
                sections = self.loader.stageOrder
            else:
                sections = self.options['liststageoptions'].split(',')
            print()
            for section in sections:
                print(section + ":")
                try:
                    for pluginInfo in self.stages.getPluginsOfCategory(section):
                        print(" " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, " ")
                except KeyError:
                    print(" Invalid stage name " + section)
                print()
            exit(1)
        # Print the available MTT tools out, if requested
        if self.options['listtools']:
            print("Available MTT tools:")
            availTools = list(self.loader.tools.keys())
            for tool in availTools:
                print(" " + tool)
            exit(0)
        # Print the detected tool plugins for a given tool type
        if self.options['listtoolmodules']:
            # if the list is '*', print the plugins for every type
            if self.options['listtoolmodules'] == "*":
                print()
                availTools = list(self.loader.tools.keys())
            else:
                availTools = self.options['listtoolmodules'].split(',')
            print()
            for tool in availTools:
                print(tool + ":")
                try:
                    for pluginInfo in self.tools.getPluginsOfCategory(tool):
                        print(" " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print(" Invalid tool type name", tool)
                print()
            exit(1)
        # Print the options for a given plugin
        if self.options['listtooloptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['listtooloptions'] == "*":
                availTools = list(self.loader.tools.keys())
            else:
                availTools = self.options['listtooloptions'].split(',')
            print()
            for tool in availTools:
                print(tool + ":")
                try:
                    for pluginInfo in self.tools.getPluginsOfCategory(tool):
                        print(" " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, " ")
                except KeyError:
                    print(" Invalid tool type name " + tool)
                print()
            exit(1)
        # Print the available MTT utilities out, if requested
        if self.options['listutils']:
            print("Available MTT utilities:")
            availUtils = list(self.loader.utilities.keys())
            for util in availUtils:
                print(" " + util)
            exit(0)
        # Print the detected utility plugins for a given tool type
        if self.options['listutilmodules']:
            # if the list is '*', print the plugins for every type
            if self.options['listutilmodules'] == "*":
                print()
                availUtils = list(self.loader.utilities.keys())
            else:
                # the guard above checks 'listutilmodules'; the original
                # read a non-existent 'listutilitymodules' key here
                availUtils = self.options['listutilmodules'].split(',')
            print()
            for util in availUtils:
                print(util + ":")
                try:
                    for pluginInfo in self.utilities.getPluginsOfCategory(util):
                        print(" " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print(" Invalid utility type name")
                print()
            exit(1)
        # Print the options for a given plugin
        if self.options['listutiloptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['listutiloptions'] == "*":
                availUtils = list(self.loader.utilities.keys())
            else:
                availUtils = self.options['listutiloptions'].split(',')
            print()
            for util in availUtils:
                print(util + ":")
                try:
                    for pluginInfo in self.utilities.getPluginsOfCategory(util):
                        print(" " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, " ")
                except KeyError:
                    print(" Invalid utility type name " + util)
                print()
            exit(1)
        # if they asked for the version info, print it and exit
        if self.options['version']:
            for pluginInfo in self.tools.getPluginsOfCategory("Version"):
                print("MTT Base: " + pluginInfo.plugin_object.getVersion())
                print("MTT Client: " + pluginInfo.plugin_object.getClientVersion())
            sys.exit(0)

    def openLogger(self):
        """Activate the required Logger utility plugin and open the log."""
        # there must be a logger utility or we can't do
        # anything useful
        if not self.utilities.activatePluginByName("Logger", "Base"):
            print("Required Logger plugin not found or could not be activated")
            sys.exit(1)
        # execute the provided test description
        self.logger = self.utilities.getPluginByName("Logger", "Base").plugin_object
        self.logger.open(self)
        return

    def configTest(self):
        """Parse the test .ini file(s) named on the command line."""
        # Tuck away the full path and the testFile file name
        self.log['inifiles'] = self.args.ini_files[0]
        for testFile in self.log['inifiles']:
            if not os.path.isfile(testFile):
                print("Test .ini file not found!: " + testFile)
                sys.exit(1)
        self.config = configparser.ConfigParser()
        # Set the config parser to make option names case sensitive.
        self.config.optionxform = str
        self.config.read(self.log['inifiles'])
        for section in self.config.sections():
            if self.logger is not None:
                self.logger.verbose_print("SECTION: " + section)
                self.logger.verbose_print(self.config.items(section))
            if self.options['dryrun']:
                continue
            if section.startswith("SKIP") or section.startswith("skip"):
                # users often want to temporarily ignore a section
                # of their test definition file, but don't want to
                # remove it lest they forget what it did. So let
                # them just mark the section as "skip" to be ignored
                continue
        return

    def executeTest(self):
        """Run the configured test through the selected Executor tool."""
        if not self.loaded:
            print("Plugins have not been loaded - cannot execute test")
            exit(1)
        if self.config is None:
            print("No test definition file was parsed - cannot execute test")
            exit(1)
        if not self.tools.getPluginByName(self.options['executor'], "Executor"):
            # the original referenced self.executor, which is never set on
            # this class - the executor name lives in self.options
            print("Specified executor", self.options['executor'], "not found")
            exit(1)
        # activate the specified plugin
        self.tools.activatePluginByName(self.options['executor'], "Executor")
        # execute the provided test description
        executor = self.tools.getPluginByName(self.options['executor'], "Executor")
        executor.plugin_object.execute(self)
        return

    def printOptions(self, options):
        """Format an options dict into aligned, wrapped help lines."""
        # if the options are empty, report that
        if not options:
            lines = ["None"]
            return lines
        # create the list of options
        opts = []
        vals = list(options.keys())
        for val in vals:
            opts.append(val)
            if options[val][0] is None:
                opts.append("None")
            elif isinstance(options[val][0], bool):
                if options[val][0]:
                    opts.append("True")
                else:
                    opts.append("False")
            elif isinstance(options[val][0], list):
                opts.append(" ".join(options[val][0]))
            else:
                opts.append(options[val][0])
            opts.append(options[val][1])
        # print the options, their default value, and
        # the help description in 3 column format
        max1 = 0
        max2 = 0
        for i in range(0, len(opts), 3):
            # we want all the columns to line up
            # and left-justify, so first find out
            # the max len of each of the first two
            # column entries
            if len(opts[i]) > max1:
                max1 = len(opts[i])
            if len(opts[i+1]) > max2:
                max2 = len(opts[i+1])
        # provide some spacing
        max1 = max1 + 4
        max2 = max2 + 4
        # cycle thru again, padding each entry to
        # align the columns
        lines = []
        sp = " "
        for i in range(0, len(opts), 3):
            line = opts[i] + (max1-len(opts[i]))*sp
            line = line + opts[i+1] + (max2-len(opts[i+1]))*sp
            # to make this more readable, we will wrap the line at
            # 130 characters. First, see if the line is going to be
            # too long
            if 130 < (len(line) + len(opts[i+2])):
                # split the remaining column into individual words
                words = opts[i+2].split()
                first = True
                for word in words:
                    if (len(line) + len(word)) < 130:
                        if first:
                            line = line + word
                            first = False
                        else:
                            line = line + " " + word
                    else:
                        lines.append(line)
                        line = (max1 + max2)*sp + word
                if 0 < len(line):
                    lines.append(line)
            else:
                # the line is fine - so just add the last piece
                line = line + opts[i+2]
                # append the result
                lines.append(line)
            # add one blank line
            lines.append("")
        return lines
class TestDef(object): def __init__(self): # set aside storage for options and cmd line args self.options = {} self.args = [] # record if we have loaded the plugins or # not - this is just a bozo check to ensure # someone doesn't tell us to do it twice self.loaded = False # set aside a spot for a logger object, and # note that it hasn't yet been defined self.logger = None self.modcmd = None self.execmd = None self.harasser = None self.config = None self.stages = None self.tools = None self.utilities = None self.defaults = None self.log = {} def setOptions(self, args): self.options = vars(args) self.args = args # if they want us to clear the scratch, then do so if self.options['clean'] and os.path.isdir(self.options['scratchdir']): shutil.rmtree(self.options['scratchdir']) # setup the scratch directory _mkdir_recursive(self.options['scratchdir']) # private function to convert values def __convert_value(self, opt, inval): if opt is None or type(opt) is str: return 0, inval elif type(opt) is bool: if type(inval) is bool: return 0, inval elif type(inval) is str: if inval.lower() in ['true', '1', 't', 'y', 'yes']: return 0, True else: return 0, False elif type(inval) is int: if 0 == inval: return 0, False else: return 0, True elif is_py2 and type(inval) is unicode: return 0, int(inval) else: # unknown conversion required print("Unknown conversion required for " + inval) return 1, None elif type(opt) is int: if type(inval) is int: return 0, inval elif type(inval) is str: return 0, int(inval) else: # unknown conversion required print("Unknown conversion required for " + inval) return 1, None elif type(opt) is float: if type(inval) is float: return 0, inval elif type(inval) is str or type(inval) is int: return 0, float(inval) else: # unknown conversion required print("Unknown conversion required for " + inval) return 1, None else: return 1, None # scan the key-value pairs obtained from the configuration # parser and compare them with the options defined for a # given 
plugin. Generate an output dictionary that contains # the updated set of option values, the default value for # any option that wasn't included in the configuration file, # and return an error status plus output identifying any # keys in the configuration file that are not supported # by the list of options # # @log [INPUT] # - a dictionary that will return the status plus # stderr containing strings identifying any # provided keyvals that don't have a corresponding # supported option # @options [INPUT] # - a dictionary of tuples, each consisting of three # entries: # (a) the default value # (b) data type # (c) a help-like description # @keyvals [INPUT] # - a dictionary of key-value pairs obtained from # the configuration parser # @target [OUTPUT] # - the resulting dictionary of key-value pairs def parseOptions(self, log, options, keyvals, target): # parse the incoming keyvals dictionary against the source # options. If a source option isn't provided, then # copy it across to the target. 
opts = list(options.keys()) kvkeys = list(keyvals.keys()) for opt in opts: found = False for kvkey in kvkeys: if kvkey == opt: # they provided us with an update, so # pass this value into the target - expand # any provided lists if keyvals[kvkey] is None: continue st, outval = self.__convert_value(options[opt][0], keyvals[kvkey]) if 0 == st: target[opt] = outval else: if len(keyvals[kvkey]) == 0: # this indicates they do not want this option found = True break if keyvals[kvkey][0][0] == "[": # they provided a list - remove the brackets val = keyvals[kvkey].replace('[', '') val = val.replace(']', '') # split the input to pickup sets of options newvals = list(val) # convert the values to specified type i = 0 for val in newvals: st, newvals[i] = self.__convert_value( opt[0], val) i = i + 1 target[opt] = newvals else: st, target[opt] = self.__convert_value( opt[0], keyvals[kvkey]) found = True break if not found: # they didn't provide this one, so # transfer only the value across target[opt] = options[opt][0] # add in any default settings that have not # been overridden - anything set by this input # stage will override the default if self.defaults is not None: keys = self.defaults.options.keys() for key in keys: if key not in target: target[key] = self.defaults.options[key][0] # now go thru in the reverse direction to see # if any keyvals they provided aren't supported # as this would be an error stderr = [] for kvkey in kvkeys: # ignore some standard keys if kvkey in ['section', 'plugin']: continue try: if target[kvkey] is not None: pass except KeyError: # some always need to be passed if kvkey in ['parent', 'asis']: target[kvkey] = keyvals[kvkey] else: stderr.append("Option " + kvkey + " is not supported") if stderr: # mark the log with an error status log['status'] = 1 # pass the errors back log['stderr'] = stderr else: log['status'] = 0 log['options'] = target return def loadPlugins(self, basedir, topdir): if self.loaded: print("Cannot load plugins multiple 
times") exit(1) self.loaded = True # find the loader utility so we can bootstrap ourselves try: m = imp.load_source("LoadClasses", os.path.join(basedir, "LoadClasses.py")) except ImportError: print( "ERROR: unable to load LoadClasses that must contain the class loader object" ) exit(1) cls = getattr(m, "LoadClasses") a = cls() # setup the loader object self.loader = a.__class__() # Setup the array of directories we will search for plugins # Note that we always look at the topdir location by default plugindirs = [] plugindirs.append(topdir) if self.options['plugindir']: # could be a comma-delimited list, so split on commas x = self.options['plugindir'].split(',') for y in x: # prepend so we always look at the given # location first in case the user wants # to "overload/replace" a default MTT # class definition plugindirs.insert(0, y) # Traverse the plugin directory tree and add all # the class definitions we can find for dirPath in plugindirs: try: filez = os.listdir(dirPath) for file in filez: file = os.path.join(dirPath, file) if os.path.isdir(file): self.loader.load(file) except: if not self.options['ignoreloadpatherrs']: print("Plugin directory", dirPath, "not found") sys.exit(1) # Build the stages plugin manager self.stages = PluginManager() # set the location self.stages.setPluginPlaces(plugindirs) # Get a list of all the categories - this corresponds to # the MTT stages that have been defined. Note that we # don't need to formally define the stages here - anyone # can add a new stage, or delete an old one, by simply # adding or removing a plugin directory. self.stages.setCategoriesFilter(self.loader.stages) # Load all plugins we find there self.stages.collectPlugins() # Build the tools plugin manager - tools differ from sections # in that they are plugins we will use to execute the various # sections. For example, the TestRun section clearly needs the # ability to launch jobs. 
There are many ways to launch jobs # depending on the environment, and sometimes several ways to # start jobs even within one environment (e.g., mpirun vs # direct launch). self.tools = PluginManager() # location is the same self.tools.setPluginPlaces(plugindirs) # Get the list of tools - not every tool will be capable # of executing. For example, a tool that supports direct launch # against a specific resource manager cannot be used on a # system being managed by a different RM. self.tools.setCategoriesFilter(self.loader.tools) # Load all the tool plugins self.tools.collectPlugins() # Tool plugins are required to provide a function we can # probe to determine if they are capable of operating - check # those now and prune those tools that cannot support this # environment # Build the utilities plugins self.utilities = PluginManager() # set the location self.utilities.setPluginPlaces(plugindirs) # Get the list of available utilities. self.utilities.setCategoriesFilter(self.loader.utilities) # Load all the utility plugins self.utilities.collectPlugins() # since we use these all over the place, find the # ExecuteCmd and ModuleCmd plugins and record them availUtil = list(self.loader.utilities.keys()) for util in availUtil: for pluginInfo in self.utilities.getPluginsOfCategory(util): if "ExecuteCmd" == pluginInfo.plugin_object.print_name(): self.execmd = pluginInfo.plugin_object elif "ModuleCmd" == pluginInfo.plugin_object.print_name(): self.modcmd = pluginInfo.plugin_object # initialize this module self.modcmd.setCommand(self.options) if self.execmd is not None and self.modcmd is not None: break if self.execmd is None: print("ExecuteCmd plugin was not found") print("This is a basic capability required") print("for MTT operations - cannot continue") sys.exit(1) print(self.tools.getPluginsOfCategory("Harasser")) for pluginInfo in self.tools.getPluginsOfCategory("Harasser"): print(pluginInfo.plugin_object.print_name()) if "Harasser" == 
pluginInfo.plugin_object.print_name(): self.harasser = pluginInfo.plugin_object break if self.harasser is None: print("Harasser plugin was not found") print("This is required for all TestRun plugins") print("cannot continue") sys.exit(1) # similarly, capture the highest priority defaults stage here pri = -1 for pluginInfo in self.stages.getPluginsOfCategory("MTTDefaults"): if pri < pluginInfo.plugin_object.priority(): self.defaults = pluginInfo.plugin_object pri = pluginInfo.plugin_object.priority() return def printInfo(self): # Print the available MTT sections out, if requested if self.options['listsections']: print("Supported MTT stages:") # print them in the default order of execution for stage in self.loader.stageOrder: print(" " + stage) exit(0) # Print the detected plugins for a given stage if self.options['listplugins']: # if the list is '*', print the plugins for every stage if self.options['listplugins'] == "*": sections = self.loader.stageOrder else: sections = self.options['listplugins'].split(',') print() for section in sections: print(section + ":") try: for pluginInfo in self.stages.getPluginsOfCategory( section): print(" " + pluginInfo.plugin_object.print_name()) except KeyError: print(" Invalid stage name " + section) print() exit(1) # Print the options for a given plugin if self.options['liststageoptions']: # if the list is '*', print the options for every stage/plugin if self.options['liststageoptions'] == "*": sections = self.loader.stageOrder else: sections = self.options['liststageoptions'].split(',') print() for section in sections: print(section + ":") try: for pluginInfo in self.stages.getPluginsOfCategory( section): print(" " + pluginInfo.plugin_object.print_name() + ":") pluginInfo.plugin_object.print_options( self, " ") except KeyError: print(" Invalid stage name " + section) print() exit(1) # Print the available MTT tools out, if requested if self.options['listtools']: print("Available MTT tools:") availTools = 
list(self.loader.tools.keys()) for tool in availTools: print(" " + tool) exit(0) # Print the detected tool plugins for a given tool type if self.options['listtoolmodules']: # if the list is '*', print the plugins for every type if self.options['listtoolmodules'] == "*": print() availTools = list(self.loader.tools.keys()) else: availTools = self.options['listtoolmodules'].split(',') print() for tool in availTools: print(tool + ":") try: for pluginInfo in self.tools.getPluginsOfCategory(tool): print(" " + pluginInfo.plugin_object.print_name()) except KeyError: print(" Invalid tool type name", tool) print() exit(1) # Print the options for a given plugin if self.options['listtooloptions']: # if the list is '*', print the options for every stage/plugin if self.options['listtooloptions'] == "*": availTools = list(self.loader.tools.keys()) else: availTools = self.options['listtooloptions'].split(',') print() for tool in availTools: print(tool + ":") try: for pluginInfo in self.tools.getPluginsOfCategory(tool): print(" " + pluginInfo.plugin_object.print_name() + ":") pluginInfo.plugin_object.print_options( self, " ") except KeyError: print(" Invalid tool type name " + tool) print() exit(1) # Print the available MTT utilities out, if requested if self.options['listutils']: print("Available MTT utilities:") availUtils = list(self.loader.utilities.keys()) for util in availUtils: print(" " + util) exit(0) # Print the detected utility plugins for a given tool type if self.options['listutilmodules']: # if the list is '*', print the plugins for every type if self.options['listutilmodules'] == "*": print() availUtils = list(self.loader.utilities.keys()) else: availUtils = self.options['listutilitymodules'].split(',') print() for util in availUtils: print(util + ":") try: for pluginInfo in self.utilities.getPluginsOfCategory( util): print(" " + pluginInfo.plugin_object.print_name()) except KeyError: print(" Invalid utility type name") print() exit(1) # Print the options for a given 
plugin if self.options['listutiloptions']: # if the list is '*', print the options for every stage/plugin if self.options['listutiloptions'] == "*": availUtils = list(self.loader.utilities.keys()) else: availUtils = self.options['listutiloptions'].split(',') print() for util in availUtils: print(util + ":") try: for pluginInfo in self.utilities.getPluginsOfCategory( util): print(" " + pluginInfo.plugin_object.print_name() + ":") pluginInfo.plugin_object.print_options( self, " ") except KeyError: print(" Invalid utility type name " + util) print() exit(1) # if they asked for the version info, print it and exit if self.options['version']: for pluginInfo in self.tools.getPluginsOfCategory("Version"): print("MTT Base: " + pluginInfo.plugin_object.getVersion()) print("MTT Client: " + pluginInfo.plugin_object.getClientVersion()) sys.exit(0) def openLogger(self): # there must be a logger utility or we can't do # anything useful if not self.utilities.activatePluginByName("Logger", "Base"): print("Required Logger plugin not found or could not be activated") sys.exit(1) # execute the provided test description self.logger = self.utilities.getPluginByName("Logger", "Base").plugin_object self.logger.open(self) return def configTest(self): # setup the configuration parser self.config = configparser.SafeConfigParser( interpolation=configparser.ExtendedInterpolation()) # Set the config parser to make option names case sensitive. 
self.config.optionxform = str # fill ENV section with environemt variables self.config.add_section('ENV') for k, v in os.environ.items(): self.config.set('ENV', k, v.replace("$", "$$")) # log the list of files - note that the argument parser # puts the input files in a list, with the first member # being the list of input files self.log['inifiles'] = self.args.ini_files[0] # initialize the list of active sections self.actives = [] # if they specified a list to execute, then use it sections = [] if self.args.section: sections = self.args.section.split(",") skip = False elif self.args.skipsections: sections = self.args.skipsections.split(",") skip = True else: sections = None # cycle thru the input files for testFile in self.log['inifiles']: if not os.path.isfile(testFile): print("Test description file", testFile, "not found!") sys.exit(1) self.config.read(self.log['inifiles']) # Check for ENV input required_env = [] all_file_contents = [] for testFile in self.log['inifiles']: file_contents = open(testFile, "r").read() file_contents = "\n".join([ "%s %d: %s" % (testFile.split("/")[-1], i, l) for i, l in enumerate(file_contents.split("\n")) if not l.lstrip().startswith("#") ]) all_file_contents.append(file_contents) if "${ENV:" in file_contents: required_env.extend([ s.split("}")[0] for s in file_contents.split("${ENV:")[1:] ]) env_not_found = set( [e for e in required_env if e not in os.environ.keys()]) lines_with_env_not_found = [] for file_contents in all_file_contents: lines_with_env_not_found.extend(["%s: %s"%(",".join([e for e in env_not_found if "${ENV:%s}"%e in l]),l) \ for l in file_contents.split("\n") \ if sum(["${ENV:%s}"%e in l for e in env_not_found])]) if lines_with_env_not_found: print("ERROR: Not all required environment variables are defined.") print("ERROR: Still need:") for l in lines_with_env_not_found: print("ERROR: %s" % l) sys.exit(1) for section in self.config.sections(): if section.startswith("SKIP") or section.startswith("skip"): # users 
often want to temporarily ignore a section # of their test definition file, but don't want to # remove it lest they forget what it did. So let # them just mark the section as "skip" to be ignored continue # if we are to filter the sections, then do so takeus = True if sections is not None: found = False for sec in sections: if sec == section: found = True sections.remove(sec) if skip: takeus = False break if not found and not skip: takeus = False if takeus: self.actives.append(section) if self.logger is not None: self.logger.verbose_print("SECTION: " + section) self.logger.verbose_print(self.config.items(section)) if sections is not None and 0 != len(sections) and not skip: print( "ERROR: sections were specified for execution and not found:", sections) sys.exit(1) return # Used with combinatorial executor, loads next .ini file to be run with the # sequential executor def configNewTest(self, file): # clear the configuration parser for section in self.config.sections(): self.config.remove_section(section) # read in the file self.config.read(file) for section in self.config.sections(): if section.startswith("SKIP") or section.startswith("skip"): # users often want to temporarily ignore a section # of their test definition file, but don't want to # remove it lest they forget what it did. 
So let # them just mark the section as "skip" to be ignored continue if self.logger is not None: self.logger.verbose_print("SECTION: " + section) self.logger.verbose_print(self.config.items(section)) return def executeTest(self): if not self.loaded: print("Plugins have not been loaded - cannot execute test") exit(1) if self.config is None: print("No test definition file was parsed - cannot execute test") exit(1) if not self.tools.getPluginByName("sequential", "Executor"): print("Specified executor sequential not found") exit(1) # activate the specified plugin self.tools.activatePluginByName("sequential", "Executor") # execute the provided test description executor = self.tools.getPluginByName("sequential", "Executor") status = executor.plugin_object.execute(self) return status def executeCombinatorial(self): if not self.tools.getPluginByName("combinatorial", "Executor"): print("Specified executor combinatorial not found") exit(1) # activate the specified plugin self.tools.activatePluginByName("combinatorial", "Executor") # execute the provided test description executor = self.tools.getPluginByName("combinatorial", "Executor") status = executor.plugin_object.execute(self) return status def printOptions(self, options): # if the options are empty, report that if not options: lines = ["None"] return lines # create the list of options opts = [] vals = list(options.keys()) for val in vals: opts.append(val) if options[val][0] is None: opts.append("None") elif isinstance(options[val][0], bool): if options[val][0]: opts.append("True") else: opts.append("False") elif isinstance(options[val][0], list): opts.append(" ".join(options[val][0])) elif isinstance(options[val][0], int): opts.append(str(options[val][0])) else: opts.append(options[val][0]) opts.append(options[val][1]) # print the options, their default value, and # the help description in 3 column format max1 = 0 max2 = 0 for i in range(0, len(opts), 3): # we want all the columns to line up # and left-justify, so first 
find out # the max len of each of the first two # column entries if len(opts[i]) > max1: max1 = len(opts[i]) if type(opts[i + 1]) is not str: optout = str(opts[i + 1]) else: optout = opts[i + 1] if len(optout) > max2: max2 = len(optout) # provide some spacing max1 = max1 + 4 max2 = max2 + 4 # cycle thru again, padding each entry to # align the columns lines = [] sp = " " for i in range(0, len(opts), 3): line = opts[i] + (max1 - len(opts[i])) * sp if type(opts[i + 1]) is not str: optout = str(opts[i + 1]) else: optout = opts[i + 1] line = line + optout + (max2 - len(optout)) * sp # to make this more readable, we will wrap the line at # 130 characters. First, see if the line is going to be # too long if 130 < (len(line) + len(opts[i + 2])): # split the remaining column into individual words words = opts[i + 2].split() first = True for word in words: if (len(line) + len(word)) < 130: if first: line = line + word first = False else: line = line + " " + word else: lines.append(line) line = (max1 + max2) * sp + word if 0 < len(line): lines.append(line) else: # the line is fine - so just add the last piece line = line + opts[i + 2] # append the result lines.append(line) # add one blank line lines.append("") return lines def selectPlugin(self, name, category): if category == "stage": try: availStages = list(self.loader.stages.keys()) for stage in availStages: for pluginInfo in self.stages.getPluginsOfCategory(stage): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None elif category == "tool": try: availTools = list(self.loader.tools.keys()) for tool in availTools: for pluginInfo in self.tools.getPluginsOfCategory(tool): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None elif category == "utility": try: availUtils = list(self.loader.utilities.keys()) for util in availUtils: for pluginInfo in 
self.utilities.getPluginsOfCategory( util): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None else: print("Unrecognized category:", category) return None
import open_bci_ganglion as bci plugins_paths = ["plugins"] manager.setPluginPlaces(plugins_paths) manager.collectPlugins() board = bci.OpenBCIBoard(port=None, daisy=False, filter_data=True, scaled_output=True, log=False, aux=False) plug_name = 'csv_collect' plug_args = 'record.csv' plug = manager.getPluginByName(plug_name) plug_list = [] callback_list = [] if plug == None: # eg: if an import fail inside a plugin, yapsy skip it print("Error: [ " + plug_name + " ] not found or could not be loaded. Check name and requirements.") else: print("\nActivating [ " + plug_name + " ] plugin...") if not plug.plugin_object.pre_activate(plug_args, sample_rate=board.getSampleRate(), eeg_channels=board.getNbEEGChannels(), aux_channels=board.getNbAUXChannels(), imp_channels=board.getNbImpChannels()): print("Error while activating [ " + plug_name +
class ModuleManager():
    """Discover and run assembly-pipeline plugins via yapsy.

    Loads every plugin found in the local ``plugins`` directory, verifies
    that each plugin's configured binary exists on disk, and exposes
    helpers to run modules and to parse pipeline description strings
    (e.g. ``['sga_ec', 'kiki ?k=31 velvet', 'sspace']``).
    """

    def __init__(self, threads, kill_list, job_list):
        """Collect plugins and verify their configured binaries.

        Raises:
            Exception: if no plugins are found, or a plugin's file-like
                setting does not exist on disk.
        """
        self.threads = threads
        self.kill_list = kill_list
        self.job_list = job_list  # Running jobs
        self.pmanager = PluginManager()
        self.pmanager.setPluginPlaces(["plugins"])
        self.pmanager.collectPlugins()
        self.pmanager.locatePlugins()
        # 'none' is a pseudo-module name accepted by pipeline descriptions.
        self.plugins = ['none']
        num_plugins = len(self.pmanager.getAllPlugins())
        if num_plugins == 0:
            raise Exception("No Plugins Found!")
        plugins = []
        for plugin in self.pmanager.getAllPlugins():
            plugin.threads = threads
            self.plugins.append(plugin.name)
            plugin.plugin_object.setname(plugin.name)
            ## Check for installed binaries
            executable = ''
            try:
                settings = plugin.details.items('Settings')
                for kv in settings:
                    executable = kv[1]
                    if executable.find('/') != -1:  # Hackish "looks like a file"
                        if os.path.exists(executable):
                            logging.info("Found file: {}".format(executable))
                            break
                        else:
                            raise Exception()
                ## TODO detect binaries not in "executable" setting
            except:
                raise Exception('[ERROR]: {} -- Binary does not exist -- {}'.format(plugin.name, executable))
            plugins.append(plugin.name)
        # print() call form so the module parses on both Python 2 and 3.
        print("Plugins found [{}]: {}".format(num_plugins, sorted(plugins)))

    def run_module(self, module, job_data_orig, tar=False, all_data=False,
                   reads=False, meta=False, overrides=True):
        """Run a single plugin module and return its output.

        Keyword Arguments:
        module -- name of plugin
        job_data_orig -- dict of job parameters (aliased, not copied, so
            updates such as 'out_report' remain visible to the caller)
        tar -- return tar of all output, rather than module.OUTPUT file
        all_data -- return module.OUTPUT and list of all files in self.outdir
        reads -- include files if module.OUTPUT == 'reads'.
            Not recommended for large read files.
        meta -- pass meta=True through to the plugin invocation

        Returns:
            (output, extra, log) or (output, tarfile, [], log) when tar.
        Raises:
            Exception: if MODULE does not name a loaded plugin.
        """
        # Intentionally alias the caller's dict (a deepcopy was tried and
        # reverted upstream so that open file descriptors pass through).
        job_data = job_data_orig
        if not self.has_plugin(module):
            raise Exception("No plugin named {}".format(module))
        plugin = self.pmanager.getPluginByName(module)
        settings = plugin.details.items('Settings')
        plugin.plugin_object.update_settings(job_data)
        if meta:
            output = plugin.plugin_object(settings, job_data, self, meta=True)
        else:
            output = plugin.plugin_object(settings, job_data, self)
        log = plugin.plugin_object.out_module.name
        if tar:
            tarfile = plugin.plugin_object.tar_output(job_data['job_id'])
            return output, tarfile, [], log
        if all_data:
            if not reads and plugin.plugin_object.OUTPUT == 'reads':
                # Don't return all files from plugins that output reads
                data = []
            else:
                data = plugin.plugin_object.get_all_output_files()
            return output, data, log
        return output, [], log

    def output_type(self, module):
        """Return the OUTPUT type declared by MODULE's plugin object."""
        return self.pmanager.getPluginByName(module).plugin_object.OUTPUT

    def input_type(self, module):
        """Return the INPUT type declared by MODULE's plugin object."""
        return self.pmanager.getPluginByName(module).plugin_object.INPUT

    def get_short_name(self, module):
        """Return MODULE's 'short_name' setting, or None if unavailable."""
        try:
            plugin = self.pmanager.getPluginByName(module)
            settings = plugin.details.items('Settings')
            for kv in settings:
                if kv[0] == 'short_name':
                    sn = kv[1]
                    break
            return sn
        except:
            # Broad on purpose: missing plugin/section/key all mean "no name".
            return None

    def get_executable(self, module):
        """Return MODULE's 'short_name' setting, or None if unavailable.

        FIXME(review): byte-for-byte duplicate of get_short_name -- it looks
        up 'short_name', not an executable path.  Behavior preserved here;
        confirm the intended Settings key before changing.
        """
        try:
            plugin = self.pmanager.getPluginByName(module)
            settings = plugin.details.items('Settings')
            for kv in settings:
                if kv[0] == 'short_name':
                    sn = kv[1]
                    break
            return sn
        except:
            return None

    def has_plugin(self, plugin):
        """Return True if PLUGIN (case-insensitive) is a loaded module name.

        NOTE(review): compares plugin.lower() against self.plugins, which
        stores names in their original case -- assumes plugin names are
        lowercase; confirm against the plugin files.
        """
        if not plugin.lower() in self.plugins:
            logging.error("{} plugin not found".format(plugin))
            return False
        return True

    def valid_modules(self, l):
        """ Return filtered list of available modules """
        return [m for m in l if not m.startswith('?') and self.has_plugin(m)]

    def validate_pipe(self, pipe):
        """Raise if PIPE references a word that is neither a param nor a plugin."""
        for stage in pipe:
            for word in stage.replace('+', ' ').split(' '):
                if not (word.startswith('?') or self.has_plugin(word)):
                    raise Exception('Invalid pipeline command')

    def split_pipe(self, l):
        """ Splits a multi-module string in to bins
        Ex: 'kiki ?k=29 velvet' -> [[kiki, ?k=29], [velvet]]
        """
        bins = []
        for word in l:
            if not word.startswith('?'):
                bins.append([word])
            elif word.startswith('?'):
                # Param words attach to the most recent module bin.
                bins[-1].append(word)
        return bins

    def parse_input(self, pipe):
        """ Parses inital pipe and separates branching bins
        Ex: ['sga', '?p=True', 'kiki ?k=31 velvet', 'sspace']
        """
        stages = phelper.parse_branches(pipe)
        return stages

    def parse_pipe(self, pipe):
        """ Returns the pipeline(s) of modules, plus parameter overrides.
        e.g Input: [sga_ec 'kiki ?k=31 velvet ?ins=500' sspace]
        Output: [kiki, velvet, a5], [{k:31}, {ins:500}, {}]
        """
        overrides = []
        pipeline = []
        module_num = -1
        for group in pipe:
            for word in group.split('+'):
                if word.lower() == 'none':
                    pass
                elif not word.startswith('?') and self.has_plugin(word):
                    # is module
                    module_num = module_num + 1
                    pipeline.append(word)
                    overrides.append({})
                elif word[1:-1].find('=') != -1:
                    # is param override; split on the first '=' only so
                    # values containing '=' no longer crash dict([kv]).
                    kv = word[1:].split('=', 1)
                    # Py2/Py3-safe merge (was dict(a.items() + b.items()),
                    # which raises TypeError on Python 3).
                    merged = dict(overrides[module_num])
                    merged.update(dict([kv]))
                    overrides[module_num] = merged
        return pipeline, overrides
class EventDispatcher(object):
    """Poll an SQS queue and dispatch CloudFormation custom-resource events
    to yapsy handler plugins, retrying failed events up to a threshold."""

    # Give up on an event after this many failed handling attempts.
    failed_event_threshold = 10
    # NOTE(review): class-level mutable dict -- shared by every instance.
    # Preserved as-is; confirm whether per-instance state was intended.
    failed_events = {}

    def __init__(self):
        logging.basicConfig(format='%(asctime)s %(levelname)s %(module)s: %(message)s',
                            datefmt='%d.%m.%Y %H:%M:%S',
                            level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces(["plugins"])
        self.plugin_manager.setPluginInfoExtension('plugin')
        self._init_plugins()
        self.sqs_queue = SqsQueue()

    def _init_plugins(self):
        """Collect and activate every plugin found in the plugin places."""
        self.plugin_manager.collectPlugins()
        for pluginInfo in self.plugin_manager.getAllPlugins():
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            self.logger.info("Loaded plugin: {0}".format(pluginInfo.name))

    def get_plugin_by_name(self, name):
        """Return the handler plugin for NAME, falling back to "Default".

        Asserts that at least the fallback plugin is loaded.
        """
        plugin = self.plugin_manager.getPluginByName(name)
        if not plugin:
            self.logger.debug("No plugin found, using Default!")
            plugin = self.plugin_manager.getPluginByName("Default")
        assert plugin, "No handler plugin loaded for resource name {0}".format(name)
        self.logger.info("Choose {0} handler for resource name {1}".format(plugin.name, name))
        return plugin

    def increment_failure_counter(self, event):
        """Bump and return the failure count for EVENT (keyed by event.id)."""
        if event.id in self.failed_events:
            self.failed_events[event.id] += 1
        else:
            self.failed_events[event.id] = 1
        return self.failed_events[event.id]

    def get_resource_type_name(self, event):
        """Return the event's ResourceType with a leading "Custom::" removed.

        Bug fix: the original used str.lstrip("Custom::"), which strips any
        run of leading characters drawn from the set {C,u,s,t,o,m,:} and so
        mangles names such as "Custom::customTable" into "Table".  A real
        prefix test is used instead.
        """
        resource_type = event.get_property("ResourceType")
        prefix = "Custom::"
        if resource_type.startswith(prefix):
            return resource_type[len(prefix):]
        return resource_type

    def dispatch_event(self, sqs_message):
        """Handle one SQS message, deleting it once handled or given up on.

        On handler failure the message is left in the queue (by bailing out
        before the delete) until failed_event_threshold attempts have been
        made, after which it is deleted anyway.
        """
        event = CustomResourceEvent(sqs_message)
        self.logger.info("Handling event: {0}".format(event.id))
        self.logger.debug(event)
        resource_type = self.get_resource_type_name(event)
        try:
            event_handler = self.get_plugin_by_name(resource_type)
            event_handler.plugin_object.handle_event(event)
        except Exception as e:
            failure_count = self.increment_failure_counter(event)
            self.logger.error("Couldn't handle event: "
                              "{0}, error was {1} (try {2}/{3})".format(event, str(e),
                                                                        failure_count,
                                                                        self.failed_event_threshold))
            if failure_count < self.failed_event_threshold:
                # Bail out without deleting so the message becomes visible
                # again and is retried.  NOTE(review): when invoked from
                # event_loop this runs in a Thread, where SystemExit only
                # terminates that thread, not the process -- confirm intent.
                sys.exit(1)
        self.sqs_queue.delete_message(sqs_message)
        self.logger.info("Deleted event from queue: {0}".format(event.id))

    def event_loop(self):
        """Poll the queue forever, dispatching each message on its own thread."""
        while True:
            # TODO: handle errors here
            sqs_messages = self.sqs_queue.get_messages()
            if sqs_messages:
                self.logger.info("Found {0} events in the queue".format(len(sqs_messages)))
                for sqs_message in sqs_messages:
                    thread = Thread(target=self.dispatch_event, args=(sqs_message,))
                    thread.start()

    def load_plugins(self):
        # Intentionally a no-op: plugins are loaded in __init__/_init_plugins.
        pass
class Nikola(object): """Class that handles site generation. Takes a site config as argument on creation. """ EXTRA_PLUGINS = ["planetoid", "ipynb", "local_search", "render_mustache"] def __init__(self, **config): """Setup proper environment for running tasks.""" # Register our own path handlers self.path_handlers = {"slug": self.slug_path, "post_path": self.post_path} self.strict = False self.global_data = {} self.posts = [] self.posts_per_year = defaultdict(list) self.posts_per_month = defaultdict(list) self.posts_per_tag = defaultdict(list) self.posts_per_category = defaultdict(list) self.post_per_file = {} self.timeline = [] self.pages = [] self._scanned = False self._template_system = None self._THEMES = None self.loghandlers = [] if not config: self.configured = False else: self.configured = True # This is the default config self.config = { "ADD_THIS_BUTTONS": True, "ANNOTATIONS": False, "ARCHIVE_PATH": "", "ARCHIVE_FILENAME": "archive.html", "BLOG_TITLE": "Default Title", "BLOG_DESCRIPTION": "Default Description", "BODY_END": "", "CACHE_FOLDER": "cache", "CODE_COLOR_SCHEME": "default", "COMMENT_SYSTEM": "disqus", "COMMENTS_IN_GALLERIES": False, "COMMENTS_IN_STORIES": False, "COMPILERS": { "rest": (".txt", ".rst"), "markdown": (".md", ".mdown", ".markdown"), "textile": (".textile",), "txt2tags": (".t2t",), "bbcode": (".bb",), "wiki": (".wiki",), "ipynb": (".ipynb",), "html": (".html", ".htm"), }, "CONTENT_FOOTER": "", "COPY_SOURCES": True, "CREATE_MONTHLY_ARCHIVE": False, "CREATE_SINGLE_ARCHIVE": False, "DATE_FORMAT": "%Y-%m-%d %H:%M", "DEFAULT_LANG": "en", "DEPLOY_COMMANDS": [], "DISABLED_PLUGINS": (), "COMMENT_SYSTEM_ID": "nikolademo", "ENABLED_EXTRAS": (), "EXTRA_HEAD_DATA": "", "FAVICONS": {}, "FEED_LENGTH": 10, "FILE_METADATA_REGEXP": None, "ADDITIONAL_METADATA": {}, "FILES_FOLDERS": {"files": ""}, "FILTERS": {}, "GALLERY_PATH": "galleries", "GALLERY_SORT_BY_DATE": True, "GZIP_COMMAND": None, "GZIP_FILES": False, "GZIP_EXTENSIONS": (".txt", ".htm", 
".html", ".css", ".js", ".json", ".xml"), "HIDE_SOURCELINK": False, "HIDE_UNTRANSLATED_POSTS": False, "HYPHENATE": False, "INDEX_DISPLAY_POST_COUNT": 10, "INDEX_FILE": "index.html", "INDEX_TEASERS": False, "INDEXES_TITLE": "", "INDEXES_PAGES": "", "INDEX_PATH": "", "IPYNB_CONFIG": {}, "LICENSE": "", "LINK_CHECK_WHITELIST": [], "LISTINGS_FOLDER": "listings", "NAVIGATION_LINKS": None, "MARKDOWN_EXTENSIONS": ["fenced_code", "codehilite"], "MAX_IMAGE_SIZE": 1280, "MATHJAX_CONFIG": "", "OLD_THEME_SUPPORT": True, "OUTPUT_FOLDER": "output", "POSTS": (("posts/*.txt", "posts", "post.tmpl"),), "PAGES": (("stories/*.txt", "stories", "story.tmpl"),), "PRETTY_URLS": False, "FUTURE_IS_NOW": False, "READ_MORE_LINK": '<p class="more"><a href="{link}">{read_more}…</a></p>', "REDIRECTIONS": [], "RSS_LINK": None, "RSS_PATH": "", "RSS_TEASERS": True, "SEARCH_FORM": "", "SLUG_TAG_PATH": True, "SOCIAL_BUTTONS_CODE": SOCIAL_BUTTONS_CODE, "SITE_URL": "http://getnikola.com/", "STORY_INDEX": False, "STRIP_INDEXES": False, "SITEMAP_INCLUDE_FILELESS_DIRS": True, "TAG_PATH": "categories", "TAG_PAGES_ARE_INDEXES": False, "THEME": "bootstrap", "THEME_REVEAL_CONFIG_SUBTHEME": "sky", "THEME_REVEAL_CONFIG_TRANSITION": "cube", "THUMBNAIL_SIZE": 180, "URL_TYPE": "rel_path", "USE_BUNDLES": True, "USE_CDN": False, "USE_FILENAME_AS_TITLE": True, "TIMEZONE": "UTC", "DEPLOY_DRAFTS": True, "DEPLOY_FUTURE": False, "SCHEDULE_ALL": False, "SCHEDULE_RULE": "", "SCHEDULE_FORCE_TODAY": False, "LOGGING_HANDLERS": {"stderr": {"loglevel": "WARNING", "bubble": True}}, "DEMOTE_HEADERS": 1, } self.config.update(config) # Make sure we have pyphen installed if we are using it if self.config.get("HYPHENATE") and pyphen is None: utils.LOGGER.warn("To use the hyphenation, you have to install " 'the "pyphen" package.') utils.LOGGER.warn("Setting HYPHENATE to False.") self.config["HYPHENATE"] = False # Deprecating post_compilers # TODO: remove on v7 if "post_compilers" in config: utils.LOGGER.warn("The post_compilers option 
is deprecated, use COMPILERS instead.") if "COMPILERS" in config: utils.LOGGER.warn("COMPILERS conflicts with post_compilers, ignoring post_compilers.") else: self.config["COMPILERS"] = config["post_compilers"] # Deprecating post_pages # TODO: remove on v7 if "post_pages" in config: utils.LOGGER.warn("The post_pages option is deprecated, use POSTS and PAGES instead.") if "POSTS" in config or "PAGES" in config: utils.LOGGER.warn("POSTS and PAGES conflict with post_pages, ignoring post_pages.") else: self.config["POSTS"] = [item[:3] for item in config["post_pages"] if item[-1]] self.config["PAGES"] = [item[:3] for item in config["post_pages"] if not item[-1]] # FIXME: Internally, we still use post_pages because it's a pain to change it self.config["post_pages"] = [] for i1, i2, i3 in self.config["POSTS"]: self.config["post_pages"].append([i1, i2, i3, True]) for i1, i2, i3 in self.config["PAGES"]: self.config["post_pages"].append([i1, i2, i3, False]) # Deprecating DISQUS_FORUM # TODO: remove on v7 if "DISQUS_FORUM" in config: utils.LOGGER.warn("The DISQUS_FORUM option is deprecated, use COMMENT_SYSTEM_ID instead.") if "COMMENT_SYSTEM_ID" in config: utils.LOGGER.warn("DISQUS_FORUM conflicts with COMMENT_SYSTEM_ID, ignoring DISQUS_FORUM.") else: self.config["COMMENT_SYSTEM_ID"] = config["DISQUS_FORUM"] # Deprecating the ANALYTICS option # TODO: remove on v7 if "ANALYTICS" in config: utils.LOGGER.warn("The ANALYTICS option is deprecated, use BODY_END instead.") if "BODY_END" in config: utils.LOGGER.warn("ANALYTICS conflicts with BODY_END, ignoring ANALYTICS.") else: self.config["BODY_END"] = config["ANALYTICS"] # Deprecating the SIDEBAR_LINKS option # TODO: remove on v7 if "SIDEBAR_LINKS" in config: utils.LOGGER.warn("The SIDEBAR_LINKS option is deprecated, use NAVIGATION_LINKS instead.") if "NAVIGATION_LINKS" in config: utils.LOGGER.warn("The SIDEBAR_LINKS conflicts with NAVIGATION_LINKS, ignoring SIDEBAR_LINKS.") else: self.config["NAVIGATION_LINKS"] = 
config["SIDEBAR_LINKS"] # Compatibility alias self.config["SIDEBAR_LINKS"] = self.config["NAVIGATION_LINKS"] if self.config["NAVIGATION_LINKS"] in (None, {}): self.config["NAVIGATION_LINKS"] = {self.config["DEFAULT_LANG"]: ()} # Deprecating the ADD_THIS_BUTTONS option # TODO: remove on v7 if "ADD_THIS_BUTTONS" in config: utils.LOGGER.warn("The ADD_THIS_BUTTONS option is deprecated, use SOCIAL_BUTTONS_CODE instead.") if not config["ADD_THIS_BUTTONS"]: utils.LOGGER.warn("Setting SOCIAL_BUTTONS_CODE to empty because ADD_THIS_BUTTONS is False.") self.config["SOCIAL_BUTTONS_CODE"] = "" # STRIP_INDEX_HTML config has been replaces with STRIP_INDEXES # Port it if only the oldef form is there # TODO: remove on v7 if "STRIP_INDEX_HTML" in config and "STRIP_INDEXES" not in config: utils.LOGGER.warn("You should configure STRIP_INDEXES instead of STRIP_INDEX_HTML") self.config["STRIP_INDEXES"] = config["STRIP_INDEX_HTML"] # PRETTY_URLS defaults to enabling STRIP_INDEXES unless explicitly disabled if config.get("PRETTY_URLS", False) and "STRIP_INDEXES" not in config: self.config["STRIP_INDEXES"] = True if config.get("COPY_SOURCES") and not self.config["HIDE_SOURCELINK"]: self.config["HIDE_SOURCELINK"] = True self.config["TRANSLATIONS"] = self.config.get("TRANSLATIONS", {self.config["DEFAULT_LANG"]: ""}) # SITE_URL is required, but if the deprecated BLOG_URL # is available, use it and warn # TODO: remove on v7 if "SITE_URL" not in self.config: if "BLOG_URL" in self.config: utils.LOGGER.warn("You should configure SITE_URL instead of BLOG_URL") self.config["SITE_URL"] = self.config["BLOG_URL"] self.default_lang = self.config["DEFAULT_LANG"] self.translations = self.config["TRANSLATIONS"] locale_fallback, locale_default, locales = sanitized_locales( self.config.get("LOCALE_FALLBACK", None), self.config.get("LOCALE_DEFAULT", None), self.config.get("LOCALES", {}), self.translations, ) # NOQA utils.LocaleBorg.initialize(locales, self.default_lang) # BASE_URL defaults to SITE_URL if 
"BASE_URL" not in self.config: self.config["BASE_URL"] = self.config.get("SITE_URL") # BASE_URL should *always* end in / if self.config["BASE_URL"] and self.config["BASE_URL"][-1] != "/": utils.LOGGER.warn("Your BASE_URL doesn't end in / -- adding it.") self.plugin_manager = PluginManager( categories_filter={ "Command": Command, "Task": Task, "LateTask": LateTask, "TemplateSystem": TemplateSystem, "PageCompiler": PageCompiler, "TaskMultiplier": TaskMultiplier, "RestExtension": RestExtension, "SignalHandler": SignalHandler, } ) self.plugin_manager.setPluginInfoExtension("plugin") if sys.version_info[0] == 3: places = [os.path.join(os.path.dirname(__file__), "plugins"), os.path.join(os.getcwd(), "plugins")] else: places = [ os.path.join(os.path.dirname(__file__), utils.sys_encode("plugins")), os.path.join(os.getcwd(), utils.sys_encode("plugins")), ] self.plugin_manager.setPluginPlaces(places) self.plugin_manager.collectPlugins() # Activate all required SignalHandler plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("SignalHandler"): if plugin_info.name in self.config.get("DISABLED_PLUGINS"): self.plugin_manager.removePluginFromCategory(plugin_info, "SignalHandler") else: self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Emit signal for SignalHandlers which need to start running immediately. 
signal("sighandlers_loaded").send(self) self.commands = {} # Activate all command plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("Command"): if plugin_info.name in self.config["DISABLED_PLUGINS"] or ( plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config["ENABLED_EXTRAS"] ): self.plugin_manager.removePluginFromCategory(plugin_info, "Command") continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) plugin_info.plugin_object.short_help = plugin_info.description self.commands[plugin_info.name] = plugin_info.plugin_object # Activate all task plugins for task_type in ["Task", "LateTask"]: for plugin_info in self.plugin_manager.getPluginsOfCategory(task_type): if plugin_info.name in self.config["DISABLED_PLUGINS"] or ( plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config["ENABLED_EXTRAS"] ): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all multiplier plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): if plugin_info.name in self.config["DISABLED_PLUGINS"] or ( plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config["ENABLED_EXTRAS"] ): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all required compiler plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("PageCompiler"): if plugin_info.name in self.config["COMPILERS"].keys(): self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # set global_context for template rendering self._GLOBAL_CONTEXT = {} self._GLOBAL_CONTEXT["_link"] = self.link self._GLOBAL_CONTEXT["set_locale"] = 
utils.LocaleBorg().set_locale self._GLOBAL_CONTEXT["rel_link"] = self.rel_link self._GLOBAL_CONTEXT["abs_link"] = self.abs_link self._GLOBAL_CONTEXT["exists"] = self.file_exists self._GLOBAL_CONTEXT["SLUG_TAG_PATH"] = self.config["SLUG_TAG_PATH"] self._GLOBAL_CONTEXT["annotations"] = self.config["ANNOTATIONS"] self._GLOBAL_CONTEXT["index_display_post_count"] = self.config["INDEX_DISPLAY_POST_COUNT"] self._GLOBAL_CONTEXT["use_bundles"] = self.config["USE_BUNDLES"] self._GLOBAL_CONTEXT["use_cdn"] = self.config.get("USE_CDN") self._GLOBAL_CONTEXT["favicons"] = self.config["FAVICONS"] self._GLOBAL_CONTEXT["date_format"] = self.config.get("DATE_FORMAT", "%Y-%m-%d %H:%M") self._GLOBAL_CONTEXT["blog_author"] = self.config.get("BLOG_AUTHOR") self._GLOBAL_CONTEXT["blog_title"] = self.config.get("BLOG_TITLE") # TODO: remove fallback in v7 self._GLOBAL_CONTEXT["blog_url"] = self.config.get("SITE_URL", self.config.get("BLOG_URL")) self._GLOBAL_CONTEXT["blog_desc"] = self.config.get("BLOG_DESCRIPTION") self._GLOBAL_CONTEXT["body_end"] = self.config.get("BODY_END") # TODO: remove in v7 self._GLOBAL_CONTEXT["analytics"] = self.config.get("BODY_END") # TODO: remove in v7 self._GLOBAL_CONTEXT["add_this_buttons"] = self.config.get("SOCIAL_BUTTONS_CODE") self._GLOBAL_CONTEXT["social_buttons_code"] = self.config.get("SOCIAL_BUTTONS_CODE") self._GLOBAL_CONTEXT["translations"] = self.config.get("TRANSLATIONS") self._GLOBAL_CONTEXT["license"] = self.config.get("LICENSE") self._GLOBAL_CONTEXT["search_form"] = self.config.get("SEARCH_FORM") self._GLOBAL_CONTEXT["comment_system"] = self.config.get("COMMENT_SYSTEM") self._GLOBAL_CONTEXT["comment_system_id"] = self.config.get("COMMENT_SYSTEM_ID") # TODO: remove in v7 self._GLOBAL_CONTEXT["disqus_forum"] = self.config.get("COMMENT_SYSTEM_ID") self._GLOBAL_CONTEXT["mathjax_config"] = self.config.get("MATHJAX_CONFIG") self._GLOBAL_CONTEXT["subtheme"] = self.config.get("THEME_REVEAL_CONFIG_SUBTHEME") self._GLOBAL_CONTEXT["transition"] = 
self.config.get("THEME_REVEAL_CONFIG_TRANSITION") self._GLOBAL_CONTEXT["content_footer"] = self.config.get("CONTENT_FOOTER") self._GLOBAL_CONTEXT["rss_path"] = self.config.get("RSS_PATH") self._GLOBAL_CONTEXT["rss_link"] = self.config.get("RSS_LINK") self._GLOBAL_CONTEXT["navigation_links"] = utils.Functionary(list, self.config["DEFAULT_LANG"]) for k, v in self.config.get("NAVIGATION_LINKS", {}).items(): self._GLOBAL_CONTEXT["navigation_links"][k] = v # TODO: remove on v7 # Compatibility alias self._GLOBAL_CONTEXT["sidebar_links"] = self._GLOBAL_CONTEXT["navigation_links"] self._GLOBAL_CONTEXT["twitter_card"] = self.config.get("TWITTER_CARD", {}) self._GLOBAL_CONTEXT["hide_sourcelink"] = self.config.get("HIDE_SOURCELINK") self._GLOBAL_CONTEXT["extra_head_data"] = self.config.get("EXTRA_HEAD_DATA") self._GLOBAL_CONTEXT.update(self.config.get("GLOBAL_CONTEXT", {})) # Load compiler plugins self.compilers = {} self.inverse_compilers = {} for plugin_info in self.plugin_manager.getPluginsOfCategory("PageCompiler"): self.compilers[plugin_info.name] = plugin_info.plugin_object signal("configured").send(self) def _get_themes(self): if self._THEMES is None: # Check for old theme names (Issue #650) TODO: remove in v7 theme_replacements = {"site": "bootstrap", "orphan": "base", "default": "oldfashioned"} if self.config["THEME"] in theme_replacements: utils.LOGGER.warn( 'You are using the old theme "{0}", using "{1}" instead.'.format( self.config["THEME"], theme_replacements[self.config["THEME"]] ) ) self.config["THEME"] = theme_replacements[self.config["THEME"]] if self.config["THEME"] == "oldfashioned": utils.LOGGER.warn( """You may need to install the "oldfashioned" theme """ """from themes.nikola.ralsina.com.ar because it's not """ """shipped by default anymore.""" ) utils.LOGGER.warn("Please change your THEME setting.") try: self._THEMES = utils.get_theme_chain(self.config["THEME"]) except Exception: utils.LOGGER.warn("""Can't load theme "{0}", using 'bootstrap' 
instead.""".format(self.config["THEME"])) self.config["THEME"] = "bootstrap" return self._get_themes() # Check consistency of USE_CDN and the current THEME (Issue #386) if self.config["USE_CDN"]: bootstrap_path = utils.get_asset_path(os.path.join("assets", "css", "bootstrap.min.css"), self._THEMES) if bootstrap_path and bootstrap_path.split(os.sep)[-4] not in ["bootstrap", "bootstrap3"]: utils.LOGGER.warn( "The USE_CDN option may be incompatible with your theme, because it uses a hosted version of bootstrap." ) return self._THEMES THEMES = property(_get_themes) def _get_messages(self): return utils.load_messages(self.THEMES, self.translations, self.default_lang) MESSAGES = property(_get_messages) def _get_global_context(self): """Initialize some parts of GLOBAL_CONTEXT only when it's queried.""" if "messages" not in self._GLOBAL_CONTEXT: self._GLOBAL_CONTEXT["messages"] = self.MESSAGES if "has_custom_css" not in self._GLOBAL_CONTEXT: # check if custom css exist and is not empty custom_css_path = utils.get_asset_path("assets/css/custom.css", self.THEMES, self.config["FILES_FOLDERS"]) if custom_css_path and self.file_exists(custom_css_path, not_empty=True): self._GLOBAL_CONTEXT["has_custom_css"] = True else: self._GLOBAL_CONTEXT["has_custom_css"] = False return self._GLOBAL_CONTEXT GLOBAL_CONTEXT = property(_get_global_context) def _get_template_system(self): if self._template_system is None: # Load template plugin template_sys_name = utils.get_template_engine(self.THEMES) pi = self.plugin_manager.getPluginByName(template_sys_name, "TemplateSystem") if pi is None: sys.stderr.write("Error loading {0} template system " "plugin\n".format(template_sys_name)) sys.exit(1) self._template_system = pi.plugin_object lookup_dirs = ["templates"] + [ os.path.join(utils.get_theme_path(name), "templates") for name in self.THEMES ] self._template_system.set_directories(lookup_dirs, self.config["CACHE_FOLDER"]) return self._template_system template_system = 
property(_get_template_system) def get_compiler(self, source_name): """Get the correct compiler for a post from `conf.COMPILERS` To make things easier for users, the mapping in conf.py is compiler->[extensions], although this is less convenient for us. The majority of this function is reversing that dictionary and error checking. """ ext = os.path.splitext(source_name)[1] try: compile_html = self.inverse_compilers[ext] except KeyError: # Find the correct compiler for this files extension langs = [lang for lang, exts in list(self.config["COMPILERS"].items()) if ext in exts] if len(langs) != 1: if len(set(langs)) > 1: exit( "Your file extension->compiler definition is" "ambiguous.\nPlease remove one of the file extensions" "from 'COMPILERS' in conf.py\n(The error is in" "one of {0})".format(", ".join(langs)) ) elif len(langs) > 1: langs = langs[:1] else: exit("COMPILERS in conf.py does not tell me how to " "handle '{0}' extensions.".format(ext)) lang = langs[0] compile_html = self.compilers[lang] self.inverse_compilers[ext] = compile_html return compile_html def render_template(self, template_name, output_name, context): local_context = {} local_context["template_name"] = template_name local_context.update(self.GLOBAL_CONTEXT) local_context.update(context) # string, arguments local_context["formatmsg"] = lambda s, *a: s % a data = self.template_system.render_template(template_name, None, local_context) assert output_name.startswith(self.config["OUTPUT_FOLDER"]) url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1 :] # Treat our site as if output/ is "/" and then make all URLs relative, # making the site "relocatable" src = os.sep + url_part src = os.path.normpath(src) # The os.sep is because normpath will change "/" to "\" on windows src = "/".join(src.split(os.sep)) parsed_src = urlsplit(src) src_elems = parsed_src.path.split("/")[1:] def replacer(dst): # Refuse to replace links that are full URLs. 
dst_url = urlparse(dst) if dst_url.netloc: if dst_url.scheme == "link": # Magic link dst = self.link(dst_url.netloc, dst_url.path.lstrip("/"), context["lang"]) else: return dst # Normalize dst = urljoin(src, dst) # Avoid empty links. if src == dst: if self.config.get("URL_TYPE") == "absolute": dst = urljoin(self.config["BASE_URL"], dst) return dst elif self.config.get("URL_TYPE") == "full_path": return dst else: return "#" # Check that link can be made relative, otherwise return dest parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: if self.config.get("URL_TYPE") == "absolute": dst = urljoin(self.config["BASE_URL"], dst) return dst if self.config.get("URL_TYPE") in ("full_path", "absolute"): if self.config.get("URL_TYPE") == "absolute": dst = urljoin(self.config["BASE_URL"], dst) return dst # Now both paths are on the same site and absolute dst_elems = parsed_dst.path.split("/")[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break # Now i is the longest common prefix result = "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:]) if not result: result = "." # Don't forget the fragment (anchor) part of the link if parsed_dst.fragment: result += "#" + parsed_dst.fragment assert result, (src, dst, i, src_elems, dst_elems) return result utils.makedirs(os.path.dirname(output_name)) doc = lxml.html.document_fromstring(data) doc.rewrite_links(replacer) data = b"<!DOCTYPE html>" + lxml.html.tostring(doc, encoding="utf8") with open(output_name, "wb+") as post_file: post_file.write(data) def path(self, kind, name, lang=None, is_link=False): """Build the path to a certain kind of page. These are mostly defined by plugins by registering via the register_path_handler method, except for slug and post_path which are defined in this class' init method. 
Here's some of the others, for historical reasons: * tag_index (name is ignored) * tag (and name is the tag name) * tag_rss (name is the tag name) * category (and name is the category name) * category_rss (and name is the category name) * archive (and name is the year, or None for the main archive index) * index (name is the number in index-number) * rss (name is ignored) * gallery (name is the gallery name) * listing (name is the source code file name) * post_path (name is 1st element in a POSTS/PAGES tuple) * slug (name is the slug of a post or story) The returned value is always a path relative to output, like "categories/whatever.html" If is_link is True, the path is absolute and uses "/" as separator (ex: "/archive/index.html"). If is_link is False, the path is relative to output and uses the platform's separator. (ex: "archive\\index.html") """ if lang is None: lang = utils.LocaleBorg().current_lang path = self.path_handlers[kind](name, lang) if is_link: link = "/" + ("/".join(path)) index_len = len(self.config["INDEX_FILE"]) if self.config["STRIP_INDEXES"] and link[-(1 + index_len) :] == "/" + self.config["INDEX_FILE"]: return link[:-index_len] else: return link else: return os.path.join(*path) def post_path(self, name, lang): """post_path path handler""" return [ _f for _f in [self.config["TRANSLATIONS"][lang], os.path.dirname(name), self.config["INDEX_FILE"]] if _f ] def slug_path(self, name, lang): """slug path handler""" results = [p for p in self.timeline if p.meta("slug") == name] if not results: utils.LOGGER.warning("Can't resolve path request for slug: {0}".format(name)) else: if len(results) > 1: utils.LOGGER.warning("Ambiguous path request for slug: {0}".format(name)) return [_f for _f in results[0].permalink(lang).split("/") if _f] def register_path_handler(self, kind, f): if kind in self.path_handlers: utils.LOGGER.warning("Conflicting path handlers for kind: {0}".format(kind)) else: self.path_handlers[kind] = f def link(self, *args): return 
self.path(*args, is_link=True) def abs_link(self, dst): # Normalize dst = urljoin(self.config["BASE_URL"], dst) return urlparse(dst).path def rel_link(self, src, dst): # Normalize src = urljoin(self.config["BASE_URL"], src) dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlsplit(src) parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split("/")[1:] dst_elems = parsed_dst.path.split("/")[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def clean_task_paths(self, task): """Normalize target paths in the task.""" targets = task.get("targets", None) if targets is not None: task["targets"] = [os.path.normpath(t) for t in targets] return task def gen_tasks(self, name, plugin_category, doc=""): def flatten(task): if isinstance(task, dict): yield task else: for t in task: for ft in flatten(t): yield ft task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory(plugin_category): for task in flatten(pluginInfo.plugin_object.gen_tasks()): assert "basename" in task task = self.clean_task_paths(task) yield task for multi in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): flag = False for task in multi.plugin_object.process(task, name): flag = True yield self.clean_task_paths(task) if flag: task_dep.append("{0}_{1}".format(name, multi.plugin_object.name)) if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield {"basename": name, "doc": 
doc, "actions": None, "clean": True, "task_dep": task_dep} def scan_posts(self): """Scan all the posts.""" if self._scanned: return seen = set([]) print("Scanning posts", end="", file=sys.stderr) lower_case_tags = set([]) for wildcard, destination, template_name, use_in_feeds in self.config["post_pages"]: print(".", end="", file=sys.stderr) dirname = os.path.dirname(wildcard) for dirpath, _, _ in os.walk(dirname): dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) dest_dir = os.path.normpath(os.path.join(destination, os.path.relpath(dirpath, dirname))) full_list = glob.glob(dir_glob) # Now let's look for things that are not in default_lang for lang in self.config["TRANSLATIONS"].keys(): lang_glob = dir_glob + "." + lang translated_list = glob.glob(lang_glob) for fname in translated_list: orig_name = os.path.splitext(fname)[0] if orig_name in full_list: continue full_list.append(orig_name) # We eliminate from the list the files inside any .ipynb folder full_list = [p for p in full_list if not any([x.startswith(".") for x in p.split(os.sep)])] for base_path in full_list: if base_path in seen: continue else: seen.add(base_path) post = Post( base_path, self.config, dest_dir, use_in_feeds, self.MESSAGES, template_name, self.get_compiler(base_path), ) self.global_data[post.source_path] = post if post.use_in_feeds: self.posts.append(post.source_path) self.posts_per_year[str(post.date.year)].append(post.source_path) self.posts_per_month["{0}/{1:02d}".format(post.date.year, post.date.month)].append( post.source_path ) for tag in post.alltags: if tag.lower() in lower_case_tags: if tag not in self.posts_per_tag: # Tags that differ only in case other_tag = [k for k in self.posts_per_tag.keys() if k.lower() == tag.lower()][0] utils.LOGGER.error( "You have cases that differ only in upper/lower case: {0} and {1}".format( tag, other_tag ) ) utils.LOGGER.error("Tag {0} is used in: {1}".format(tag, post.source_path)) utils.LOGGER.error( "Tag {0} is used in: {1}".format( 
other_tag, ", ".join(self.posts_per_tag[other_tag]) ) ) sys.exit(1) else: lower_case_tags.add(tag.lower()) self.posts_per_tag[tag].append(post.source_path) self.posts_per_category[post.meta("category")].append(post.source_path) else: self.pages.append(post) if self.config["OLD_THEME_SUPPORT"]: post._add_old_metadata() self.post_per_file[post.destination_path(lang=lang)] = post self.post_per_file[post.destination_path(lang=lang, extension=post.source_ext())] = post for name, post in list(self.global_data.items()): self.timeline.append(post) self.timeline.sort(key=lambda p: p.date) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = post_timeline[i + 1] self._scanned = True print("done!", file=sys.stderr) def generic_page_renderer(self, lang, post, filters): """Render post fragments to final HTML pages.""" context = {} deps = post.deps(lang) + self.template_system.template_deps(post.template_name) deps.extend(utils.get_asset_path(x, self.THEMES) for x in ("bundles", "parent", "engine")) deps = list(filter(None, deps)) context["post"] = post context["lang"] = lang context["title"] = post.title(lang) context["description"] = post.description(lang) context["permalink"] = post.permalink(lang) context["page_list"] = self.pages if post.use_in_feeds: context["enable_comments"] = True else: context["enable_comments"] = self.config["COMMENTS_IN_STORIES"] extension = self.get_compiler(post.source_path).extension() output_name = os.path.join(self.config["OUTPUT_FOLDER"], post.destination_path(lang, extension)) deps_dict = copy(context) deps_dict.pop("post") if post.prev_post: deps_dict["PREV_LINK"] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict["NEXT_LINK"] = [post.next_post.permalink(lang)] deps_dict["OUTPUT_FOLDER"] = self.config["OUTPUT_FOLDER"] deps_dict["TRANSLATIONS"] = 
self.config["TRANSLATIONS"] deps_dict["global"] = self.GLOBAL_CONTEXT deps_dict["comments"] = context["enable_comments"] if post: deps_dict["post_translations"] = post.translated_to task = { "name": os.path.normpath(output_name), "file_dep": deps, "targets": [output_name], "actions": [(self.render_template, [post.template_name, output_name, context])], "clean": True, "uptodate": [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" deps = self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config["BLOG_TITLE"] context["description"] = self.config["BLOG_DESCRIPTION"] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.meta[lang]["title"], p.permalink(lang)) for p in posts] deps_context["global"] = self.GLOBAL_CONTEXT task = { "name": os.path.normpath(output_name), "targets": [output_name], "file_dep": deps, "actions": [(self.render_template, [template_name, output_name, context])], "clean": True, "uptodate": [config_changed(deps_context)], } return utils.apply_filters(task, filters)
class ModuleManager():
    """Discovers assembly-module plugins (yapsy, '.asm-plugin' info files) and
    dispatches module runs for the wasp interpreter.

    On construction it collects all plugins under lib/assembly/plugins,
    verifies that each plugin's declared executables exist on disk, and
    records them in ``self.executables`` (plugin name -> [(key, path), ...]).
    """

    def __init__(self, threads, kill_list, kill_list_lock, job_list, binpath):
        """Collect plugins and verify their binaries.

        :param threads: thread count handed to every plugin
        :param kill_list: shared list of jobs to kill
        :param kill_list_lock: lock guarding kill_list
        :param job_list: shared list of running jobs
        :param binpath: fallback directory searched for module binaries
        :raises Exception: if no plugins are found or a non-beta plugin's
            binary is missing
        """
        self.threads = threads
        self.kill_list = kill_list
        self.kill_list_lock = kill_list_lock
        self.job_list = job_list  # Running jobs
        self.binpath = binpath
        # Repo root is two levels above this file.
        self.root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
        self.module_bin_path = os.path.join(self.root_path, "module_bin")
        self.plugin_path = os.path.join(self.root_path, "lib", "assembly", "plugins")

        self.pmanager = PluginManager()
        locator = self.pmanager.getPluginLocator()
        locator.setPluginInfoExtension('asm-plugin')
        self.pmanager.setPluginPlaces([self.plugin_path])
        self.pmanager.collectPlugins()
        self.pmanager.locatePlugins()

        # 'none' is a valid pseudo-module in pipelines.
        self.plugins = ['none']
        num_plugins = len(self.pmanager.getAllPlugins())
        if num_plugins == 0:
            raise Exception("No Plugins Found!")
        plugins = []
        self.executables = {}
        for plugin in self.pmanager.getAllPlugins():
            plugin.threads = threads
            self.plugins.append(plugin.name)
            plugin.plugin_object.setname(plugin.name)
            ## Check for installed binaries
            try:
                version = plugin.details.get('Documentation', 'Version')
                executables = plugin.details.items('Executables')
                full_execs = [(k, self.get_executable_path(v)) for k, v in executables]
                for binary in full_execs:
                    if not os.path.exists(binary[1]):
                        if float(version) < 1:
                            # Beta plugins only warn about missing binaries.
                            print('[Warning]: {} (v{}) -- Binary does not exist for beta plugin -- {}'.format(plugin.name, version, binary[1]))
                        else:
                            raise Exception('[ERROR]: {} (v{})-- Binary does not exist -- {}'.format(plugin.name, version, binary[1]))
                self.executables[plugin.name] = full_execs
            except ConfigParser.NoSectionError:
                # Plugin declares no [Executables] section; nothing to verify.
                pass
            plugins.append(plugin.name)
        print("Plugins found [{}]: {}".format(num_plugins, sorted(plugins)))

    def run_proc(self, module, wlink, job_data, parameters):
        """Run module adapter for wasp interpreter.

        To support the Job_data mechanism, injects wlink into job_data and
        records the module's output on wlink.

        :raises Exception: unknown module, un-mergeable settings, mismatched
            input/output types, or a module that produced no output
        """
        if not self.has_plugin(module):
            raise Exception("No plugin named {}".format(module))
        plugin = self.pmanager.getPluginByName(module)
        config_settings = plugin.details.items('Settings')
        config_settings = update_settings(config_settings, parameters)
        try:
            # Executable paths win over [Settings] entries of the same key.
            settings = {k: v for k, v in self.executables[module]}
            for k, v in config_settings:
                ## Don't override
                if k not in settings:
                    settings[k] = v
            settings = settings.items()
        except Exception:
            raise Exception("Plugin Config not updated: {}!".format(module))

        #### Check input/output type compatibility
        if wlink['link']:
            for link in wlink['link']:
                if not link:
                    continue
                if link['module']:
                    try:
                        assert (self.output_type(link['module']) == self.input_type(module) or
                                self.output_type(link['module']) in self.input_type(module))
                    except AssertionError:
                        raise Exception('{} and {} have mismatched input/output types'.format(module, link['module']))
        #### Run
        job_data['wasp_chain'] = wlink
        output = plugin.plugin_object.base_call(settings, job_data, self)
        ot = self.output_type(module)
        wlink.insert_output(output, ot, plugin.name)
        if not wlink.output:
            raise Exception('"{}" module failed to produce {}'.format(module, ot))

    def output_type(self, module):
        """Declared OUTPUT type of a module's plugin object."""
        return self.pmanager.getPluginByName(module).plugin_object.OUTPUT

    def input_type(self, module):
        """Declared INPUT type of a module's plugin object."""
        return self.pmanager.getPluginByName(module).plugin_object.INPUT

    def _plugin_setting(self, module, key):
        # Return the value of `key` from the module's [Settings] section,
        # or None when the plugin, section, or key is missing (preserves the
        # original swallow-everything behavior of the public getters).
        try:
            plugin = self.pmanager.getPluginByName(module)
            for k, v in plugin.details.items('Settings'):
                if k == key:
                    return v
            return None
        except Exception:
            return None

    def get_short_name(self, module):
        """Return the module's 'short_name' setting, or None."""
        return self._plugin_setting(module, 'short_name')

    def get_executable(self, module):
        """Return the module's executable setting, or None.

        NOTE(review): the original body was a byte-for-byte copy of
        get_short_name and therefore returned the 'short_name' setting;
        that behavior is preserved here — confirm the intended settings key.
        """
        return self._plugin_setting(module, 'short_name')

    def verify_file(self, filename):
        """Raise if `filename` does not exist."""
        if not os.path.exists(filename):
            raise Exception("File not found: %s" % filename)

    def get_executable_path(self, filename, verify=False):
        """Resolve a binary name against module_bin_path, then binpath.

        :param verify: when True, raise if the resolved path does not exist
        """
        guess1 = os.path.join(self.module_bin_path, filename)
        guess2 = os.path.join(self.binpath, filename)
        fullname = guess1 if os.path.exists(guess1) else guess2
        if verify:
            # Fixed: original called the bare name verify_file(), which is
            # undefined at module scope and raised NameError.
            self.verify_file(fullname)
        return fullname

    def has_plugin(self, plugin):
        """True if `plugin` (case-insensitive) was collected at startup."""
        if plugin.lower() not in self.plugins:
            logging.error("{} plugin not found".format(plugin))
            return False
        return True

    def valid_modules(self, l):
        """ Return filtered list of available modules """
        return [m for m in l if not m.startswith('?') and self.has_plugin(m)]

    def validate_pipe(self, pipe):
        """Raise if any word in the pipeline is neither a ?param nor a plugin."""
        for stage in pipe:
            for word in stage.replace('+', ' ').split(' '):
                if not (word.startswith('?') or self.has_plugin(word)):
                    raise Exception('Invalid pipeline command')

    def split_pipe(self, l):
        """ Splits a multi-module string in to bins
        Ex: 'kiki ?k=29 velvet' -> [[kiki, ?k=29], [velvet]]
        """
        bins = []
        for word in l:
            if word.startswith('?'):
                # ?params attach to the most recent module bin.
                bins[-1].append(word)
            else:
                bins.append([word])
        return bins

    def parse_input(self, pipe):
        """ Parses inital pipe and separates branching bins
        Ex: ['sga', '?p=True', 'kiki ?k=31 velvet', 'sspace']
        """
        stages = phelper.parse_branches(pipe)
        return stages
class app():
    """Verum plugin host.

    Loads enrichment / interface (storage) / score / minion plugins with
    yapsy and catalogs them in an in-memory sqlite database, which the
    query helpers (get_enrichments, get_minions, ...) then select from.
    """

    # Class-level defaults; each is shadowed by an instance attribute in __init__.
    db = None            # the sqlite database of plugins
    plugins = None       # Configured plugins (yapsy PluginManager)
    storage = None       # The plugin to use for storage
    PluginFolder = None  # Folder where the plugins are
    MinionFolder = None  # Folder where the minions are
    score = None         # the plugin to use for scoring
    classify = None      # the classification plugin
    helper = None        # The verum helper functions
    loc = None           # The verum location

    def __init__(self, PluginFolder=PluginFolder, MinionFolder=MinionFolder):
        """Set up the plugin catalog and, if a folder is set, load plugins.

        :param PluginFolder: directory containing the plugins
        :param MinionFolder: directory containing the minions
        """
        self.PluginFolder = PluginFolder
        self.MinionFolder = MinionFolder

        # Load enrichments database
        self.db = self.set_db()

        # Load the helper module that lives in the same directory.
        # NOTE(review): `loc` is a module-level global defined outside this class.
        fp, pathname, description = imp.find_module("helper", [loc])
        self.helper = imp.load_module("helper", fp, pathname, description)

        # Save the verum location
        self.loc = loc[:-6]  # -6 removes the trailing "verum/" from the location.

        # Load the plugins Directory
        if self.PluginFolder:
            self.load_plugins()
        else:
            logging.warning(
                "Plugin folder doesn't exist. Plugins not configured. Please run set_plugin_folder(<PluginFolder>) to set the plugin folder and then load_plugins() to load plugins."
            )

    ## PLUGIN FUNCTIONS
    def set_plugin_folder(self, PluginFolder):
        self.PluginFolder = PluginFolder

    def get_plugin_folder(self):
        return self.PluginFolder

    def load_plugins(self):
        """Collect plugins from the plugin/minion folders and register each
        one in the sqlite catalog according to its configure() tuple."""
        print("Configuring Plugin manager.")
        self.plugins = PluginManager()
        if self.MinionFolder is None:
            self.plugins.setPluginPlaces([self.PluginFolder])
        else:
            self.plugins.setPluginPlaces([self.PluginFolder, self.MinionFolder])
        self.plugins.locatePlugins()
        self.plugins.loadPlugins()
        print("Plugin manager configured.")

        cur = self.db.cursor()
        # Clear tables so a reload starts from a clean catalog.
        cur.execute("""DELETE FROM enrichments""")
        cur.execute("""DELETE FROM inputs""")
        cur.execute("""DELETE FROM storage""")
        cur.execute("""DELETE FROM score""")
        cur.execute("""DELETE FROM minion""")

        for plugin in self.plugins.getAllPlugins():
            # configure() returns a tuple whose first element is the plugin
            # type; the remaining layout differs per type (see below).
            plugin_config = plugin.plugin_object.configure()
            if plugin_config[0] == 'enrichment':  # type
                cur.execute(
                    '''INSERT INTO enrichments VALUES (?, ?, ?, ?, ?)''',
                    (
                        plugin_config[2],       # Name
                        int(plugin_config[1]),  # Enabled
                        plugin_config[3],       # Descripton
                        plugin_config[5],       # Cost
                        plugin_config[6],       # Speed
                    ))
                for inp in plugin_config[4]:  # inputs
                    # Insert into inputs table
                    cur.execute('''INSERT INTO inputs VALUES (?,?)''',
                                (plugin_config[2], inp))
                self.db.commit()
            elif plugin_config[0] == 'interface':  # type
                cur.execute('''INSERT INTO storage VALUES (?, ?)''',
                            (plugin_config[2], int(plugin_config[1])))
            elif plugin_config[0] == 'score':
                cur.execute(
                    '''INSERT INTO score VALUES (?, ?, ?, ?, ?)''',
                    (
                        plugin_config[2],       # Name
                        int(plugin_config[1]),  # Enabled
                        plugin_config[3],       # Descripton
                        plugin_config[4],       # Cost
                        plugin_config[5],       # Speed
                    ))
            elif plugin_config[0] == 'minion':
                # Minions get re-configured with a reference to this app.
                plugin_config = plugin.plugin_object.configure(self)
                cur.execute(
                    '''INSERT INTO minion VALUES (?, ?, ?, ?)''',
                    (
                        plugin_config[2],       # Name
                        int(plugin_config[1]),  # Enabled
                        plugin_config[3],       # Descripton
                        plugin_config[4],       # Speed
                    ))
            if plugin.name == "classify":
                # Classify is a unique name. TODO: figure out if handling multiple 'classify' plugins is necessary
                self.classify = plugin.plugin_object
            print("Configured {2} plugin {0}. Success: {1}".format(
                plugin.name, plugin_config[1], plugin_config[0]))

    def set_db(self):
        """ Sets up the enrichment sqlite in memory database """
        conn = sqlite3.connect(":memory:")
        cur = conn.cursor()
        # Create enrichments table
        cur.execute('''CREATE TABLE enrichments (name text NOT NULL PRIMARY KEY,
                                                 configured int,
                                                 description text,
                                                 cost int,
                                                 speed int);''')
        # Create inputs table
        cur.execute('''CREATE TABLE inputs (name text NOT NULL,
                                            input text NOT NULL,
                                            PRIMARY KEY (name, input),
                                            FOREIGN KEY (name) REFERENCES enrichments(name));''')
        # Create interface table
        cur.execute('''CREATE TABLE storage (name text NOT NULL PRIMARY KEY,
                                             configured int );''')
        # Create score table
        cur.execute('''CREATE TABLE score (name text NOT NULL PRIMARY KEY,
                                           configured int,
                                           description text,
                                           cost int,
                                           speed int);''')
        # Create minion table
        cur.execute('''CREATE TABLE minion (name text NOT NULL PRIMARY KEY,
                                            configured int,
                                            description text,
                                            cost int);''')
        conn.commit()
        return conn

    ## ENRICHMENT FUNCTIONS
    def get_inputs(self):
        """ NoneType -> list of strings

        :return: A list of the potential enrichment inputs (ip, domain, etc)
        """
        cur = self.db.cursor()
        return [row[0] for row in cur.execute('''SELECT DISTINCT input FROM inputs;''')]

    def get_enrichments(self, inputs, cost=10000, speed=10000, configured=True):
        """
        :param inputs: list of input types. (e.g. ["ip", "domain"]) All
            enrichments that match at least 1 input type will be returned.
            A bare string is treated as a single-element list.
        :param cost: integer 1-10 of resource cost of running the enrichment. (1 = cheapest)
        :param speed: integer 1-10 speed of enrichment. (1 = fastest)
        :param configured: plugin is correctly configured. If false, plugin may not run correctly.
        :return: list of tuples of (name, input type) of enrichments matching the criteria
        """
        cur = self.db.cursor()
        if isinstance(inputs, str):
            inputs = [inputs]
        plugins = list()
        for row in cur.execute(
                """ SELECT DISTINCT e.name, i.input
                    FROM enrichments e, inputs i
                    WHERE e.name = i.name
                      AND e.cost <= ?
                      AND e.speed <= ?
                      AND configured = ?
                      AND i.input IN ({0})""".format(("?," * len(inputs))[:-1]),
                [cost, speed, int(configured)] + inputs):
            plugins.append(tuple(row))
        return plugins

    def run_enrichments(self, topic, topic_type, names=None, cost=10, speed=10, start_time=""):
        """
        :param topic: topic to enrich (e.g. "1.1.1.1", "www.google.com")
        :param topic_type: type of topic (e.g. "ip", "domain")
        :param names: a name (as string) or a list of names of enrichments to use
        :param cost: integer 1-10 of resource cost of running the enrichment. (1 = cheapest)
        :param speed: integer 1-10 speed of enrichment. (1 = fastest)
        :return: networkx MultiDiGraph representing the enrichment of the topic
        """
        enrichments = self.get_enrichments([topic_type], cost, speed, configured=True)
        enrichments = [e[0] for e in enrichments]
        g = nx.MultiDiGraph()
        # IF a name(s) are given, subset to them
        if names:
            enrichments = set(enrichments).intersection(set(names))
        for enrichment in enrichments:
            plugin = self.plugins.getPluginByName(enrichment)
            g2 = plugin.plugin_object.run(topic, start_time)
            # Merge the plugin's graph into the accumulator.
            # NOTE(review): positional attr-dict args are the networkx 1.x
            # API; verify against the pinned networkx version.
            for node, props in g2.nodes(data=True):
                g.add_node(node, props)
            for edge in g2.edges(data=True):
                g.add_edge(edge[0], edge[1], attr_dict=edge[2])
        return g

    ## INTERFACE FUNCTIONS
    def get_interfaces(self, configured=None):
        """
        :param configured: True/False to filter on the configured flag; None for all.
        :return: list of strings of names of interface plugins
        """
        cur = self.db.cursor()
        interfaces = list()
        if configured is None:
            for row in cur.execute('''SELECT DISTINCT name FROM storage;'''):
                interfaces.append(row[0])
        else:
            for row in cur.execute(
                    '''SELECT DISTINCT name from storage WHERE configured=?;''',
                    (int(configured), )):
                interfaces.append(row[0])
        return interfaces

    def get_default_interface(self):
        return self.storage

    def set_interface(self, interface):
        """
        :param interface: The name of the plugin to use for storage.

        Sets the storage backend to use.  It must have been configured
        through a plugin prior to setting.
        """
        cur = self.db.cursor()
        configured_storage = list()
        for row in cur.execute('''SELECT DISTINCT name FROM storage WHERE configured=1;'''):
            configured_storage.append(row[0])
        if interface in configured_storage:
            self.storage = interface
        else:
            raise ValueError(
                "Requested interface {0} not configured. Options are {1}.".format(
                    interface, configured_storage))

    def get_minions(self, cost=10000, configured=None):
        """
        :param cost: a maximum cost of running the minion
        :param configured: True, False, or None (for both).
        :return: list of tuples of (name, description) of minion plugins
        """
        cur = self.db.cursor()
        minions = list()
        if configured is None:
            for row in cur.execute(
                    '''SELECT DISTINCT name, description FROM minion WHERE cost <= ?;''',
                    [int(cost)]):
                minions.append(tuple(row[0:2]))
        else:
            for row in cur.execute(
                    '''SELECT DISTINCT name, description FROM minion WHERE cost <= ? AND configured=?;''',
                    [int(cost), int(configured)]):
                minions.append(tuple(row[0:2]))
        return minions

    def start_minions(self, names=None, cost=10000):
        """
        :param names: a list of names of minions to run
        :param cost: a maximum cost for minions
        """
        minions = self.get_minions(cost=cost, configured=True)
        minions = [m[0] for m in minions]
        # IF a name(s) are given, subset to them
        if names:
            minions = set(minions).intersection(set(names))
        for minion in minions:
            plugin = self.plugins.getPluginByName(minion)
            plugin.plugin_object.start()

    def get_running_minions(self):
        """
        :return: A set of names of minions which are running
        """
        minions = self.get_minions(cost=10000, configured=True)
        minions = [m[0] for m in minions]
        running_minions = set()
        for minion in minions:
            plugin = self.plugins.getPluginByName(minion)
            if plugin.plugin_object.isAlive():
                running_minions.add(minion)
        return running_minions

    def stop_minions(self, names=None):
        """Stop all running minions, or only those named in `names`."""
        minions = self.get_running_minions()
        if names is not None:
            minions = set(minions).intersection(set(names))
        for minion in minions:
            plugin = self.plugins.getPluginByName(minion)
            plugin.plugin_object.stop()

    def run_query(self, topic, max_depth=4, dont_follow=['enrichment', 'classification'], storage=None):
        """
        :param topic: node(s) to query around
        :param max_depth: maximum traversal depth
        :param dont_follow: edge types not to traverse
        :param storage: the storage plugin to use (defaults to the configured one)
        :return: a networkx subgraph surrounding the topic
        """
        if not storage:
            storage = self.storage
        if not storage:
            raise ValueError(
                "No storage set. run set_storage() to set or provide directly. Storage must be a configured plugin."
            )
        # Fixed: original looked up self.storage here, silently ignoring an
        # explicitly passed `storage` argument.
        plugin = self.plugins.getPluginByName(storage)
        return plugin.plugin_object.query(topic, max_depth=max_depth, dont_follow=dont_follow)

    def store_graph(self, g, storage=None):
        """
        :param g: a networkx graph to merge with the set storage
        :param storage: the storage plugin to use (defaults to the configured one)
        """
        if not storage:
            storage = self.storage
        if not storage:
            raise ValueError(
                "No storage set. run set_storage() to set or provide directly. Storage must be a configured plugin."
            )
        # Fixed: original looked up self.storage here, silently ignoring an
        # explicitly passed `storage` argument.
        plugin = self.plugins.getPluginByName(storage)
        # merge the graph
        plugin.plugin_object.enrich(g)

    ## SCORE FUNCTIONS
    def get_scoring_plugins(self, cost=10000, speed=10000, names=None, configured=True):
        """
        :param cost: integer 1-10 of resource cost of running the plugin. (1 = cheapest)
        :param speed: integer 1-10 speed of plugin. (1 = fastest)
        :param names: optional list of plugin names to restrict to
        :param configured: plugin is correctly configured. If false, plugin may not run correctly.
        :return: list of names of scoring plugins matching the criteria
        """
        cur = self.db.cursor()
        plugins = list()
        if names is None:
            for row in cur.execute(
                    '''SELECT DISTINCT name FROM score WHERE cost <= ? AND speed <= ? AND configured = ?''',
                    [cost, speed, int(configured)]):
                plugins.append(row[0])
        else:
            for row in cur.execute(
                    '''SELECT DISTINCT name FROM score
                       WHERE cost <= ? AND speed <= ? AND configured = ?
                         AND name IN ({0});'''.format(("?," * len(names))[:-1]),
                    [cost, speed, int(configured)] + names):
                plugins.append(row[0])
        return plugins

    def score_subgraph(self, topic, sg, plugin_name=None):
        """Score subgraph `sg` around `topic` with the named (or default) plugin."""
        if plugin_name is None:
            plugin_name = self.score
        score_plugin = self.plugins.getPluginByName(plugin_name)
        return score_plugin.plugin_object.score(sg, topic)

    def set_scoring_plugin(self, plugin):
        """
        :param plugin: The name of the plugin to use for scoring.

        Sets the scoring plugin to use.  It must have been configured
        through a plugin prior to setting.
        """
        cur = self.db.cursor()
        configured_scoring_plugins = list()
        for row in cur.execute('''SELECT DISTINCT name FROM score WHERE configured=1;'''):
            configured_scoring_plugins.append(row[0])
        if plugin in configured_scoring_plugins:
            self.score = plugin
        else:
            raise ValueError(
                "Requested scoring plugin {0} is not configured. Options are {1}.".format(
                    plugin, configured_scoring_plugins))

    def get_default_scoring_plugin(self):
        return self.score
class Nikola(object):
    """Class that handles site generation.

    Takes a site config as keyword arguments on creation; merges it over
    the built-in defaults, loads and activates yapsy plugins, and exposes
    path/link helpers plus doit-task generators for rendering the site.
    """

    # Plugins shipped with the source tree that are only activated when
    # explicitly listed in ENABLED_EXTRAS.
    EXTRA_PLUGINS = [
        'planetoid',
        'ipynb',
        'local_search',
        'render_mustache',
    ]

    def __init__(self, **config):
        """Setup proper environment for running tasks."""
        self.global_data = {}
        self.posts_per_year = defaultdict(list)
        self.posts_per_month = defaultdict(list)
        self.posts_per_tag = defaultdict(list)
        self.posts_per_category = defaultdict(list)
        self.post_per_file = {}
        self.timeline = []
        self.pages = []
        self._scanned = False
        self._template_system = None
        self._THEMES = None
        if not config:
            self.configured = False
        else:
            self.configured = True

        # This is the default config
        self.config = {
            'ADD_THIS_BUTTONS': True,
            'ARCHIVE_PATH': "",
            'ARCHIVE_FILENAME': "archive.html",
            'BODY_END': "",
            'CACHE_FOLDER': 'cache',
            'CODE_COLOR_SCHEME': 'default',
            'COMMENT_SYSTEM': 'disqus',
            'COMMENTS_IN_GALLERIES': False,
            'COMMENTS_IN_STORIES': False,
            'COMPILERS': {
                "rest": ('.txt', '.rst'),
                "markdown": ('.md', '.mdown', '.markdown'),
                "textile": ('.textile', ),
                "txt2tags": ('.t2t', ),
                "bbcode": ('.bb', ),
                "wiki": ('.wiki', ),
                "ipynb": ('.ipynb', ),
                "html": ('.html', '.htm')
            },
            'CONTENT_FOOTER': '',
            'COPY_SOURCES': True,
            'CREATE_MONTHLY_ARCHIVE': False,
            'DATE_FORMAT': '%Y-%m-%d %H:%M',
            'DEFAULT_LANG': "en",
            'DEPLOY_COMMANDS': [],
            'DISABLED_PLUGINS': (),
            'COMMENT_SYSTEM_ID': 'nikolademo',
            'ENABLED_EXTRAS': (),
            'EXTRA_HEAD_DATA': '',
            'FAVICONS': {},
            'FEED_LENGTH': 10,
            'FILE_METADATA_REGEXP': None,
            'ADDITIONAL_METADATA': {},
            'FILES_FOLDERS': {
                'files': ''
            },
            'FILTERS': {},
            'GALLERY_PATH': 'galleries',
            'GZIP_FILES': False,
            'GZIP_EXTENSIONS': ('.txt', '.htm', '.html', '.css', '.js',
                                '.json'),
            'HIDE_SOURCELINK': False,
            'HIDE_UNTRANSLATED_POSTS': False,
            'HYPHENATE': False,
            'INDEX_DISPLAY_POST_COUNT': 10,
            'INDEX_FILE': 'index.html',
            'INDEX_TEASERS': False,
            'INDEXES_TITLE': "",
            'INDEXES_PAGES': "",
            'INDEX_PATH': '',
            'LICENSE': '',
            'LINK_CHECK_WHITELIST': [],
            'LISTINGS_FOLDER': 'listings',
            'NAVIGATION_LINKS': None,
            'MARKDOWN_EXTENSIONS': ['fenced_code', 'codehilite'],
            'MAX_IMAGE_SIZE': 1280,
            'MATHJAX_CONFIG': '',
            'OLD_THEME_SUPPORT': True,
            'OUTPUT_FOLDER': 'output',
            'POSTS': (("posts/*.txt", "posts", "post.tmpl"), ),
            'PAGES': (("stories/*.txt", "stories", "story.tmpl"), ),
            'PRETTY_URLS': False,
            'FUTURE_IS_NOW': False,
            'READ_MORE_LINK':
            '<p class="more"><a href="{link}">{read_more}…</a></p>',
            'REDIRECTIONS': [],
            'RSS_LINK': None,
            'RSS_PATH': '',
            'RSS_TEASERS': True,
            'SEARCH_FORM': '',
            'SLUG_TAG_PATH': True,
            'SOCIAL_BUTTONS_CODE': SOCIAL_BUTTONS_CODE,
            'STORY_INDEX': False,
            'STRIP_INDEXES': False,
            'SITEMAP_INCLUDE_FILELESS_DIRS': True,
            'TAG_PATH': 'categories',
            'TAG_PAGES_ARE_INDEXES': False,
            'THEME': 'bootstrap',
            'THEME_REVEAL_CONFIG_SUBTHEME': 'sky',
            'THEME_REVEAL_CONFIG_TRANSITION': 'cube',
            'THUMBNAIL_SIZE': 180,
            'USE_BUNDLES': True,
            'USE_CDN': False,
            'USE_FILENAME_AS_TITLE': True,
            'TIMEZONE': None,
            'DEPLOY_DRAFTS': True,
            'DEPLOY_FUTURE': False,
            'SCHEDULE_ALL': False,
            'SCHEDULE_RULE': '',
            'SCHEDULE_FORCE_TODAY': False
        }
        # User config wins over the defaults above.
        self.config.update(config)

        # Make sure we have pyphen installed if we are using it
        if self.config.get('HYPHENATE') and pyphen is None:
            print('WARNING: To use the hyphenation, you have to install '
                  'the "pyphen" package.')
            print('WARNING: Setting HYPHENATE to False.')
            self.config['HYPHENATE'] = False

        # Deprecating post_compilers
        # TODO: remove on v7
        if 'post_compilers' in config:
            print(
                "WARNING: The post_compilers option is deprecated, use COMPILERS instead."
            )
            if 'COMPILERS' in config:
                print(
                    "WARNING: COMPILERS conflicts with post_compilers, ignoring post_compilers."
                )
            else:
                self.config['COMPILERS'] = config['post_compilers']

        # Deprecating post_pages
        # TODO: remove on v7
        if 'post_pages' in config:
            print(
                "WARNING: The post_pages option is deprecated, use POSTS and PAGES instead."
            )
            if 'POSTS' in config or 'PAGES' in config:
                print(
                    "WARNING: POSTS and PAGES conflict with post_pages, ignoring post_pages."
                )
            else:
                self.config['POSTS'] = [
                    item[:3] for item in config['post_pages'] if item[-1]
                ]
                self.config['PAGES'] = [
                    item[:3] for item in config['post_pages'] if not item[-1]
                ]
        # FIXME: Internally, we still use post_pages because it's a pain to
        # change it
        self.config['post_pages'] = []
        for i1, i2, i3 in self.config['POSTS']:
            self.config['post_pages'].append([i1, i2, i3, True])
        for i1, i2, i3 in self.config['PAGES']:
            self.config['post_pages'].append([i1, i2, i3, False])

        # Deprecating DISQUS_FORUM
        # TODO: remove on v7
        if 'DISQUS_FORUM' in config:
            print(
                "WARNING: The DISQUS_FORUM option is deprecated, use COMMENT_SYSTEM_ID instead."
            )
            if 'COMMENT_SYSTEM_ID' in config:
                print(
                    "WARNING: DISQUS_FORUM conflicts with COMMENT_SYSTEM_ID, ignoring DISQUS_FORUM."
                )
            else:
                self.config['COMMENT_SYSTEM_ID'] = config['DISQUS_FORUM']

        # Deprecating the ANALYTICS option
        # TODO: remove on v7
        if 'ANALYTICS' in config:
            print(
                "WARNING: The ANALYTICS option is deprecated, use BODY_END instead."
            )
            if 'BODY_END' in config:
                print(
                    "WARNING: ANALYTICS conflicts with BODY_END, ignoring ANALYTICS."
                )
            else:
                self.config['BODY_END'] = config['ANALYTICS']

        # Deprecating the SIDEBAR_LINKS option
        # TODO: remove on v7
        if 'SIDEBAR_LINKS' in config:
            print(
                "WARNING: The SIDEBAR_LINKS option is deprecated, use NAVIGATION_LINKS instead."
            )
            if 'NAVIGATION_LINKS' in config:
                print(
                    "WARNING: The SIDEBAR_LINKS conflicts with NAVIGATION_LINKS, ignoring SIDEBAR_LINKS."
                )
            else:
                self.config['NAVIGATION_LINKS'] = config['SIDEBAR_LINKS']
        # Compatibility alias
        self.config['SIDEBAR_LINKS'] = self.config['NAVIGATION_LINKS']
        if self.config['NAVIGATION_LINKS'] in (None, {}):
            self.config['NAVIGATION_LINKS'] = {self.config['DEFAULT_LANG']: ()}

        # Deprecating the ADD_THIS_BUTTONS option
        # TODO: remove on v7
        if 'ADD_THIS_BUTTONS' in config:
            print(
                "WARNING: The ADD_THIS_BUTTONS option is deprecated, use SOCIAL_BUTTONS_CODE instead."
            )
            if not config['ADD_THIS_BUTTONS']:
                print(
                    "WARNING: Setting SOCIAL_BUTTONS_CODE to empty because ADD_THIS_BUTTONS is False."
                )
                self.config['SOCIAL_BUTTONS_CODE'] = ''

        # STRIP_INDEX_HTML config has been replaced with STRIP_INDEXES
        # Port it if only the older form is there
        # TODO: remove on v7
        if 'STRIP_INDEX_HTML' in config and 'STRIP_INDEXES' not in config:
            print(
                "WARNING: You should configure STRIP_INDEXES instead of STRIP_INDEX_HTML"
            )
            self.config['STRIP_INDEXES'] = config['STRIP_INDEX_HTML']

        # PRETTY_URLS defaults to enabling STRIP_INDEXES unless explicitly
        # disabled
        if config.get('PRETTY_URLS', False) and 'STRIP_INDEXES' not in config:
            self.config['STRIP_INDEXES'] = True

        # NOTE(review): this hides the source link when COPY_SOURCES is
        # truthy, which looks inverted (no sources copied -> nothing to link
        # to). Confirm intent before changing; behavior kept as-is.
        if config.get('COPY_SOURCES') and not self.config['HIDE_SOURCELINK']:
            self.config['HIDE_SOURCELINK'] = True

        self.config['TRANSLATIONS'] = self.config.get(
            'TRANSLATIONS', {self.config['DEFAULT_LANG']: ''})

        # SITE_URL is required, but if the deprecated BLOG_URL
        # is available, use it and warn
        # TODO: remove on v7
        if 'SITE_URL' not in self.config:
            if 'BLOG_URL' in self.config:
                print(
                    "WARNING: You should configure SITE_URL instead of BLOG_URL"
                )
                self.config['SITE_URL'] = self.config['BLOG_URL']

        self.default_lang = self.config['DEFAULT_LANG']
        self.translations = self.config['TRANSLATIONS']

        # BASE_URL defaults to SITE_URL
        if 'BASE_URL' not in self.config:
            self.config['BASE_URL'] = self.config.get('SITE_URL')
        # BASE_URL should *always* end in /
        if self.config['BASE_URL'] and self.config['BASE_URL'][-1] != '/':
            print("WARNING: Your BASE_URL doesn't end in / -- adding it.")
            # BUG FIX: the warning promised to add the trailing slash but the
            # append statement was missing.
            self.config['BASE_URL'] += '/'

        self.plugin_manager = PluginManager(
            categories_filter={
                "Command": Command,
                "Task": Task,
                "LateTask": LateTask,
                "TemplateSystem": TemplateSystem,
                "PageCompiler": PageCompiler,
                "TaskMultiplier": TaskMultiplier,
                "RestExtension": RestExtension,
            })
        self.plugin_manager.setPluginInfoExtension('plugin')
        if sys.version_info[0] == 3:
            places = [
                os.path.join(os.path.dirname(__file__), 'plugins'),
                os.path.join(os.getcwd(), 'plugins'),
            ]
        else:
            # Python 2: plugin paths must be byte strings.
            places = [
                os.path.join(os.path.dirname(__file__),
                             utils.sys_encode('plugins')),
                os.path.join(os.getcwd(), utils.sys_encode('plugins')),
            ]
        self.plugin_manager.setPluginPlaces(places)
        self.plugin_manager.collectPlugins()

        self.commands = {}
        # Activate all command plugins
        for plugin_info in self.plugin_manager.getPluginsOfCategory("Command"):
            if (plugin_info.name in self.config['DISABLED_PLUGINS']
                    or (plugin_info.name in self.EXTRA_PLUGINS and
                        plugin_info.name not in self.config['ENABLED_EXTRAS'])):
                self.plugin_manager.removePluginFromCategory(
                    plugin_info, "Command")
                continue
            self.plugin_manager.activatePluginByName(plugin_info.name)
            plugin_info.plugin_object.set_site(self)
            plugin_info.plugin_object.short_help = plugin_info.description
            self.commands[plugin_info.name] = plugin_info.plugin_object

        # Activate all task plugins
        for task_type in ["Task", "LateTask"]:
            for plugin_info in self.plugin_manager.getPluginsOfCategory(
                    task_type):
                if (plugin_info.name in self.config['DISABLED_PLUGINS']
                        or (plugin_info.name in self.EXTRA_PLUGINS and
                            plugin_info.name not in
                            self.config['ENABLED_EXTRAS'])):
                    self.plugin_manager.removePluginFromCategory(
                        plugin_info, task_type)
                    continue
                self.plugin_manager.activatePluginByName(plugin_info.name)
                plugin_info.plugin_object.set_site(self)

        # Activate all multiplier plugins
        for plugin_info in self.plugin_manager.getPluginsOfCategory(
                "TaskMultiplier"):
            if (plugin_info.name in self.config['DISABLED_PLUGINS']
                    or (plugin_info.name in self.EXTRA_PLUGINS and
                        plugin_info.name not in self.config['ENABLED_EXTRAS'])):
                # BUG FIX: previously removed from the stale `task_type`
                # category left over from the loop above ("LateTask"),
                # not from "TaskMultiplier".
                self.plugin_manager.removePluginFromCategory(
                    plugin_info, "TaskMultiplier")
                continue
            self.plugin_manager.activatePluginByName(plugin_info.name)
            plugin_info.plugin_object.set_site(self)

        # Activate all required compiler plugins
        for plugin_info in self.plugin_manager.getPluginsOfCategory(
                "PageCompiler"):
            if plugin_info.name in self.config["COMPILERS"].keys():
                self.plugin_manager.activatePluginByName(plugin_info.name)
                plugin_info.plugin_object.set_site(self)

        # set global_context for template rendering
        self._GLOBAL_CONTEXT = {}
        self._GLOBAL_CONTEXT['_link'] = self.link
        self._GLOBAL_CONTEXT['set_locale'] = s_l
        self._GLOBAL_CONTEXT['rel_link'] = self.rel_link
        self._GLOBAL_CONTEXT['abs_link'] = self.abs_link
        self._GLOBAL_CONTEXT['exists'] = self.file_exists
        self._GLOBAL_CONTEXT['SLUG_TAG_PATH'] = self.config['SLUG_TAG_PATH']
        self._GLOBAL_CONTEXT['index_display_post_count'] = self.config[
            'INDEX_DISPLAY_POST_COUNT']
        self._GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES']
        self._GLOBAL_CONTEXT['use_cdn'] = self.config.get("USE_CDN")
        self._GLOBAL_CONTEXT['favicons'] = self.config['FAVICONS']
        self._GLOBAL_CONTEXT['date_format'] = self.config.get(
            'DATE_FORMAT', '%Y-%m-%d %H:%M')
        self._GLOBAL_CONTEXT['blog_author'] = self.config.get('BLOG_AUTHOR')
        self._GLOBAL_CONTEXT['blog_title'] = self.config.get('BLOG_TITLE')
        # TODO: remove fallback in v7
        self._GLOBAL_CONTEXT['blog_url'] = self.config.get(
            'SITE_URL', self.config.get('BLOG_URL'))
        self._GLOBAL_CONTEXT['blog_desc'] = self.config.get('BLOG_DESCRIPTION')
        self._GLOBAL_CONTEXT['body_end'] = self.config.get('BODY_END')
        # TODO: remove in v7
        self._GLOBAL_CONTEXT['analytics'] = self.config.get('BODY_END')
        # TODO: remove in v7
        self._GLOBAL_CONTEXT['add_this_buttons'] = self.config.get(
            'SOCIAL_BUTTONS_CODE')
        self._GLOBAL_CONTEXT['social_buttons_code'] = self.config.get(
            'SOCIAL_BUTTONS_CODE')
        self._GLOBAL_CONTEXT['translations'] = self.config.get('TRANSLATIONS')
        self._GLOBAL_CONTEXT['license'] = self.config.get('LICENSE')
        self._GLOBAL_CONTEXT['search_form'] = self.config.get('SEARCH_FORM')
        self._GLOBAL_CONTEXT['comment_system'] = self.config.get(
            'COMMENT_SYSTEM')
        self._GLOBAL_CONTEXT['comment_system_id'] = self.config.get(
            'COMMENT_SYSTEM_ID')
        # TODO: remove in v7
        self._GLOBAL_CONTEXT['disqus_forum'] = self.config.get(
            'COMMENT_SYSTEM_ID')
        self._GLOBAL_CONTEXT['mathjax_config'] = self.config.get(
            'MATHJAX_CONFIG')
        self._GLOBAL_CONTEXT['subtheme'] = self.config.get(
            'THEME_REVEAL_CONFIG_SUBTHEME')
        self._GLOBAL_CONTEXT['transition'] = self.config.get(
            'THEME_REVEAL_CONFIG_TRANSITION')
        self._GLOBAL_CONTEXT['content_footer'] = self.config.get(
            'CONTENT_FOOTER')
        self._GLOBAL_CONTEXT['rss_path'] = self.config.get('RSS_PATH')
        self._GLOBAL_CONTEXT['rss_link'] = self.config.get('RSS_LINK')
        self._GLOBAL_CONTEXT['navigation_links'] = utils.Functionary(
            list, self.config['DEFAULT_LANG'])
        for k, v in self.config.get('NAVIGATION_LINKS', {}).items():
            self._GLOBAL_CONTEXT['navigation_links'][k] = v
        # TODO: remove on v7
        # Compatibility alias
        self._GLOBAL_CONTEXT['sidebar_links'] = self._GLOBAL_CONTEXT[
            'navigation_links']
        self._GLOBAL_CONTEXT['twitter_card'] = self.config.get(
            'TWITTER_CARD', {})
        self._GLOBAL_CONTEXT['hide_sourcelink'] = self.config.get(
            'HIDE_SOURCELINK')
        self._GLOBAL_CONTEXT['extra_head_data'] = self.config.get(
            'EXTRA_HEAD_DATA')
        # User-supplied GLOBAL_CONTEXT overrides everything above.
        self._GLOBAL_CONTEXT.update(self.config.get('GLOBAL_CONTEXT', {}))

        # Load compiler plugins
        self.compilers = {}
        self.inverse_compilers = {}
        for plugin_info in self.plugin_manager.getPluginsOfCategory(
                "PageCompiler"):
            self.compilers[plugin_info.name] = \
                plugin_info.plugin_object

    def _get_themes(self):
        """Return the theme chain for the configured THEME (cached)."""
        if self._THEMES is None:
            # Check for old theme names (Issue #650) TODO: remove in v7
            theme_replacements = {
                'site': 'bootstrap',
                'orphan': 'base',
                'default': 'oldfashioned',
            }
            if self.config['THEME'] in theme_replacements:
                warnings.warn(
                    'You are using the old theme "{0}", using "{1}" instead.'.
                    format(self.config['THEME'],
                           theme_replacements[self.config['THEME']]))
                self.config['THEME'] = theme_replacements[self.config['THEME']]
                if self.config['THEME'] == 'oldfashioned':
                    warnings.warn(
                        '''You may need to install the "oldfashioned" theme '''
                        '''from themes.nikola.ralsina.com.ar because it's not '''
                        '''shipped by default anymore.''')
                warnings.warn('Please change your THEME setting.')
            try:
                self._THEMES = utils.get_theme_chain(self.config['THEME'])
            except Exception:
                warnings.warn(
                    '''Can't load theme "{0}", using 'bootstrap' instead.'''.
                    format(self.config['THEME']))
                self.config['THEME'] = 'bootstrap'
                # Retry with the fallback theme.
                return self._get_themes()
            # Check consistency of USE_CDN and the current THEME (Issue #386)
            if self.config['USE_CDN']:
                bootstrap_path = utils.get_asset_path(
                    os.path.join('assets', 'css', 'bootstrap.min.css'),
                    self._THEMES)
                if bootstrap_path and bootstrap_path.split(
                        os.sep)[-4] not in ['bootstrap', 'bootstrap3']:
                    warnings.warn(
                        'The USE_CDN option may be incompatible with your theme, because it uses a hosted version of bootstrap.'
                    )
        return self._THEMES

    THEMES = property(_get_themes)

    def _get_messages(self):
        """Load translated UI messages for the current theme chain."""
        return utils.load_messages(self.THEMES, self.translations,
                                   self.default_lang)

    MESSAGES = property(_get_messages)

    def _get_global_context(self):
        """Initialize some parts of GLOBAL_CONTEXT only when it's queried."""
        if 'messages' not in self._GLOBAL_CONTEXT:
            self._GLOBAL_CONTEXT['messages'] = self.MESSAGES
        if 'has_custom_css' not in self._GLOBAL_CONTEXT:
            # check if custom css exist and is not empty
            custom_css_path = utils.get_asset_path(
                'assets/css/custom.css', self.THEMES,
                self.config['FILES_FOLDERS'])
            if custom_css_path and self.file_exists(custom_css_path,
                                                    not_empty=True):
                self._GLOBAL_CONTEXT['has_custom_css'] = True
            else:
                self._GLOBAL_CONTEXT['has_custom_css'] = False
        return self._GLOBAL_CONTEXT

    GLOBAL_CONTEXT = property(_get_global_context)

    def _get_template_system(self):
        """Lazily load and configure the theme's template system plugin."""
        if self._template_system is None:
            # Load template plugin
            template_sys_name = utils.get_template_engine(self.THEMES)
            pi = self.plugin_manager.getPluginByName(template_sys_name,
                                                     "TemplateSystem")
            if pi is None:
                sys.stderr.write("Error loading {0} template system "
                                 "plugin\n".format(template_sys_name))
                sys.exit(1)
            self._template_system = pi.plugin_object
            lookup_dirs = ['templates'] + [
                os.path.join(utils.get_theme_path(name), "templates")
                for name in self.THEMES
            ]
            self._template_system.set_directories(lookup_dirs,
                                                  self.config['CACHE_FOLDER'])
        return self._template_system

    template_system = property(_get_template_system)

    def get_compiler(self, source_name):
        """Get the correct compiler for a post from `conf.COMPILERS`

        To make things easier for users, the mapping in conf.py is
        compiler->[extensions], although this is less convenient for us.
        The majority of this function is reversing that dictionary and
        error checking.
        """
        ext = os.path.splitext(source_name)[1]
        try:
            compile_html = self.inverse_compilers[ext]
        except KeyError:
            # Find the correct compiler for this files extension
            langs = [
                lang for lang, exts in list(self.config['COMPILERS'].items())
                if ext in exts
            ]
            if len(langs) != 1:
                if len(set(langs)) > 1:
                    exit("Your file extension->compiler definition is"
                         "ambiguous.\nPlease remove one of the file extensions"
                         "from 'COMPILERS' in conf.py\n(The error is in"
                         "one of {0})".format(', '.join(langs)))
                elif len(langs) > 1:
                    # Same compiler listed twice for this extension; dedupe.
                    langs = langs[:1]
                else:
                    exit("COMPILERS in conf.py does not tell me how to "
                         "handle '{0}' extensions.".format(ext))
            lang = langs[0]
            compile_html = self.compilers[lang]
            # Cache the reverse lookup for next time.
            self.inverse_compilers[ext] = compile_html
        return compile_html

    def render_template(self, template_name, output_name, context):
        """Render ``template_name`` with ``context`` into ``output_name``,
        rewriting all internal links to be relative so the site is
        relocatable."""
        local_context = {}
        local_context["template_name"] = template_name
        local_context.update(self.GLOBAL_CONTEXT)
        local_context.update(context)
        data = self.template_system.render_template(template_name, None,
                                                    local_context)
        assert output_name.startswith(self.config["OUTPUT_FOLDER"])
        url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1:]

        # Treat our site as if output/ is "/" and then make all URLs relative,
        # making the site "relocatable"
        src = os.sep + url_part
        src = os.path.normpath(src)
        # The os.sep is because normpath will change "/" to "\" on windows
        src = "/".join(src.split(os.sep))
        parsed_src = urlsplit(src)
        src_elems = parsed_src.path.split('/')[1:]

        def replacer(dst):
            # Refuse to replace links that are full URLs.
            dst_url = urlparse(dst)
            if dst_url.netloc:
                if dst_url.scheme == 'link':  # Magic link
                    dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'),
                                    context['lang'])
                else:
                    return dst
            # Normalize
            dst = urljoin(src, dst)
            # Avoid empty links.
            if src == dst:
                return "#"
            # Check that link can be made relative, otherwise return dest
            parsed_dst = urlsplit(dst)
            if parsed_src[:2] != parsed_dst[:2]:
                return dst
            # Now both paths are on the same site and absolute
            dst_elems = parsed_dst.path.split('/')[1:]
            i = 0
            for (i, s), d in zip(enumerate(src_elems), dst_elems):
                if s != d:
                    break
            # Now i is the longest common prefix
            result = '/'.join(['..'] * (len(src_elems) - i - 1) +
                              dst_elems[i:])
            if not result:
                result = "."
            # Don't forget the fragment (anchor) part of the link
            if parsed_dst.fragment:
                result += "#" + parsed_dst.fragment
            assert result, (src, dst, i, src_elems, dst_elems)
            return result

        try:
            os.makedirs(os.path.dirname(output_name))
        except OSError:
            # BUG FIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit. The directory may already exist.
            pass
        doc = lxml.html.document_fromstring(data)
        doc.rewrite_links(replacer)
        data = b'<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8')
        with open(output_name, "wb+") as post_file:
            post_file.write(data)

    def current_lang(self):  # FIXME: this is duplicated, turn into a mixin
        """Return the currently set locale, if it's one of the
        available translations, or default_lang."""
        lang = utils.LocaleBorg().current_lang
        if lang:
            if lang in self.translations:
                return lang
            # Try the language without its territory (e.g. "en_US" -> "en").
            lang = lang.split('_')[0]
            if lang in self.translations:
                return lang
        # whatever
        return self.default_lang

    def path(self, kind, name, lang=None, is_link=False):
        """Build the path to a certain kind of page.

        kind is one of:

        * tag_index (name is ignored)
        * tag (and name is the tag name)
        * tag_rss (name is the tag name)
        * category (and name is the category name)
        * category_rss (and name is the category name)
        * archive (and name is the year, or None for the main archive index)
        * index (name is the number in index-number)
        * rss (name is ignored)
        * gallery (name is the gallery name)
        * listing (name is the source code file name)
        * post_path (name is 1st element in a POSTS/PAGES tuple)

        The returned value is always a path relative to output, like
        "categories/whatever.html"

        If is_link is True, the path is absolute and uses "/" as separator
        (ex: "/archive/index.html").
        If is_link is False, the path is relative to output and uses the
        platform's separator.
        (ex: "archive\\index.html")
        """
        if lang is None:
            lang = self.current_lang()

        path = []

        if kind == "tag_index":
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['TAG_PATH'], self.config['INDEX_FILE']
                ] if _f
            ]
        elif kind == "tag":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['TAG_PATH'], name + ".html"
                ] if _f
            ]
        elif kind == "category":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['TAG_PATH'], "cat_" + name + ".html"
                ] if _f
            ]
        elif kind == "tag_rss":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['TAG_PATH'], name + ".xml"
                ] if _f
            ]
        elif kind == "category_rss":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['TAG_PATH'], "cat_" + name + ".xml"
                ] if _f
            ]
        elif kind == "index":
            if name not in [None, 0]:
                path = [
                    _f for _f in [
                        self.config['TRANSLATIONS'][lang],
                        self.config['INDEX_PATH'],
                        'index-{0}.html'.format(name)
                    ] if _f
                ]
            else:
                path = [
                    _f for _f in [
                        self.config['TRANSLATIONS'][lang],
                        self.config['INDEX_PATH'], self.config['INDEX_FILE']
                    ] if _f
                ]
        elif kind == "post_path":
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    os.path.dirname(name), self.config['INDEX_FILE']
                ] if _f
            ]
        elif kind == "rss":
            path = [
                _f for _f in [
                    self.config['TRANSLATIONS'][lang],
                    self.config['RSS_PATH'], 'rss.xml'
                ] if _f
            ]
        elif kind == "archive":
            if name:
                path = [
                    _f for _f in [
                        self.config['TRANSLATIONS'][lang],
                        self.config['ARCHIVE_PATH'], name,
                        self.config['INDEX_FILE']
                    ] if _f
                ]
            else:
                path = [
                    _f for _f in [
                        self.config['TRANSLATIONS'][lang],
                        self.config['ARCHIVE_PATH'],
                        self.config['ARCHIVE_FILENAME']
                    ] if _f
                ]
        elif kind == "gallery":
            path = [
                _f for _f in
                [self.config['GALLERY_PATH'], name, self.config['INDEX_FILE']]
                if _f
            ]
        elif kind == "listing":
            path = [
                _f for _f in [self.config['LISTINGS_FOLDER'], name + '.html']
                if _f
            ]
        if is_link:
            link = '/' + ('/'.join(path))
            index_len = len(self.config['INDEX_FILE'])
            if self.config['STRIP_INDEXES'] and \
                    link[-(1 + index_len):] == '/' + self.config['INDEX_FILE']:
                return link[:-index_len]
            else:
                return link
        else:
            return os.path.join(*path)

    def link(self, *args):
        """Shorthand for path(..., is_link=True)."""
        return self.path(*args, is_link=True)

    def abs_link(self, dst):
        """Return the site-absolute path for ``dst`` resolved against
        BASE_URL."""
        # Normalize
        dst = urljoin(self.config['BASE_URL'], dst)
        return urlparse(dst).path

    def rel_link(self, src, dst):
        """Return ``dst`` expressed relative to ``src`` when both are on the
        same site; otherwise return ``dst`` unchanged."""
        # Normalize
        src = urljoin(self.config['BASE_URL'], src)
        dst = urljoin(src, dst)
        # Avoid empty links.
        if src == dst:
            return "#"
        # Check that link can be made relative, otherwise return dest
        parsed_src = urlsplit(src)
        parsed_dst = urlsplit(dst)
        if parsed_src[:2] != parsed_dst[:2]:
            return dst
        # Now both paths are on the same site and absolute
        src_elems = parsed_src.path.split('/')[1:]
        dst_elems = parsed_dst.path.split('/')[1:]
        i = 0
        for (i, s), d in zip(enumerate(src_elems), dst_elems):
            if s != d:
                break
        else:
            i += 1
        # Now i is the longest common prefix
        return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:])

    def file_exists(self, path, not_empty=False):
        """Returns True if the file exists. If not_empty is True,
        it also has to be not empty."""
        exists = os.path.exists(path)
        if exists and not_empty:
            exists = os.stat(path).st_size > 0
        return exists

    def gen_tasks(self, name, plugin_category):
        """Yield doit tasks from every plugin in ``plugin_category``, running
        each produced task through all TaskMultiplier plugins, and finish with
        a group task named ``name``."""

        def flatten(task):
            # Tasks may be a dict or an arbitrarily nested iterable of dicts.
            if isinstance(task, dict):
                yield task
            else:
                for t in task:
                    for ft in flatten(t):
                        yield ft

        task_dep = []
        for pluginInfo in self.plugin_manager.getPluginsOfCategory(
                plugin_category):
            for task in flatten(pluginInfo.plugin_object.gen_tasks()):
                yield task
                for multi in self.plugin_manager.getPluginsOfCategory(
                        "TaskMultiplier"):
                    flag = False
                    for task in multi.plugin_object.process(task, name):
                        flag = True
                        yield task
                    if flag:
                        task_dep.append('{0}_{1}'.format(
                            name, multi.plugin_object.name))
            if pluginInfo.plugin_object.is_default:
                task_dep.append(pluginInfo.plugin_object.name)
        yield {
            'name': name,
            'actions': None,
            'clean': True,
            'task_dep': task_dep
        }

    def scan_posts(self):
        """Scan all the posts."""
        if self._scanned:
            return
        seen = set([])
        print("Scanning posts", end='')
        tzinfo = None
        if self.config['TIMEZONE'] is not None:
            tzinfo = pytz.timezone(self.config['TIMEZONE'])
        if self.config['FUTURE_IS_NOW']:
            current_time = None
        else:
            current_time = utils.current_time(tzinfo)
        targets = set([])
        for wildcard, destination, template_name, use_in_feeds in \
                self.config['post_pages']:
            print(".", end='')
            dirname = os.path.dirname(wildcard)
            for dirpath, _, _ in os.walk(dirname):
                dir_glob = os.path.join(dirpath, os.path.basename(wildcard))
                dest_dir = os.path.normpath(
                    os.path.join(destination,
                                 os.path.relpath(dirpath, dirname)))
                full_list = glob.glob(dir_glob)
                # Now let's look for things that are not in default_lang
                for lang in self.config['TRANSLATIONS'].keys():
                    lang_glob = dir_glob + "." + lang
                    translated_list = glob.glob(lang_glob)
                    for fname in translated_list:
                        orig_name = os.path.splitext(fname)[0]
                        if orig_name in full_list:
                            continue
                        full_list.append(orig_name)
                # We eliminate from the list the files inside any .ipynb folder
                full_list = [
                    p for p in full_list
                    if not any([x.startswith('.') for x in p.split(os.sep)])
                ]

                for base_path in full_list:
                    if base_path in seen:
                        continue
                    else:
                        seen.add(base_path)
                    post = Post(
                        base_path,
                        self.config['CACHE_FOLDER'],
                        dest_dir,
                        use_in_feeds,
                        self.config['TRANSLATIONS'],
                        self.config['DEFAULT_LANG'],
                        self.config['BASE_URL'],
                        self.MESSAGES,
                        template_name,
                        self.config['FILE_METADATA_REGEXP'],
                        self.config['STRIP_INDEXES'],
                        self.config['INDEX_FILE'],
                        tzinfo,
                        current_time,
                        self.config['HIDE_UNTRANSLATED_POSTS'],
                        self.config['PRETTY_URLS'],
                        self.config['HYPHENATE'],
                    )
                    for lang, langpath in list(
                            self.config['TRANSLATIONS'].items()):
                        dest = (destination, langpath, dir_glob,
                                post.meta[lang]['slug'])
                        if dest in targets:
                            raise Exception('Duplicated output path {0!r} '
                                            'in post {1!r}'.format(
                                                post.meta[lang]['slug'],
                                                base_path))
                        targets.add(dest)
                    self.global_data[post.post_name] = post
                    if post.use_in_feeds:
                        self.posts_per_year[str(post.date.year)].append(
                            post.post_name)
                        self.posts_per_month['{0}/{1:02d}'.format(
                            post.date.year,
                            post.date.month)].append(post.post_name)
                        for tag in post.alltags:
                            self.posts_per_tag[tag].append(post.post_name)
                        self.posts_per_category[post.meta('category')].append(
                            post.post_name)
                    else:
                        self.pages.append(post)
                    if self.config['OLD_THEME_SUPPORT']:
                        post._add_old_metadata()
                    # NOTE(review): `lang` here is the last value left over
                    # from the TRANSLATIONS loop above — verify this is the
                    # intended language for the file-index entries.
                    self.post_per_file[post.destination_path(lang=lang)] = post
                    self.post_per_file[post.destination_path(
                        lang=lang, extension=post.source_ext())] = post

        for name, post in list(self.global_data.items()):
            self.timeline.append(post)
        # Newest first.
        self.timeline.sort(key=lambda p: p.date)
        self.timeline.reverse()
        post_timeline = [p for p in self.timeline if p.use_in_feeds]
        for i, p in enumerate(post_timeline[1:]):
            p.next_post = post_timeline[i]
        for i, p in enumerate(post_timeline[:-1]):
            p.prev_post = post_timeline[i + 1]
        self._scanned = True
        print("done!")

    def generic_page_renderer(self, lang, post, filters):
        """Render post fragments to final HTML pages."""
        context = {}
        deps = post.deps(lang) + \
            self.template_system.template_deps(post.template_name)
        context['post'] = post
        context['lang'] = lang
        context['title'] = post.title(lang)
        context['description'] = post.description(lang)
        context['permalink'] = post.permalink(lang)
        context['page_list'] = self.pages
        if post.use_in_feeds:
            context['enable_comments'] = True
        else:
            context['enable_comments'] = self.config['COMMENTS_IN_STORIES']
        extension = self.get_compiler(post.source_path).extension()
        output_name = os.path.join(self.config['OUTPUT_FOLDER'],
                                   post.destination_path(lang, extension))
        deps_dict = copy(context)
        # The post object itself is not picklable/hashable for uptodate.
        deps_dict.pop('post')
        if post.prev_post:
            deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)]
        if post.next_post:
            deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)]
        deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER']
        deps_dict['TRANSLATIONS'] = self.config['TRANSLATIONS']
        deps_dict['global'] = self.GLOBAL_CONTEXT
        deps_dict['comments'] = context['enable_comments']
        if post:
            deps_dict['post_translations'] = post.translated_to
        task = {
            'name': os.path.normpath(output_name),
            'file_dep': deps,
            'targets': [output_name],
            'actions': [(self.render_template,
                         [post.template_name, output_name, context])],
            'clean': True,
            'uptodate': [config_changed(deps_dict)],
        }
        yield utils.apply_filters(task, filters)

    def generic_post_list_renderer(self, lang, posts, output_name,
                                   template_name, filters, extra_context):
        """Renders pages with lists of posts."""
        deps = self.template_system.template_deps(template_name)
        for post in posts:
            deps += post.deps(lang)
        context = {}
        context["posts"] = posts
        context["title"] = self.config['BLOG_TITLE']
        context["description"] = self.config['BLOG_DESCRIPTION']
        context["lang"] = lang
        context["prevlink"] = None
        context["nextlink"] = None
        context.update(extra_context)
        deps_context = copy(context)
        deps_context["posts"] = [(p.meta[lang]['title'], p.permalink(lang))
                                 for p in posts]
        deps_context["global"] = self.GLOBAL_CONTEXT
        task = {
            'name': os.path.normpath(output_name),
            'targets': [output_name],
            'file_dep': deps,
            'actions': [(self.render_template,
                         [template_name, output_name, context])],
            'clean': True,
            'uptodate': [config_changed(deps_context)]
        }
        return utils.apply_filters(task, filters)
class NimbusPI(object):
    """The NimbusPi Weather Station.

    Loads station configuration, discovers sensor/broadcaster plugins via
    yapsy, and runs their worker threads until interrupted.
    """

    # Current NimbusPI Version
    VERSION = "0.1.0-rc1"

    def __init__(self, config='nimbus.cfg'):
        """Initializes the NimbusPI Weather Station.

        :param config: path to an INI-style config file; a missing file is
            non-fatal and the built-in defaults are used instead.
        """
        self.sensors = dict()       # active sensor name -> yapsy PluginInfo
        self.broadcasters = dict()  # active broadcaster name -> PluginInfo
        self.threads = []           # worker threads contributed by plugins

        # Initialize a named logger
        self.__logger = logging.getLogger('nimbuspi')

        # Load our config defaults
        self.config = configparser.ConfigParser(allow_no_value=True)
        self.config.add_section('station')
        self.config.set('station', 'name', 'N/A')
        self.config.set('station', 'location', 'N/A')
        self.config.set('station', 'longitude', '0.000000')
        self.config.set('station', 'latitude', '0.000000')
        self.config.set('station', 'altitude', '0')
        self.config.add_section('sensors')
        self.config.add_section('broadcasters')

        # Load the provided config file (missing file is tolerated)
        if not os.path.isfile(config):
            # logger.warn is a deprecated alias; use warning()
            self.__logger.warning('Configuration file "%s" not found!', config)
        else:
            self.__logger.debug('Loading configuration from "%s"', config)
            self.config.read(config)

        # Log our station details
        self.__logger.debug(' name :: %s', self.config.get('station', 'name'))
        self.__logger.debug(' location :: %s', self.config.get('station', 'location'))
        self.__logger.debug(' latitude :: %s', self.config.get('station', 'latitude'))
        self.__logger.debug(' longitude :: %s', self.config.get('station', 'longitude'))
        self.__logger.debug(' altitude :: %s feet', self.config.get('station', 'altitude'))

        self.__logger.debug('Sensors Configured:')
        for sensor in self.config.options('sensors'):
            self.__logger.debug(' %s', sensor)

        self.__logger.debug('Broadcasters Configured:')
        for broadcaster in self.config.options('broadcasters'):
            self.__logger.debug(' %s', broadcaster)

        # Search for available plugins
        self.__logger.debug("Searching for available plugins...")
        self.__plugins = PluginManager(plugin_info_ext='info')
        self.__plugins.setPluginPlaces([
            './sensors',
            './broadcasters',
            './nimbuspi/sensors',
            './nimbuspi/broadcasters'
        ])
        self.__plugins.setCategoriesFilter({
            plugins.ISensorPlugin.CATEGORY: plugins.ISensorPlugin,
            plugins.IBroadcasterPlugin.CATEGORY: plugins.IBroadcasterPlugin
        })
        self.__plugins.collectPlugins()

        # Give every discovered plugin a back-reference to the station
        for plugin in self.__plugins.getAllPlugins():
            self.__logger.debug(" %s (%s)", plugin.name, plugin.path)
            plugin.plugin_object.set_nimbus(self)

        self.__logger.debug("%d plugins available",
                            len(self.__plugins.getAllPlugins()))

    def run(self):
        """Runs the NimbusPI Weather Station loop."""
        self.__logger.debug('-' * 80)
        self.__logger.info('NimbusPI Weather Station v%s', self.VERSION)
        self.__logger.info('-' * 80)

        # Load all configured sensor plugins
        self.__logger.info("Activating sensor plugins...")
        for sensor in self.config.options('sensors'):
            try:
                self.activate_sensor(sensor)
            except LookupError:
                self.__logger.error("Could not load sensor '%s'", sensor)
                return
        if len(self.sensors) <= 0:
            self.__logger.error('Cannot continue - no sensors configured')
            return

        # Load all configured broadcaster plugins
        self.__logger.info("Activating broadcaster plugins...")
        for broadcaster in self.config.options('broadcasters'):
            try:
                self.activate_broadcaster(broadcaster)
            except LookupError:
                self.__logger.error("Could not load broadcaster '%s'",
                                    broadcaster)
                return
        if len(self.broadcasters) <= 0:
            self.__logger.error('Cannot continue - no broadcasters configured')
            return

        # Thread run loop until keyboard interrupt.
        # BUG FIX: the original rebuilt self.threads from the *return value*
        # of Thread.join(), which is always None, so the list degenerated to
        # [None, ...] after one pass and thread.stop() could never be called
        # on shutdown.  Join with a timeout, then prune finished threads.
        # (isAlive() was also removed in Python 3.9; use is_alive().)
        self.__logger.debug("Entering thread loop")
        while len(self.threads) > 0:
            try:
                for t in self.threads:
                    if t is not None:
                        t.join(30)
                self.threads = [
                    t for t in self.threads
                    if t is not None and t.is_alive()
                ]
            except (KeyboardInterrupt, SystemExit):
                self.__logger.info(
                    "Shutting down plugins (this may take a minute)...")
                for thread in self.threads:
                    thread.stop()
        self.__logger.debug("Exiting thread loop")

        # Deactivate plugins.  Snapshot the keys first: deactivation deletes
        # entries, and mutating a dict while iterating its keys view raises
        # RuntimeError on Python 3.
        self.__logger.debug("Deactivating sensors")
        for sensor in list(self.sensors):
            self.deactivate_sensor(sensor)

        self.__logger.debug("Deactivating broadcasters")
        for broadcaster in list(self.broadcasters):
            self.deactivate_broadcaster(broadcaster)

    def activate_sensor(self, sensor):
        """Activates a sensor on the service.

        :raises LookupError: if no plugin with that name was collected.
        :returns: True on activation, False if already active.
        """
        if sensor in self.sensors:
            self.__logger.warning(
                "Cannot activate sensor '%s' - sensor already active", sensor)
            return False

        self.__logger.debug("Activating sensor '%s'", sensor)
        self.sensors[sensor] = self.__plugins.getPluginByName(
            sensor, plugins.ISensorPlugin.CATEGORY)
        if not self.sensors[sensor]:
            raise LookupError("unknown sensor plugin '%s'" % sensor)
        self.__plugins.activatePluginByName(sensor,
                                            plugins.ISensorPlugin.CATEGORY)
        self.threads.append(self.sensors[sensor].plugin_object.thread)
        return True

    def deactivate_sensor(self, sensor):
        """Deactivates a sensor on the service."""
        if sensor not in self.sensors:
            self.__logger.warning(
                "Cannot deactivate sensor '%s' - sensor not active", sensor)
            return False

        self.__logger.debug("Deactivating sensor '%s'", sensor)
        if self.sensors[sensor].plugin_object.thread:
            self.sensors[sensor].plugin_object.thread.stop()
        self.__plugins.deactivatePluginByName(sensor,
                                              plugins.ISensorPlugin.CATEGORY)
        del self.sensors[sensor]
        return True

    def activate_broadcaster(self, broadcaster):
        """Activates a broadcaster on the service.

        :raises LookupError: if no plugin with that name was collected.
        :returns: True on activation, False if already active.
        """
        if broadcaster in self.broadcasters:
            self.__logger.warning(
                "Cannot activate broadcaster '%s' - broadcaster already active",
                broadcaster)
            return False

        self.__logger.debug("Activating broadcaster '%s'", broadcaster)
        self.broadcasters[broadcaster] = self.__plugins.getPluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY)
        if not self.broadcasters[broadcaster]:
            raise LookupError("unknown broadcaster plugin '%s'" % broadcaster)
        self.__plugins.activatePluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY)
        self.threads.append(
            self.broadcasters[broadcaster].plugin_object.thread)
        return True

    def deactivate_broadcaster(self, broadcaster):
        """Deactivates a broadcaster on the service."""
        if broadcaster not in self.broadcasters:
            self.__logger.warning(
                "Cannot deactivate broadcaster '%s' - broadcaster not active",
                broadcaster)
            return False

        self.__logger.debug("Deactivating broadcaster '%s'", broadcaster)
        if self.broadcasters[broadcaster].plugin_object.thread:
            self.broadcasters[broadcaster].plugin_object.thread.stop()
        self.__plugins.deactivatePluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY)
        del self.broadcasters[broadcaster]
        return True

    def get_states(self):
        """Returns the current state of all sensors."""
        states = dict()

        # Add our station configuration information as well
        states['config'] = dict()
        for option in self.config.options('station'):
            states['config'][option] = self.config.get('station', option)

        # Add all current plugin states
        for sensor in self.sensors:
            states[sensor] = self.sensors[sensor].plugin_object.get_state()

        return states
class Paratest(object):
    """Coordinates parallel test execution.

    Discovers a test-finder plugin via yapsy, queues tests by persisted
    priority on the shared queue, and fans them out to Worker threads.
    """

    def __init__(self, workspace_num, scripts, source_path, workspace_path,
                 output_path, test_pattern, persistence):
        self.workspace_num = workspace_num
        self.workspace_path = workspace_path
        self.scripts = scripts
        self.source_path = source_path
        self.output_path = output_path
        self.test_pattern = test_pattern
        self._workers = []
        # Plugins are *.paratest descriptors in ./plugins or the cwd
        self.pluginmgr = PluginManager()
        self.pluginmgr.setPluginInfoExtension('paratest')
        self.pluginmgr.setPluginPlaces(["plugins", ""])
        self.pluginmgr.collectPlugins()
        self.persistence = persistence

        # Make sure both working directories exist
        if not os.path.exists(self.source_path):
            os.makedirs(self.source_path)
        if not os.path.exists(self.output_path):
            os.makedirs(self.output_path)

    def list_plugins(self):
        """Print the names of all discovered plugins."""
        msg = "Available plugins are:\n"
        for plugin in self.pluginmgr.getAllPlugins():
            msg += " %s" % plugin.name
        print(msg)

    def run(self, plugin):
        """Run the full cycle (setup, queue, workers, teardown) using the
        named finder plugin."""
        plugin = self.pluginmgr.getPluginByName(plugin)
        pluginobj = plugin.plugin_object
        self.run_script_setup()
        test_number = self.queue_tests(pluginobj)
        self.create_workers(pluginobj, self.num_of_workers(test_number))
        self.start_workers()
        self.wait_workers()
        self.run_script_teardown()
        self.assert_all_messages_were_processed()

    def run_script_setup(self):
        """Run the user setup script; abort the whole run on failure."""
        if run_script(self.scripts.setup, path=self.workspace_path):
            # FIX: this message was a line-wrapped (syntactically broken)
            # string literal in the original; reconstructed on one line.
            raise Abort('The setup script failed. aborting.')

    def run_script_teardown(self):
        """Run the user teardown script; failure is reported but final."""
        if run_script(self.scripts.teardown, path=self.workspace_path):
            raise Abort('The teardown script failed, but nothing can be done.')

    def queue_tests(self, pluginobj):
        """Push (priority, test-id) pairs onto the shared queue.

        :returns: the number of tests queued.
        """
        tids = 0
        for tid in pluginobj.find(self.source_path, self.test_pattern):
            shared_queue.put((self.persistence.get_priority(tid), tid))
            tids += 1
        return tids

    def create_workers(self, pluginobj, workers):
        """Instantiate (but do not start) the requested number of workers."""
        for i in range(workers):
            t = Worker(
                pluginobj,
                scripts=self.scripts,
                workspace_path=self.workspace_path,
                source_path=self.source_path,
                output_path=self.output_path,
                persistence=self.persistence,
                name=str(i),
            )
            self._workers.append(t)

    def num_of_workers(self, test_number):
        # never spawn more workers than there are tests or workspaces
        return min(self.workspace_num, test_number)

    def start_workers(self):
        logger.debug("start workers")
        for t in self._workers:
            t.start()
        # sentinel telling workers the queue is exhausted
        shared_queue.put((INFINITE, FINISH))

    def wait_workers(self):
        logger.debug("wait for all workers to finish")
        for t in self._workers:
            t.join()

    def assert_all_messages_were_processed(self):
        """Fail loudly if tests remain queued after all workers exited."""
        if not shared_queue.empty():
            raise Abort('There were unprocessed tests, but all workers are dead. Aborting.')
class GeneManager(object):
    """
    Merge data available in ?elasticsearch into proper json objects

    plugin_paths is a collection of filesystem paths to search for potential plugins

    plugin_names is an ordered collection of class names of plugins
    which determines the order they are handled in
    """

    def __init__(self, es_hosts, es_index, es_mappings, es_settings,
                 plugin_paths, plugin_order, data_config, es_config,
                 workers_write, queue_write):
        # Elasticsearch connection / index parameters
        self.es_hosts = es_hosts
        self.es_index = es_index
        self.es_mappings = es_mappings     # URL/path of the index mappings JSON
        self.es_settings = es_settings     # URL/path of the index settings JSON
        self.plugin_order = plugin_order   # plugin names, run in this order
        self.data_config = data_config
        self.es_config = es_config
        self.workers_write = workers_write # >0 enables parallel_bulk writes
        self.queue_write = queue_write     # queue size for parallel_bulk
        self.genes = GeneSet()             # accumulator the plugins fill in
        self._logger = logging.getLogger(__name__)

        self._logger.debug("Preparing the plug in management system")
        # Build the manager
        self.simplePluginManager = PluginManager()
        # Tell it the default place(s) where to find plugins
        self.simplePluginManager.setPluginPlaces(plugin_paths)
        for dir in plugin_paths:
            self._logger.debug("Looking for plugins in %s", dir)
        # Load all plugins
        self.simplePluginManager.collectPlugins()

    def merge_all(self, dry_run):
        """Run every configured plugin over the gene set, then (unless
        *dry_run*) bulk-index the merged genes into Elasticsearch.

        Raises RuntimeError if any bulk action fails to index.
        """
        es = new_es_client(self.es_hosts)

        # run the actual plugins, in the caller-specified order
        for plugin_name in self.plugin_order:
            plugin = self.simplePluginManager.getPluginByName(plugin_name)
            # TODO remove the former redis object from all plugins
            plugin.plugin_object.merge_data(self.genes, es, None,
                                            self.data_config, self.es_config)

        # Load index mappings/settings from their (possibly compressed) URLs
        with URLZSource(self.es_mappings).open() as mappings_file:
            mappings = json.load(mappings_file)

        with URLZSource(self.es_settings).open() as settings_file:
            settings = json.load(settings_file)

        # Hot fix issue 643: missing pathway in the association.
        # Need a review for the reactome functions
        for geneid, gene in self.genes.iterate():
            gene._create_suggestions()
            gene._create_facets()

        # The context manager handles index creation/finalisation around
        # the bulk write.
        with ElasticsearchBulkIndexManager(es, self.es_index, settings, mappings):
            # write into elasticsearch
            chunk_size = 1000  # TODO make configurable
            actions = elasticsearch_actions(self.genes, self.es_index)
            failcount = 0

            if not dry_run:
                results = None
                # workers_write selects threaded vs streaming bulk writes
                if self.workers_write > 0:
                    results = elasticsearch.helpers.parallel_bulk(
                        es, actions,
                        thread_count=self.workers_write,
                        queue_size=self.queue_write,
                        chunk_size=chunk_size)
                else:
                    results = elasticsearch.helpers.streaming_bulk(
                        es, actions,
                        chunk_size=chunk_size)

                # Drain the generator; count (but do not stop on) failures
                for success, details in results:
                    if not success:
                        failcount += 1

                if failcount:
                    raise RuntimeError("%s relations failed to index" % failcount)

    # NOTE(review): this bare string sits *before* the def, so it is a no-op
    # class-level expression rather than qc()'s docstring — it was presumably
    # meant to be inside the function; left in place to preserve the code.
    """
    Run a series of QC tests on EFO elasticsearch index. Returns a dictionary
    of string test names and result objects
    """
    def qc(self, es, index):
        """Count gene entries in *index* and return them as a metrics dict."""
        # number of gene entries
        gene_count = 0
        # Note: try to avoid doing this more than once!
        for gene_entry in Search().using(es).index(index).query(
                MatchAll()).scan():
            gene_count += 1

        # put the metrics into a single dict
        metrics = dict()
        metrics["gene.count"] = gene_count

        return metrics
class WeaponSystem(rpyc.Service):
    '''
    RPC Services: This is the code that does the actual password cracking
    and returns the results to orbital control. Currently only supports
    cracking using rainbow tables (RCrackPy)
    '''

    is_initialized = False
    mutex = Lock()
    is_busy = False
    job_id = None

    def initialize(self):
        ''' Initializes variables, this should only be called once '''
        logging.info("Weapon system initializing ...")
        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces(["plugins/"])
        self.plugin_manager.setCategoriesFilter(FILTERS)
        self.plugin_manager.collectPlugins()
        # FIX: self.plugins was created empty and never populated, so the
        # `assert plugin_name in self.plugins` guard in exposed_crack()
        # could never pass.  Index the collected plugins by name.
        self.plugins = dict(
            (info.name, info) for info in self.plugin_manager.getAllPlugins()
        )
        logging.info("Loaded %d plugin(s)" %
                     len(self.plugin_manager.getAllPlugins()))
        self.__cpu__()
        logging.info("Weapon system online, good hunting.")

    @atomic
    def on_connect(self):
        ''' Called when successfully connected '''
        if not self.is_initialized:
            self.initialize()
            self.is_initialized = True
        logging.info("Uplink to orbital control active")

    def on_disconnect(self):
        ''' Called if the connection is lost/disconnected '''
        logging.info("Disconnected from orbital command server.")

    def __cpu__(self):
        ''' Detects the number of CPU cores on a system (including virtual cores) '''
        if cpu_count is not None:
            try:
                self.cpu_cores = cpu_count()
                logging.info("Detected %d CPU core(s)" % self.cpu_cores)
            except NotImplementedError:
                logging.error("Could not detect number of processors; assuming 1")
                self.cpu_cores = 1
        else:
            # Fallback when multiprocessing.cpu_count is unavailable
            try:
                self.cpu_cores = int(sysconf("SC_NPROCESSORS_CONF"))
                logging.info("Detected %d CPU core(s)" % self.cpu_cores)
            except ValueError:
                logging.error("Could not detect number of processors; assuming 1")
                self.cpu_cores = 1

    ############################ [ EXPOSED METHODS ] ############################
    @atomic
    def exposed_crack(self, plugin_name, job_id, hashes, **kwargs):
        ''' Exposes plugins calls; runs the named plugin over the hashes. '''
        assert plugin_name in self.plugins
        self.is_busy = True
        self.job_id = job_id
        # NOTE(review): no category is passed to activate/get here although a
        # categories filter is installed — confirm the plugins also live in
        # yapsy's default category, or pass their category explicitly.
        self.plugin_manager.activatePluginByName(plugin_name)
        plugin_info = self.plugin_manager.getPluginByName(plugin_name)
        # FIX: getPluginByName() returns a yapsy PluginInfo wrapper; the
        # runnable plugin is its .plugin_object (the original called
        # plugin.execute() on the wrapper, which has no such method).
        results = plugin_info.plugin_object.execute(hashes, **kwargs)
        self.plugin_manager.deactivatePluginByName(plugin_name)
        self.job_id = None
        self.is_busy = False
        return results

    def exposed_get_plugin_names(self):
        ''' Returns what algorithms can be cracked '''
        logging.info("Method called: exposed_get_capabilities")
        plugins = self.plugin_manager.getAllPlugins()
        return [plugin.name for plugin in plugins]

    def exposed_get_categories(self):
        ''' Return categories for which we have plugins '''
        categories = []
        for category in self.plugin_manager.getCategories():
            if 0 < len(self.plugin_manager.getPluginsOfCategory(category)):
                categories.append(category)
        return categories

    def exposed_get_category_plugins(self, category):
        ''' Get plugin names for a category '''
        plugins = self.plugin_manager.getPluginsOfCategory(category)
        return [plugin.name for plugin in plugins]

    def exposed_get_plugin_details(self, category, plugin_name):
        ''' Get plugin based on name details '''
        plugin = self.plugin_manager.getPluginByName(plugin_name, category)
        # PluginInfo.details is a ConfigParser over the .info file
        info = {'name': plugin.name}
        info['author'] = plugin.details.get('Documentation', 'author')
        info['website'] = plugin.details.get('Documentation', 'website')
        info['version'] = plugin.details.get('Documentation', 'version')
        info['description'] = plugin.details.get('Documentation', 'description')
        info['copyright'] = plugin.details.get('Documentation', 'copyright')
        info['precomputation'] = plugin.details.getboolean('Core', 'precomputation')
        return info

    def exposed_ping(self):
        ''' Returns a pong message '''
        return "PONG"

    def exposed_is_busy(self):
        ''' Returns True/False if the current system is busy (thread safe) '''
        return self.is_busy

    def exposed_current_job_id(self):
        ''' Returns the current job id (thread safe) '''
        return self.job_id

    def exposed_cpu_count(self):
        ''' Returns the number of detected cpu cores '''
        return self.cpu_cores
class TestDef(object):
    """Top-level MTT test-definition driver: holds options, loads the
    yapsy plugin managers (stages/tools/utilities), and parses the
    .ini test description files."""

    def __init__(self):
        # set aside storage for options and cmd line args
        self.options = {}
        self.args = []
        # record if we have loaded the plugins or
        # not - this is just a bozo check to ensure
        # someone doesn't tell us to do it twice
        self.loaded = False
        # set aside a spot for a logger object, and
        # note that it hasn't yet been defined
        self.logger = None
        self.modcmd = None
        self.execmd = None
        self.harasser = None
        self.config = None
        self.stages = None
        self.tools = None
        self.utilities = None
        self.defaults = None
        self.log = {}
        self.watchdog = None
        self.plugin_trans_sem = Semaphore()

    def setOptions(self, args):
        """Record parsed command-line args both as a dict and verbatim."""
        self.options = vars(args)
        self.args = args

    # private function to convert values
    # Returns (status, value): status 0 on success, 1 when no conversion
    # from type(inval) to type(opt) is known.
    def __convert_value(self, opt, inval):
        if opt is None or type(opt) is str:
            return 0, inval
        elif type(opt) is bool:
            if type(inval) is bool:
                return 0, inval
            elif type(inval) is str:
                if inval.lower() in ['true', '1', 't', 'y', 'yes']:
                    return 0, True
                else:
                    return 0, False
            elif type(inval) is int:
                if 0 == inval:
                    return 0, False
                else:
                    return 0, True
            elif is_py2 and type(inval) is unicode:
                return 0, int(inval)
            else:
                # unknown conversion required
                print("Unknown conversion required for " + inval)
                return 1, None
        elif type(opt) is int:
            if type(inval) is int:
                return 0, inval
            elif type(inval) is str:
                return 0, int(inval)
            else:
                # unknown conversion required
                print("Unknown conversion required for " + inval)
                return 1, None
        elif type(opt) is float:
            if type(inval) is float:
                return 0, inval
            elif type(inval) is str or type(inval) is int:
                return 0, float(inval)
            else:
                # unknown conversion required
                print("Unknown conversion required for " + inval)
                return 1, None
        else:
            return 1, None

    # scan the key-value pairs obtained from the configuration
    # parser and compare them with the options defined for a
    # given plugin. Generate an output dictionary that contains
    # the updated set of option values, the default value for
    # any option that wasn't included in the configuration file,
    # and return an error status plus output identifying any
    # keys in the configuration file that are not supported
    # by the list of options
    #
    # @log [INPUT]
    #          - a dictionary that will return the status plus
    #            stderr containing strings identifying any
    #            provided keyvals that don't have a corresponding
    #            supported option
    # @options [INPUT]
    #          - a dictionary of tuples, each consisting of three
    #            entries:
    #               (a) the default value
    #               (b) data type
    #               (c) a help-like description
    # @keyvals [INPUT]
    #          - a dictionary of key-value pairs obtained from
    #            the configuration parser
    # @target [OUTPUT]
    #          - the resulting dictionary of key-value pairs
    def parseOptions(self, log, options, keyvals, target):
        # parse the incoming keyvals dictionary against the source
        # options. If a source option isn't provided, then
        # copy it across to the target.
        opts = list(options.keys())
        kvkeys = list(keyvals.keys())
        for opt in opts:
            found = False
            for kvkey in kvkeys:
                if kvkey == opt:
                    # they provided us with an update, so
                    # pass this value into the target - expand
                    # any provided lists
                    if keyvals[kvkey] is None:
                        continue
                    st, outval = self.__convert_value(options[opt][0], keyvals[kvkey])
                    if 0 == st:
                        target[opt] = outval
                    else:
                        if len(keyvals[kvkey]) == 0:
                            # this indicates they do not want this option
                            found = True
                            break
                        if keyvals[kvkey][0][0] == "[":
                            # they provided a list - remove the brackets
                            val = keyvals[kvkey].replace('[','')
                            val = val.replace(']','')
                            # split the input to pickup sets of options
                            newvals = list(val)
                            # convert the values to specified type
                            i=0
                            for val in newvals:
                                st, newvals[i] = self.__convert_value(opt[0], val)
                                i = i + 1
                            target[opt] = newvals
                        else:
                            st, target[opt] = self.__convert_value(opt[0], keyvals[kvkey])
                    found = True
                    break
            if not found:
                # they didn't provide this one, so
                # transfer only the value across
                target[opt] = options[opt][0]
        # add in any default settings that have not
        # been overridden - anything set by this input
        # stage will override the default
        if self.defaults is not None:
            keys = self.defaults.options.keys()
            for key in keys:
                if key not in target:
                    target[key] = self.defaults.options[key][0]
        # now go thru in the reverse direction to see
        # if any keyvals they provided aren't supported
        # as this would be an error
        unsupported_options = []
        for kvkey in kvkeys:
            # ignore some standard keys
            if kvkey in ['section', 'plugin']:
                continue
            try:
                if target[kvkey] is not None:
                    pass
            except KeyError:
                # some always need to be passed
                if kvkey in ['parent', 'asis']:
                    target[kvkey] = keyvals[kvkey]
                else:
                    unsupported_options.append(kvkey)
        if unsupported_options:
            sys.exit("ERROR: Unsupported options for section [%s]: %s" % (log['section'], ",".join(unsupported_options)))
        log['status'] = 0
        log['options'] = target
        return

    def loadPlugins(self, basedir, topdir):
        """Bootstrap LoadClasses and build the stages/tools/utilities
        plugin managers; must be called exactly once."""
        if self.loaded:
            print("Cannot load plugins multiple times")
            sys.exit(1)
        self.loaded = True

        # find the loader utility so we can bootstrap ourselves
        # NOTE(review): imp is deprecated (removed in Python 3.12);
        # importlib is the modern replacement — confirm target runtime.
        try:
            m = imp.load_source("LoadClasses", os.path.join(basedir, "LoadClasses.py"));
        except ImportError:
            print("ERROR: unable to load LoadClasses that must contain the class loader object")
            sys.exit(1)
        cls = getattr(m, "LoadClasses")
        a = cls()

        # setup the loader object
        self.loader = a.__class__();

        # Setup the array of directories we will search for plugins
        # Note that we always look at the topdir location by default
        plugindirs = []
        plugindirs.append(topdir)
        if self.options['plugindir']:
            # could be a comma-delimited list, so split on commas
            x = self.options['plugindir'].split(',')
            for y in x:
                # prepend so we always look at the given
                # location first in case the user wants
                # to "overload/replace" a default MTT
                # class definition
                plugindirs.insert(0, y)

        # Load plugins from each of the specified plugin dirs
        for dirPath in plugindirs:
            if not Path(dirPath).exists():
                print("Attempted to load plugins from non-existent path:", dirPath)
                continue
            try:
                self.loader.load(dirPath)
            except Exception as e:
                print("Exception caught while loading plugins:")
                print(e)
                sys.exit(1)

        # Build plugin managers,
        # class yapsy.PluginManager.PluginManager(categories_filter=None,
        #       directories_list=None, plugin_info_ext=None, plugin_locator=None)

        # Build the stages plugin manager
        self.stages = PluginManager(None, plugindirs, None, None)
        # Get a list of all the categories - this corresponds to
        # the MTT stages that have been defined. Note that we
        # don't need to formally define the stages here - anyone
        # can add a new stage, or delete an old one, by simply
        # adding or removing a plugin directory.
        self.stages.setCategoriesFilter(self.loader.stages)
        # Load all plugins we find there
        self.stages.collectPlugins()

        # Build the tools plugin manager - tools differ from sections
        # in that they are plugins we will use to execute the various
        # sections. For example, the TestRun section clearly needs the
        # ability to launch jobs. There are many ways to launch jobs
        # depending on the environment, and sometimes several ways to
        # start jobs even within one environment (e.g., mpirun vs
        # direct launch).
        self.tools = PluginManager(None, plugindirs, None, None)
        # Get the list of tools - not every tool will be capable
        # of executing. For example, a tool that supports direct launch
        # against a specific resource manager cannot be used on a
        # system being managed by a different RM.
        self.tools.setCategoriesFilter(self.loader.tools)
        # Load all the tool plugins
        self.tools.collectPlugins()
        # Tool plugins are required to provide a function we can
        # probe to determine if they are capable of operating - check
        # those now and prune those tools that cannot support this
        # environment

        # Build the utilities plugins
        self.utilities = PluginManager(None, plugindirs, None, None)
        # Get the list of available utilities.
        self.utilities.setCategoriesFilter(self.loader.utilities)
        # Load all the utility plugins
        self.utilities.collectPlugins()

        # since we use these all over the place, find the
        # ExecuteCmd and ModuleCmd plugins and record them
        availUtil = list(self.loader.utilities.keys())
        for util in availUtil:
            for pluginInfo in self.utilities.getPluginsOfCategory(util):
                if "ExecuteCmd" == pluginInfo.plugin_object.print_name():
                    self.execmd = pluginInfo.plugin_object
                elif "ModuleCmd" == pluginInfo.plugin_object.print_name():
                    self.modcmd = pluginInfo.plugin_object
                    # initialize this module
                    self.modcmd.setCommand(self.options)
                elif "Watchdog" == pluginInfo.plugin_object.print_name():
                    self.watchdog = pluginInfo.plugin_object
            if self.execmd is not None and self.modcmd is not None and self.watchdog is not None:
                break
        if self.execmd is None:
            print("ExecuteCmd plugin was not found")
            print("This is a basic capability required")
            print("for MTT operations - cannot continue")
            sys.exit(1)

        # Configure harasser plugin
        for pluginInfo in self.tools.getPluginsOfCategory("Harasser"):
            if "Harasser" == pluginInfo.plugin_object.print_name():
                self.harasser = pluginInfo.plugin_object
                break
        if self.harasser is None:
            print("Harasser plugin was not found")
            print("This is required for all TestRun plugins")
            print("cannot continue")
            sys.exit(1)

        # similarly, capture the highest priority defaults stage here
        pri = -1
        for pluginInfo in self.stages.getPluginsOfCategory("MTTDefaults"):
            if pri < pluginInfo.plugin_object.priority():
                self.defaults = pluginInfo.plugin_object
                pri = pluginInfo.plugin_object.priority()
        return

    def printInfo(self):
        """Handle the various --list* / --version options: print the
        requested info and exit the process."""
        # Print the available MTT sections out, if requested
        if self.options['listsections']:
            print("Supported MTT stages:")
            # print them in the default order of execution
            for stage in self.loader.stageOrder:
                print("    " + stage)
            sys.exit(0)

        # Print the detected plugins for a given stage
        if self.options['listplugins']:
            # if the list is '*', print the plugins for every stage
            if self.options['listplugins'] == "*":
                sections = self.loader.stageOrder
            else:
                sections = self.options['listplugins'].split(',')
            print()
            for section in sections:
                print(section + ":")
                try:
                    for pluginInfo in self.stages.getPluginsOfCategory(section):
                        print("    " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print("    Invalid stage name " + section)
                print()
            sys.exit(1)

        # Print the options for a given plugin
        if self.options['liststageoptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['liststageoptions'] == "*":
                sections = self.loader.stageOrder
            else:
                sections = self.options['liststageoptions'].split(',')
            print()
            for section in sections:
                print(section + ":")
                try:
                    for pluginInfo in self.stages.getPluginsOfCategory(section):
                        print("    " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, "        ")
                except KeyError:
                    print("    Invalid stage name " + section)
                print()
            sys.exit(1)

        # Print the available MTT tools out, if requested
        if self.options['listtools']:
            print("Available MTT tools:")
            availTools = list(self.loader.tools.keys())
            for tool in availTools:
                print("    " + tool)
            sys.exit(0)

        # Print the detected tool plugins for a given tool type
        if self.options['listtoolmodules']:
            # if the list is '*', print the plugins for every type
            if self.options['listtoolmodules'] == "*":
                print()
                availTools = list(self.loader.tools.keys())
            else:
                availTools = self.options['listtoolmodules'].split(',')
            print()
            for tool in availTools:
                print(tool + ":")
                try:
                    for pluginInfo in self.tools.getPluginsOfCategory(tool):
                        print("    " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print("    Invalid tool type name",tool)
                print()
            sys.exit(1)

        # Print the options for a given plugin
        if self.options['listtooloptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['listtooloptions'] == "*":
                availTools = list(self.loader.tools.keys())
            else:
                availTools = self.options['listtooloptions'].split(',')
            print()
            for tool in availTools:
                print(tool + ":")
                try:
                    for pluginInfo in self.tools.getPluginsOfCategory(tool):
                        print("    " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, "        ")
                except KeyError:
                    print("    Invalid tool type name " + tool)
                print()
            sys.exit(1)

        # Print the available MTT utilities out, if requested
        if self.options['listutils']:
            print("Available MTT utilities:")
            availUtils = list(self.loader.utilities.keys())
            for util in availUtils:
                print("    " + util)
            sys.exit(0)

        # Print the detected utility plugins for a given tool type
        if self.options['listutilmodules']:
            # if the list is '*', print the plugins for every type
            if self.options['listutilmodules'] == "*":
                print()
                availUtils = list(self.loader.utilities.keys())
            else:
                # NOTE(review): key mismatch — the guard above checks
                # 'listutilmodules' but this reads 'listutilitymodules';
                # one of the two spellings is almost certainly wrong.
                availUtils = self.options['listutilitymodules'].split(',')
            print()
            for util in availUtils:
                print(util + ":")
                try:
                    for pluginInfo in self.utilities.getPluginsOfCategory(util):
                        print("    " + pluginInfo.plugin_object.print_name())
                except KeyError:
                    print("    Invalid utility type name")
                print()
            sys.exit(1)

        # Print the options for a given plugin
        if self.options['listutiloptions']:
            # if the list is '*', print the options for every stage/plugin
            if self.options['listutiloptions'] == "*":
                availUtils = list(self.loader.utilities.keys())
            else:
                availUtils = self.options['listutiloptions'].split(',')
            print()
            for util in availUtils:
                print(util + ":")
                try:
                    for pluginInfo in self.utilities.getPluginsOfCategory(util):
                        print("    " + pluginInfo.plugin_object.print_name() + ":")
                        pluginInfo.plugin_object.print_options(self, "        ")
                except KeyError:
                    print("    Invalid utility type name " + util)
                print()
            sys.exit(1)

        # if they asked for the version info, print it and exit
        if self.options['version']:
            for pluginInfo in self.tools.getPluginsOfCategory("Version"):
                print("MTT Base:   " + pluginInfo.plugin_object.getVersion())
                print("MTT Client: " + pluginInfo.plugin_object.getClientVersion())
            sys.exit(0)

    def openLogger(self):
        # there must be a logger
        # utility or we can't do
        # anything useful
        if not self.utilities.activatePluginByName("Logger", "Base"):
            print("Required Logger plugin not found or could not be activated")
            sys.exit(1)
        # execute the provided test description
        self.logger = self.utilities.getPluginByName("Logger", "Base").plugin_object
        self.logger.open(self)
        return

    def fill_log_interpolation(self, basestr, sublog):
        """Recursively flatten *sublog* into LOG-section config options,
        escaping '$' so the ExtendedInterpolation parser ignores it."""
        if isinstance(sublog, str):
            self.config.set("LOG", basestr, sublog.replace("$","$$"))
        elif isinstance(sublog, dict):
            for k,v in sublog.items():
                self.fill_log_interpolation("%s.%s" % (basestr, k), v)
        elif isinstance(sublog, list):
            # a list made up entirely of 2-element pairs is treated as a dict
            if sum([((isinstance(t, list) or isinstance(t, tuple)) and len(t) == 2) for t in sublog]) == len(sublog):
                self.fill_log_interpolation(basestr, {k:v for k,v in sublog})
            else:
                for i,v in enumerate(sublog):
                    self.fill_log_interpolation("%s.%d" % (basestr, i), v)
        else:
            self.fill_log_interpolation(basestr, str(sublog))

    def expandWildCards(self, sections):
        """Expand '*'-style patterns in *sections* against the parsed
        config's section names (supports *text, text*, *text*, *)."""
        expsec = []
        cpsections = list(sections)
        for sec in cpsections:
            if '*' in sec:
                modsec = sec.split('*')
                startswith = modsec[0]
                endswith = modsec[-1]
                findsec = modsec[1:-1]
                allsections = self.config.sections()
                for s in allsections:
                    if not s.startswith(startswith):
                        continue
                    if not s.endswith(endswith):
                        continue
                    # interior fragments must appear, in order
                    found = True
                    s_tmp = s
                    for f in findsec:
                        if not f in s_tmp:
                            found = False
                            break
                        s_tmp = f.join(s_tmp.split(f)[1:])
                    if not found:
                        continue
                    expsec.append(s)
                sections.remove(sec)
        return sections + expsec

    def fill_env_hidden_section(self):
        """fill ENV section with environment variables
        """
        try:
            self.config.add_section('ENV')
        except configparser.DuplicateSectionError:
            pass
        for k,v in os.environ.items():
            self.config.set('ENV', k, v.replace("$","$$"))

    def fill_log_hidden_section(self):
        """Add LOG section filled with log results of stages
        """
        try:
            self.config.add_section('LOG')
        except configparser.DuplicateSectionError:
            pass
        thefulllog = self.logger.getLog(None)
        for e in thefulllog:
            self.fill_log_interpolation(e['section'].replace(":","_"), e)

    def check_for_nondefined_env_variables(self):
        """Scan the .ini files for ${ENV:...} references and exit with an
        error listing any that are not set in the environment."""
        # Check for ENV input
        required_env = []
        all_file_contents = []
        for testFile in self.log['inifiles']:
            file_contents = open(testFile, "r").read()
            # prefix each non-comment line with "file lineno:" for reporting
            file_contents = "\n".join(["%s %d: %s" % (testFile.split("/")[-1],i,l) for i,l in enumerate(file_contents.split("\n")) if not l.lstrip().startswith("#")])
            all_file_contents.append(file_contents)
            if "${ENV:" in file_contents:
                required_env.extend([s.split("}")[0] for s in file_contents.split("${ENV:")[1:]])
        env_not_found = set([e for e in required_env if e not in os.environ.keys()])
        lines_with_env_not_found = []
        for file_contents in all_file_contents:
            lines_with_env_not_found.extend(["%s: %s"%(",".join([e for e in env_not_found if "${ENV:%s}"%e in l]),l) \
                                             for l in file_contents.split("\n") \
                                             if sum(["${ENV:%s}"%e in l for e in env_not_found])])
        if lines_with_env_not_found:
            print("ERROR: Not all required environment variables are defined.")
            print("ERROR: Still need:")
            for l in lines_with_env_not_found:
                print("ERROR: %s"%l)
            sys.exit(1)

    def configTest(self):
        """Parse the .ini test description files, resolve section filters
        (--section / --skipsections, with wildcards), and establish the
        scratch directory and executor defaults."""
        # setup the configuration parser
        # NOTE(review): SafeConfigParser is a deprecated alias of
        # ConfigParser (removed in Python 3.12) — confirm target runtime.
        self.config = configparser.SafeConfigParser(interpolation=configparser.ExtendedInterpolation())

        # Set the config parser to make option names case sensitive.
        self.config.optionxform = str

        # fill ENV section with environemt variables
        self.fill_env_hidden_section()

        # log the list of files - note that the argument parser
        # puts the input files in a list, with the first member
        # being the list of input files
        self.log['inifiles'] = self.args.ini_files[0]
        # initialize the list of active sections
        self.actives = []
        # if they specified a list to execute, then use it
        sections = []
        if self.args.section:
            sections = self.args.section.split(",")
            skip = False
        elif self.args.skipsections:
            sections = self.args.skipsections.split(",")
            skip = True
        else:
            sections = None
        # cycle thru the input files
        for testFile in self.log['inifiles']:
            if not os.path.isfile(testFile):
                print("Test description file",testFile,"not found!")
                sys.exit(1)
        self.config.read(self.log['inifiles'])

        # Check for ENV input
        self.check_for_nondefined_env_variables()

        # find all the sections that match the wild card and expand them
        # this is simple wild carding, ie *text, text*, *text* and *
        # should all work
        if sections is not None:
            sections = self.expandWildCards(sections)

        #if sections is not None:
        #    expsec = []
        #    cpsections = list(sections)
        #    for sec in cpsections:
        #        if '*' in sec:
        #            modsec = sec.replace('*','')
        #            for s in self.config.sections():
        #                if modsec in s:
        #                    expsec.append(s)
        #            sections.remove(sec)
        #    sections = sections + expsec

        for section in self.config.sections():
            if section.startswith("SKIP") or section.startswith("skip"):
                # users often want to temporarily ignore a section
                # of their test definition file, but don't want to
                # remove it lest they forget what it did. So let
                # them just mark the section as "skip" to be ignored
                continue
            # if we are to filter the sections, then do so
            takeus = True
            if sections is not None:
                found = False
                for sec in sections:
                    if sec == section:
                        found = True
                        sections.remove(sec)
                        if skip:
                            takeus = False
                        break
                if not found and not skip:
                    takeus = False
            if takeus:
                self.actives.append(section)

        if sections is not None and 0 != len(sections) and not skip:
            print("ERROR: sections were specified for execution and not found:",sections)
            sys.exit(1)

        # set Defaults -command line args supercede .ini args
        # NOTE(review): bare except clauses below swallow all errors,
        # including typos in option keys — consider narrowing.
        try:
            if not self.options['scratchdir']:
                self.options['scratchdir'] = self.config.get('MTTDefaults', 'scratchdir')
        except:
            try:
                self.options['scratchdir'] = self.config.get('MTTDefaults', 'scratch')
            except:
                self.options['scratchdir'] = os.path.abspath('./mttscratch')
        self.options['scratchdir'] = os.path.abspath(self.options['scratchdir'])
        try:
            if not self.options['executor']:
                self.options['executor'] = self.config.get('MTTDefaults', 'executor')
        except:
            self.options['executor'] = 'sequential'

        # if they want us to clear the scratch, then do so
        if self.options['clean'] and os.path.isdir(self.options['scratchdir']) :
            shutil.rmtree(self.options['scratchdir'])
        # setup the scratch directory
        _mkdir_recursive(self.options['scratchdir'])
        return

    # Used with combinatorial executor, loads next .ini file to be run with the
    # sequential executor
    def configNewTest(self, file):
        # clear the configuration parser
        for section in self.config.sections():
            self.config.remove_section(section)
        # read in the file
        self.config.read(file)
        for section in self.config.sections():
            if section.startswith("SKIP") or section.startswith("skip"):
                # users often want to temporarily ignore a section
                # of their test definition file, but don't want to
                # remove it lest they forget what it did.
So let # them just mark the section as "skip" to be ignored continue if self.logger is not None: self.logger.verbose_print("SECTION: " + section) self.logger.verbose_print(self.config.items(section)) return def executeTest(self, executor="sequential"): self.logger.print_cmdline_args(self) if not self.loaded: print("Plugins have not been loaded - cannot execute test") sys.exit(1) if self.config is None: print("No test definition file was parsed - cannot execute test") sys.exit(1) if not self.tools.getPluginByName(executor, "Executor"): print("Specified executor %s not found" % executor) sys.exit(1) # activate the specified plugin self.tools.activatePluginByName(executor, "Executor") # execute the provided test description executor = self.tools.getPluginByName(executor, "Executor") status = executor.plugin_object.execute(self) if status == 0 and self.options['clean_after'] and os.path.isdir(self.options['scratchdir']): self.logger.verbose_print("Cleaning up scratchdir after successful run") shutil.rmtree(self.options['scratchdir']) return status def printOptions(self, options): # if the options are empty, report that if not options: lines = ["None"] return lines # create the list of options opts = [] vals = list(options.keys()) for val in vals: opts.append(val) if options[val][0] is None: opts.append("None") elif isinstance(options[val][0], bool): if options[val][0]: opts.append("True") else: opts.append("False") elif isinstance(options[val][0], list): opts.append(" ".join(options[val][0])) elif isinstance(options[val][0], int): opts.append(str(options[val][0])) else: opts.append(options[val][0]) opts.append(options[val][1]) # print the options, their default value, and # the help description in 3 column format max1 = 0 max2 = 0 for i in range(0,len(opts),3): # we want all the columns to line up # and left-justify, so first find out # the max len of each of the first two # column entries if len(opts[i]) > max1: max1 = len(opts[i]) if type(opts[i+1]) is not str: 
optout = str(opts[i+1]) else: optout = opts[i+1] if len(optout) > max2: max2 = len(optout) # provide some spacing max1 = max1 + 4 max2 = max2 + 4 # cycle thru again, padding each entry to # align the columns lines = [] sp = " " for i in range(0,len(opts),3): line = opts[i] + (max1-len(opts[i]))*sp if type(opts[i+1]) is not str: optout = str(opts[i+1]) else: optout = opts[i+1] line = line + optout + (max2-len(optout))*sp # to make this more readable, we will wrap the line at # 130 characters. First, see if the line is going to be # too long if 130 < (len(line) + len(opts[i+2])): # split the remaining column into individual words words = opts[i+2].split() first = True for word in words: if (len(line) + len(word)) < 130: if first: line = line + word first = False else: line = line + " " + word else: lines.append(line) line = (max1 + max2)*sp + word if 0 < len(line): lines.append(line) else: # the line is fine - so just add the last piece line = line + opts[i+2] # append the result lines.append(line) # add one blank line lines.append("") return lines def selectPlugin(self, name, category): if category == "stage": try: availStages = list(self.loader.stages.keys()) for stage in availStages: for pluginInfo in self.stages.getPluginsOfCategory(stage): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None elif category == "tool": try: availTools = list(self.loader.tools.keys()) for tool in availTools: for pluginInfo in self.tools.getPluginsOfCategory(tool): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None elif category == "utility": try: availUtils = list(self.loader.utilities.keys()) for util in availUtils: for pluginInfo in self.utilities.getPluginsOfCategory(util): if name == pluginInfo.plugin_object.print_name(): return pluginInfo.plugin_object # didn't find it return None except: return None else: 
print("Unrecognized category:",category) return None
# Connect to the mail database using credentials from the config file.
# cfg, plugin_names, PM and the handle_* helpers are defined earlier in the file.
try:
    dbconn = psycopg2.connect(host=cfg.get("connect", "host"),
                              database=cfg.get("connect", "database"),
                              user=cfg.get("connect", "user"),
                              password=cfg.get("connect", "password"),
                              application_name=os.path.basename(sys.argv[0]))
except psycopg2.Error:
    handle_pg_exception(sys.exc_info())
except configparser.Error:
    # a missing [connect] option surfaces here, not as a DB error
    handle_cfg_exception(sys.exc_info())

# Get context
cmd = argparse.ArgumentParser(description="Email system manager")
try:
    # context is optional on the command line; the default comes from the
    # [call] section of the config, so a config error can be raised here
    cmd.add_argument("context", help="Execution context",
                     choices=plugin_names, nargs="?",
                     default=cfg.get("call", "context"))
    # everything after the context is passed through to the plugin untouched
    cmd.add_argument("contextargs",
                     help="Arguments relevant in a chosen context",
                     nargs=argparse.REMAINDER)
except configparser.Error:
    handle_cfg_exception(sys.exc_info())
args = cmd.parse_args()

# Run the plugin
PM.getPluginByName(args.context).plugin_object.configure(
    args.context, cfg, args.contextargs, dbconn)
PM.getPluginByName(args.context).plugin_object.process()
class MainWindow(QtWidgets.QMainWindow):
    """Main QDICOMMiner window: lets the user build a list of DICOM tags /
    file attributes / plugin columns, then extracts them from a folder of
    DICOM files into a CSV (work done in background threads).
    """

    def __init__(self):
        super(MainWindow, self).__init__()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.setWindowTitle('QDICOMMiner ' + __version__)

        # TODO: Find a better way of stopping the selection of list items. This only sortof works
        self.ui.listWidget.setSelectionMode(QAbstractItemView.NoSelection)

        # Discover yapsy plugins shipped in the application's Plugins folder
        system_location = os.path.dirname(os.path.realpath(__file__))
        plugin_locations = [os.path.join(system_location, 'Plugins')]
        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces(plugin_locations)
        self.plugin_manager.collectPlugins()
        self.plugin_list = []
        for plugin in self.plugin_manager.getAllPlugins():
            self.plugin_list.append(plugin.name)

        # Read the settings from the settings.ini file
        system_location = os.path.dirname(os.path.realpath(__file__))
        QSettings.setPath(QSettings.IniFormat, QSettings.SystemScope, system_location)
        self.settings = QSettings("settings.ini", QSettings.IniFormat)
        if os.path.exists(system_location + "/settings.ini"):
            print("Loading settings from " + system_location + "/settings.ini")

        # Set the last used output file and analyse folder locations
        # (relative paths are resolved against the application folder)
        output_file = self.settings.value('main/lastOutputFile')
        if output_file is None:
            output_file = 'data.csv'
        if not os.path.isabs(output_file):
            abs_output_location = os.path.join(system_location, output_file)
            self.ui.labelOutputFile.setText(abs_output_location)
        else:
            self.ui.labelOutputFile.setText(output_file)
        folder_to_analyse = self.settings.value('main/lastAnalyseFolder')
        if folder_to_analyse is None:
            folder_to_analyse = ''
        if not os.path.isabs(folder_to_analyse):
            abs_folder_to_analyse = os.path.join(system_location, folder_to_analyse)
            self.ui.labelFolderToAnalysePath.setText(abs_folder_to_analyse)
        else:
            self.ui.labelFolderToAnalysePath.setText(folder_to_analyse)
        # clicking either path label opens it in the system file browser
        self.ui.labelFolderToAnalysePath.clicked.connect(
            lambda: self.open_folder_in_explorer(self.ui.labelFolderToAnalysePath.text()))
        self.ui.labelOutputFile.clicked.connect(
            lambda: self.open_folder_in_explorer(self.ui.labelOutputFile.text()))

        # Setup a dictionary of key DICOM description and value the DICOM tag
        names = []
        self.DICOM_dic = {}
        for key in dicom_dict.DicomDictionary:
            names.append(dicom_dict.DicomDictionary[key][2])
            self.DICOM_dic[dicom_dict.DicomDictionary[key][2]] = key
        # auto-complete DICOM descriptions in the tag line edits
        self.completer = QCompleter()
        self.completer.setCaseSensitivity(Qt.CaseInsensitive)
        self.model = QStringListModel()
        self.model.setStringList(names)
        self.completer.setModel(self.model)

        self.ui.pushButtonAddListWidget.clicked.connect(self.add_new_list_widget)
        self.ui.pushButtonBrowseFolderToAnalysePath.clicked.connect(self.browse_for_input_folder)
        self.ui.pushButtonBrowseOutputFilePath.clicked.connect(self.browse_for_output_file)
        self.ui.pushButtonDoAnalysis.clicked.connect(self.do_analysis)

        # background thread that counts candidate files for the progress bar
        self.count_num_of_files_thread = CountFilesThread()
        self.count_num_of_files_thread.num_of_files.connect(self.update_number_of_files)
        self.count_file_number.connect(self.count_num_of_files_thread.count)
        self.count_file_number.emit(self.ui.labelFolderToAnalysePath.text())  # Using a signal to keep thread safety

        self.ui.progressBar.setFormat(' %v/%m (%p%)')
        self.ui.progressBar.hide()

        self.ui.actionSave_Template.triggered.connect(self.save_template)
        self.ui.actionLoad_Template.triggered.connect(self.load_template)
        self.ui.actionAbout.triggered.connect(self.open_about_window)

        self.analyse_and_output_data_thread = AnalyseAndOutputDataThread()

        self.show()

    @staticmethod
    def open_folder_in_explorer(location):
        """Open *location* (a file's parent folder, or the folder itself)
        in the platform file browser."""
        if os.path.isfile(location):
            folder_location = os.path.dirname(location)
        else:
            folder_location = location
        QDesktopServices.openUrl(QUrl.fromLocalFile(folder_location))

    def save_template(self):
        """Serialize the current attribute list to a JSON template file
        chosen by the user."""
        dic = {'DICOM_tag': [],
               'File_information': [],
               'Custom_plugins': [],
               'Version': __version__}
        for index in range(self.ui.listWidget.count()):
            custom_widget = self.ui.listWidget.itemWidget(self.ui.listWidget.item(index))
            if custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.DICOM_TAG.value:
                text = custom_widget.lineEdit.text()
                dic['DICOM_tag'].append(text)
            elif custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.FILE_INFORMATION.value:
                text = custom_widget.comboBoxFileOption.currentText()
                dic['File_information'].append(text)
            elif custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.CUSTOM_PLUGIN.value:
                text = custom_widget.comboBoxPluginOption.currentText()
                dic['Custom_plugins'].append(text)
            else:
                raise NotImplementedError
        filepath = QFileDialog.getSaveFileName(self, 'Save template file', '.', '(*.json)')[0]
        if filepath != '':
            if not filepath.endswith('.json'):
                filepath += '.json'
            with open(filepath, 'w') as f:
                json.dump(dic, f)

    def load_template(self):
        """Load a JSON template file and rebuild the attribute list from it,
        warning about (and skipping) plugins that are not installed."""
        filepath = QFileDialog.getOpenFileName(self, 'Load template file', '.', '*.json')[0]
        if filepath != '':
            with open(filepath, 'r') as f:
                try:
                    dic = json.load(f)
                except json.JSONDecodeError:
                    msg_box = QMessageBox()
                    msg_box.setWindowTitle("Error")
                    msg_box.setText('Failed to open ' + filepath + ' (not a valid JSON file)')
                    msg_box.setIcon(QMessageBox.Critical)
                    msg_box.exec()
                    return
            # only clear the current list once the JSON parsed successfully
            self.ui.listWidget.clear()
            try:
                if 'DICOM_tag' in dic:
                    for tag in dic['DICOM_tag']:
                        self.add_new_list_widget(default_text=tag)
                if 'File_information' in dic:
                    for file_option in dic['File_information']:
                        self.add_new_list_widget(
                            attribute_type=AttributeOptions.FILE_INFORMATION,
                            combo_box_text=file_option)
                if 'Custom_plugins' in dic:
                    for plugin_name in dic['Custom_plugins']:
                        if plugin_name not in [plugin.name for plugin in self.plugin_manager.getAllPlugins()]:
                            msg_box = QMessageBox()
                            msg_box.setWindowTitle("Warning")
                            msg_box.setText('A required custom plugin (' + plugin_name +
                                            ") isn't installed and will be skipped")
                            msg_box.setIcon(QMessageBox.Warning)
                            msg_box.exec()
                        else:
                            self.add_new_list_widget(
                                attribute_type=AttributeOptions.CUSTOM_PLUGIN,
                                combo_box_text=plugin_name)
            except KeyError:
                msg_box = QMessageBox()
                msg_box.setWindowTitle("Error")
                msg_box.setText('Failed to apply template file ' + filepath +
                                ' (Missing information in template file)')
                msg_box.setIcon(QMessageBox.Critical)
                msg_box.exec()
                return

    @staticmethod
    def open_about_window():
        """Show the About dialog."""
        msg_box = QMessageBox()
        msg_box.setWindowTitle("About")
        msg_box.setTextFormat(Qt.RichText)
        msg_box.setText(
            "QDICOMMiner version " + __version__ +
            "<br> Written by Keith Offer" +
            "<br> Relies heavily on the <a href='http://www.pydicom.org/'>pydicom</a> library")
        msg_box.setIcon(QMessageBox.Information)
        msg_box.exec()

    def update_number_of_files(self, num):
        """Slot: show the file count and size the progress bar to match."""
        self.ui.labelNumberOfFiles.setText(str(num) + ' files')
        self.ui.progressBar.setMaximum(num)

    # The checked variable is emitted from the signal, but we don't use it here
    def add_new_list_widget(self, checked=False, default_text='',
                            attribute_type=AttributeOptions.DICOM_TAG,
                            combo_box_text=None):
        """Append a new attribute row to the list, pre-configured for the
        given attribute type (DICOM tag / file info / custom plugin)."""
        new_list_widget_item = QListWidgetItem()
        custom_widget = CustomListWidget(self, self.plugin_list)
        new_list_widget_item.setSizeHint(custom_widget.sizeHint())
        custom_widget.lineEdit.setCompleter(self.completer)
        custom_widget.lineEdit.textChanged.connect(self.line_edit_text_changed)
        custom_widget.pushButton.clicked.connect(
            lambda: self.remove_widget_from_list(new_list_widget_item))
        if attribute_type == AttributeOptions.DICOM_TAG:
            custom_widget.lineEdit.setText(default_text)
        elif attribute_type == AttributeOptions.FILE_INFORMATION:
            # TODO: Should I throw an exception / error message if the index isn't >= 0?
            file_info_index = custom_widget.comboBoxFileOption.findText(combo_box_text)
            if file_info_index >= 0:
                custom_widget.comboBoxFileOption.setCurrentIndex(file_info_index)
            custom_widget.comboBoxFileOption.setVisible(True)
            custom_widget.lineEdit.setVisible(False)
        elif attribute_type == AttributeOptions.CUSTOM_PLUGIN:
            plugin_index = custom_widget.comboBoxPluginOption.findText(combo_box_text)
            if plugin_index >= 0:
                custom_widget.comboBoxPluginOption.setCurrentIndex(plugin_index)
            custom_widget.comboBoxPluginOption.setVisible(True)
            custom_widget.lineEdit.setVisible(False)
        else:
            raise NotImplementedError
        attribute_index = custom_widget.comboBoxAttributeChoice.findText(attribute_type.value)
        if attribute_index >= 0:
            custom_widget.comboBoxAttributeChoice.setCurrentIndex(attribute_index)
        self.ui.listWidget.addItem(new_list_widget_item)
        self.ui.listWidget.setItemWidget(new_list_widget_item, custom_widget)

    def line_edit_text_changed(self, new_string):
        """Slot: color the editing row green if the text is a known DICOM
        description or a raw tag matching dicom_tag_regex, else red."""
        sending_line_edit = self.sender()
        if new_string != '' and (new_string in self.DICOM_dic or
                                 re.match(dicom_tag_regex, new_string)):
            sending_line_edit.setStyleSheet("QLineEdit { background: rgb(0, 255, 0); }")
        else:
            sending_line_edit.setStyleSheet("QLineEdit { background: rgb(255, 0, 0); }")

    def remove_widget_from_list(self, list_widget_item):
        """Remove a row from the attribute list."""
        self.ui.listWidget.takeItem(self.ui.listWidget.row(list_widget_item))

    def browse_for_input_folder(self):
        """Pick the folder to analyse; persists the choice and re-counts files."""
        starting_location = self.settings.value('main/lastAnalyseFolder')
        if starting_location is None:
            starting_location = '.'
        filepath = QFileDialog.getExistingDirectory(self, 'Input directory', starting_location)
        if filepath != '':
            self.ui.labelFolderToAnalysePath.setText(filepath)
            self.count_file_number.emit(filepath)
            self.settings.setValue('main/lastAnalyseFolder', filepath)

    def browse_for_output_file(self):
        """Pick the output CSV path; persists the choice."""
        starting_location = self.settings.value('main/lastOutputFile')
        if starting_location is None:
            starting_location = '.'
        # This looks a bit strange, but filenames are the first return value of this function
        # so we need the [0] on the end to grab what we need
        filepath = QFileDialog.getSaveFileName(self, 'Output file', starting_location, '(*.csv)')[0]
        if filepath != '':
            if not filepath.endswith('.csv'):
                filepath += '.csv'
            self.ui.labelOutputFile.setText(filepath)
            self.settings.setValue('main/lastOutputFile', filepath)

    def do_analysis(self):
        """Validate the attribute list, build the CSV header, and kick off
        the background analysis thread via the create_csv signal."""
        if os.path.exists(self.ui.labelOutputFile.text()):
            msg_box = QMessageBox()
            msg_box.setIcon(QMessageBox.Question)
            msg_box.setText("The output file " + self.ui.labelOutputFile.text() +
                            " already exists. Are you sure you want to overwrite it?")
            overwrite_button = QPushButton('Overwrite')
            msg_box.addButton(overwrite_button, QMessageBox.YesRole)
            msg_box.addButton(QPushButton('Cancel'), QMessageBox.RejectRole)
            msg_box.exec()
            if msg_box.clickedButton() != overwrite_button:
                return
        header_DICOM = ''
        header_file_info = ''
        header_custom_plugins = ''
        dicom_tags = []
        file_attributes = []
        selected_plugins = []
        # Generate the CSV header
        for index in range(self.ui.listWidget.count()):
            custom_widget = self.ui.listWidget.itemWidget(self.ui.listWidget.item(index))
            if custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.FILE_INFORMATION.value:
                # Handle file attributes (e.g. path, size etc)
                header_file_info += custom_widget.comboBoxFileOption.currentText() + ','
                file_attributes.append(custom_widget.comboBoxFileOption.currentText())
            elif custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.DICOM_TAG.value:
                # Handle DICOM tags
                text = custom_widget.lineEdit.text()
                try:
                    if text == '':
                        # We have to manually raise this as searching for '' won't throw an exception but won't work
                        raise KeyError
                    if re.match(dicom_tag_regex, text):
                        search_results = re.search(dicom_tag_regex, text)
                        # Note that group 0 is the whole match that we don't want
                        dicom_tags.append((search_results.group(1), search_results.group(2)))
                    else:
                        dicom_tags.append(self.DICOM_dic[custom_widget.lineEdit.text()])
                except KeyError:
                    msg_box = QMessageBox()
                    msg_box.setWindowTitle("Error")
                    msg_box.setText('"' + text + '" is not a valid attribute')
                    msg_box.setIcon(QMessageBox.Critical)
                    msg_box.exec()
                    return
                header_DICOM += text.replace(',', ' ') + ','
            elif custom_widget.comboBoxAttributeChoice.currentText() == AttributeOptions.CUSTOM_PLUGIN.value:
                plugin_name = custom_widget.comboBoxPluginOption.currentText()
                if plugin_name != '':
                    header_custom_plugins += self.plugin_manager.getPluginByName(
                        plugin_name).plugin_object.column_headers()
                    selected_plugins.append(plugin_name)
            else:
                raise NotImplementedError
        csv_header = (header_file_info + header_DICOM + header_custom_plugins)[0:-1]  # Remove the last comma
        self.ui.progressBar.show()
        self.analyse_and_output_data_thread.current_file.connect(
            lambda num: self.ui.progressBar.setValue(num))
        self.create_csv.connect(self.analyse_and_output_data_thread.run)
        self.analyse_and_output_data_thread.finished.connect(self.csv_making_finished)
        self.create_csv.emit(self.ui.labelOutputFile.text(),
                             self.ui.labelFolderToAnalysePath.text(),
                             csv_header, dicom_tags, file_attributes, selected_plugins)

    def csv_making_finished(self):
        """Slot: hide the progress bar when the analysis thread finishes."""
        self.ui.progressBar.hide()

    # class-level Qt signals (declared after the methods in the original)
    create_csv = pyqtSignal(str, str, str, list, list, list)
    count_file_number = pyqtSignal(str)
class Nikola(object):
    """Class that handles site generation.

    Takes a site config as argument on creation.
    """

    def __init__(self, **config):
        """Setup proper environment for running tasks."""
        # site-wide state filled in by scan_posts()
        self.global_data = {}
        self.posts_per_year = defaultdict(list)
        self.posts_per_tag = defaultdict(list)
        self.timeline = []
        self.pages = []
        self._scanned = False

        # This is the default config
        # TODO: fill it
        self.config = {
            "ARCHIVE_PATH": "",
            "ARCHIVE_FILENAME": "archive.html",
            "DEFAULT_LANG": "en",
            "OUTPUT_FOLDER": "output",
            "CACHE_FOLDER": "cache",
            "FILES_FOLDERS": {"files": ""},
            "LISTINGS_FOLDER": "listings",
            "ADD_THIS_BUTTONS": True,
            "INDEX_DISPLAY_POST_COUNT": 10,
            "INDEX_TEASERS": False,
            "RSS_TEASERS": True,
            "MAX_IMAGE_SIZE": 1280,
            "USE_FILENAME_AS_TITLE": True,
            "SLUG_TAG_PATH": False,
            "INDEXES_TITLE": "",
            "INDEXES_PAGES": "",
            "FILTERS": {},
            "USE_BUNDLES": True,
            "TAG_PAGES_ARE_INDEXES": False,
            "THEME": "default",
            "COMMENTS_IN_GALLERIES": False,
            "COMMENTS_IN_STORIES": False,
            "FILE_METADATA_REGEXP": None,
            "post_compilers": {
                "rest": [".txt", ".rst"],
                "markdown": [".md", ".mdown", ".markdown"],
                "html": [".html", ".htm"],
            },
        }
        self.config.update(config)
        # default translation map: just the default language at the site root
        self.config["TRANSLATIONS"] = self.config.get("TRANSLATIONS",
                                                      {self.config["DEFAULT_LANG"]: ""})

        self.THEMES = utils.get_theme_chain(self.config["THEME"])
        self.MESSAGES = utils.load_messages(self.THEMES, self.config["TRANSLATIONS"])

        # yapsy plugin manager; plugins are looked up both next to this
        # module and in the working directory
        self.plugin_manager = PluginManager(
            categories_filter={
                "Command": Command,
                "Task": Task,
                "LateTask": LateTask,
                "TemplateSystem": TemplateSystem,
                "PageCompiler": PageCompiler,
            }
        )
        self.plugin_manager.setPluginInfoExtension("plugin")
        self.plugin_manager.setPluginPlaces(
            [str(os.path.join(os.path.dirname(__file__), "plugins")),
             str(os.path.join(os.getcwd(), "plugins"))]
        )
        self.plugin_manager.collectPlugins()

        self.commands = {}
        # Activate all command plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Command"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)
            pluginInfo.plugin_object.short_help = pluginInfo.description
            self.commands[pluginInfo.name] = pluginInfo.plugin_object

        # Activate all task plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)

        # set global_context for template rendering
        self.GLOBAL_CONTEXT = self.config.get("GLOBAL_CONTEXT", {})
        self.GLOBAL_CONTEXT["messages"] = self.MESSAGES
        self.GLOBAL_CONTEXT["_link"] = self.link
        self.GLOBAL_CONTEXT["rel_link"] = self.rel_link
        self.GLOBAL_CONTEXT["abs_link"] = self.abs_link
        self.GLOBAL_CONTEXT["exists"] = self.file_exists
        self.GLOBAL_CONTEXT["add_this_buttons"] = self.config["ADD_THIS_BUTTONS"]
        self.GLOBAL_CONTEXT["index_display_post_count"] = self.config["INDEX_DISPLAY_POST_COUNT"]
        self.GLOBAL_CONTEXT["use_bundles"] = self.config["USE_BUNDLES"]
        if "date_format" not in self.GLOBAL_CONTEXT:
            self.GLOBAL_CONTEXT["date_format"] = "%Y-%m-%d %H:%M"

        # check if custom css exist and is not empty
        for files_path in list(self.config["FILES_FOLDERS"].keys()):
            custom_css_path = os.path.join(files_path, "assets/css/custom.css")
            if self.file_exists(custom_css_path, not_empty=True):
                self.GLOBAL_CONTEXT["has_custom_css"] = True
                break
        else:
            # for/else: no folder had a non-empty custom.css
            self.GLOBAL_CONTEXT["has_custom_css"] = False

        # Load template plugin
        template_sys_name = utils.get_template_engine(self.THEMES)
        pi = self.plugin_manager.getPluginByName(template_sys_name, "TemplateSystem")
        if pi is None:
            sys.stderr.write("Error loading %s template system plugin\n" % template_sys_name)
            sys.exit(1)
        self.template_system = pi.plugin_object
        lookup_dirs = [os.path.join(utils.get_theme_path(name), "templates")
                       for name in self.THEMES]
        self.template_system.set_directories(lookup_dirs, self.config["CACHE_FOLDER"])

        # Load compiler plugins
        self.compilers = {}
        self.inverse_compilers = {}
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("PageCompiler"):
            self.compilers[pluginInfo.name] = pluginInfo.plugin_object.compile_html

    def get_compiler(self, source_name):
        """Get the correct compiler for a post from `conf.post_compilers`

        To make things easier for users, the mapping in conf.py is
        compiler->[extensions], although this is less convenient for us.
        The majority of this function is reversing that dictionary and
        error checking.
        """
        ext = os.path.splitext(source_name)[1]
        try:
            # cached from a previous lookup of the same extension
            compile_html = self.inverse_compilers[ext]
        except KeyError:
            # Find the correct compiler for this files extension
            langs = [lang for lang, exts in list(self.config["post_compilers"].items())
                     if ext in exts]
            if len(langs) != 1:
                if len(set(langs)) > 1:
                    exit(
                        "Your file extension->compiler definition is"
                        "ambiguous.\nPlease remove one of the file extensions"
                        "from 'post_compilers' in conf.py\n(The error is in"
                        "one of %s)" % ", ".join(langs)
                    )
                elif len(langs) > 1:
                    # same compiler listed twice for the extension — just dedupe
                    langs = langs[:1]
                else:
                    exit("post_compilers in conf.py does not tell me how to "
                         "handle '%s' extensions." % ext)

            lang = langs[0]
            compile_html = self.compilers[lang]
            self.inverse_compilers[ext] = compile_html

        return compile_html

    def render_template(self, template_name, output_name, context):
        """Render *template_name* with *context* and write it to the (bytes)
        path *output_name*, rewriting all links in the output to be relative.
        """
        local_context = {}
        local_context["template_name"] = template_name
        local_context.update(self.config["GLOBAL_CONTEXT"])
        local_context.update(context)
        data = self.template_system.render_template(template_name, None, local_context)

        # output_name is a filesystem path (bytes) below OUTPUT_FOLDER
        assert isinstance(output_name, bytes)
        assert output_name.startswith(self.config["OUTPUT_FOLDER"].encode("utf8"))
        url_part = output_name.decode("utf8")[len(self.config["OUTPUT_FOLDER"]) + 1:]

        # This is to support windows paths
        url_part = "/".join(url_part.split(os.sep))

        src = urljoin(self.config["BLOG_URL"], url_part)

        parsed_src = urlsplit(src)
        src_elems = parsed_src.path.split("/")[1:]

        def replacer(dst):
            # Rewrite one link target to be relative to the page being written.
            # Refuse to replace links that are full URLs.
            dst_url = urlparse(dst)
            if dst_url.netloc:
                if dst_url.scheme == "link":
                    # Magic link
                    dst = self.link(dst_url.netloc, dst_url.path.lstrip("/"), context["lang"])
                else:
                    return dst

            # Normalize
            dst = urljoin(src, dst)

            # Avoid empty links.
            if src == dst:
                return "#"

            # Check that link can be made relative, otherwise return dest
            parsed_dst = urlsplit(dst)
            if parsed_src[:2] != parsed_dst[:2]:
                return dst

            # Now both paths are on the same site and absolute
            dst_elems = parsed_dst.path.split("/")[1:]

            i = 0
            for (i, s), d in zip(enumerate(src_elems), dst_elems):
                if s != d:
                    break
            # Now i is the longest common prefix
            result = "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:])

            if not result:
                result = "."

            # Don't forget the fragment (anchor) part of the link
            if parsed_dst.fragment:
                result += "#" + parsed_dst.fragment

            assert result, (src, dst, i, src_elems, dst_elems)

            return result

        try:
            os.makedirs(os.path.dirname(output_name))
        except:
            # directory already exists — fine
            pass
        doc = lxml.html.document_fromstring(data)
        doc.rewrite_links(replacer)
        data = b"<!DOCTYPE html>" + lxml.html.tostring(doc, encoding="utf8")
        with open(output_name, "wb+") as post_file:
            post_file.write(data)

    def path(self, kind, name, lang, is_link=False):
        """Build the path to a certain kind of page.

        kind is one of:

        * tag_index (name is ignored)
        * tag (and name is the tag name)
        * tag_rss (name is the tag name)
        * archive (and name is the year, or None for the main archive index)
        * index (name is the number in index-number)
        * rss (name is ignored)
        * gallery (name is the gallery name)
        * listing (name is the source code file name)

        The returned value is always a path relative to output, like
        "categories/whatever.html"

        If is_link is True, the path is absolute and uses "/" as separator
        (ex: "/archive/index.html").
        If is_link is False, the path is relative to output and uses the
        platform's separator. (ex: "archive\\index.html")
        """
        path = []

        if kind == "tag_index":
            path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                  self.config["TAG_PATH"], "index.html"] if _f]
        elif kind == "tag":
            if self.config["SLUG_TAG_PATH"]:
                name = utils.slugify(name)
            path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                  self.config["TAG_PATH"], name + ".html"] if _f]
        elif kind == "tag_rss":
            if self.config["SLUG_TAG_PATH"]:
                name = utils.slugify(name)
            path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                  self.config["TAG_PATH"], name + ".xml"] if _f]
        elif kind == "index":
            if name > 0:
                path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                      self.config["INDEX_PATH"],
                                      "index-%s.html" % name] if _f]
            else:
                path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                      self.config["INDEX_PATH"], "index.html"] if _f]
        elif kind == "rss":
            path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                  self.config["RSS_PATH"], "rss.xml"] if _f]
        elif kind == "archive":
            if name:
                path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                      self.config["ARCHIVE_PATH"], name,
                                      "index.html"] if _f]
            else:
                path = [_f for _f in [self.config["TRANSLATIONS"][lang],
                                      self.config["ARCHIVE_PATH"],
                                      self.config["ARCHIVE_FILENAME"]] if _f]
        elif kind == "gallery":
            path = [_f for _f in [self.config["GALLERY_PATH"], name,
                                  "index.html"] if _f]
        elif kind == "listing":
            path = [_f for _f in [self.config["LISTINGS_FOLDER"],
                                  name + ".html"] if _f]
        if is_link:
            return "/" + ("/".join(path))
        else:
            return os.path.join(*path)

    def link(self, *args):
        """Shorthand for path(..., is_link=True)."""
        return self.path(*args, is_link=True)

    def abs_link(self, dst):
        """Return the absolute site path for *dst*."""
        # Normalize
        dst = urljoin(self.config["BLOG_URL"], dst)

        return urlparse(dst).path

    def rel_link(self, src, dst):
        """Return *dst* expressed relative to *src* (both site paths)."""
        # Normalize
        src = urljoin(self.config["BLOG_URL"], src)
        dst = urljoin(src, dst)
        # Avoid empty links.
        if src == dst:
            return "#"
        # Check that link can be made relative, otherwise return dest
        parsed_src = urlsplit(src)
        parsed_dst = urlsplit(dst)
        if parsed_src[:2] != parsed_dst[:2]:
            return dst
        # Now both paths are on the same site and absolute
        src_elems = parsed_src.path.split("/")[1:]
        dst_elems = parsed_dst.path.split("/")[1:]
        i = 0
        for (i, s), d in zip(enumerate(src_elems), dst_elems):
            if s != d:
                break
        else:
            # for/else: every compared element matched
            i += 1
        # Now i is the longest common prefix
        return "/".join([".."] * (len(src_elems) - i - 1) + dst_elems[i:])

    def file_exists(self, path, not_empty=False):
        """Returns True if the file exists. If not_empty is True,
        it also has to be not empty."""
        exists = os.path.exists(path)
        if exists and not_empty:
            exists = os.stat(path).st_size > 0
        return exists

    def gen_tasks(self):
        """Yield all doit tasks from Task and LateTask plugins, plus a final
        "all" umbrella task depending on the default ones."""
        task_dep = []
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"):
            for task in pluginInfo.plugin_object.gen_tasks():
                yield task
            if pluginInfo.plugin_object.is_default:
                task_dep.append(pluginInfo.plugin_object.name)

        for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"):
            for task in pluginInfo.plugin_object.gen_tasks():
                yield task
            if pluginInfo.plugin_object.is_default:
                task_dep.append(pluginInfo.plugin_object.name)

        yield {
            "name": b"all",
            "actions": None,
            "clean": True,
            "task_dep": task_dep,
        }

    def scan_posts(self):
        """Scan all the posts."""
        if not self._scanned:
            print("Scanning posts", end="")
            targets = set([])
            for wildcard, destination, template_name, use_in_feeds in self.config["post_pages"]:
                print(".", end="")
                base_len = len(destination.split(os.sep))
                dirname = os.path.dirname(wildcard)
                for dirpath, _, _ in os.walk(dirname):
                    dir_glob = os.path.join(dirpath, os.path.basename(wildcard))
                    dest_dir = os.path.join(*([destination] + dirpath.split(os.sep)[base_len:]))
                    for base_path in glob.glob(dir_glob):
                        post = Post(
                            base_path,
                            self.config["CACHE_FOLDER"],
                            dest_dir,
                            use_in_feeds,
                            self.config["TRANSLATIONS"],
                            self.config["DEFAULT_LANG"],
                            self.config["BLOG_URL"],
                            self.MESSAGES,
                            template_name,
                            self.config["FILE_METADATA_REGEXP"],
                        )
                        # two posts must never render to the same output path
                        for lang, langpath in list(self.config["TRANSLATIONS"].items()):
                            dest = (destination, langpath, dir_glob, post.pagenames[lang])
                            if dest in targets:
                                raise Exception(
                                    "Duplicated output path %r in post %r" %
                                    (post.pagenames[lang], base_path)
                                )
                            targets.add(dest)
                        self.global_data[post.post_name] = post
                        if post.use_in_feeds:
                            self.posts_per_year[str(post.date.year)].append(post.post_name)
                            for tag in post.tags:
                                self.posts_per_tag[tag].append(post.post_name)
                        else:
                            self.pages.append(post)
            for name, post in list(self.global_data.items()):
                self.timeline.append(post)
            self.timeline.sort(key=lambda p: p.date)
            self.timeline.reverse()
            # link each feed post to its chronological neighbours
            post_timeline = [p for p in self.timeline if p.use_in_feeds]
            for i, p in enumerate(post_timeline[1:]):
                p.next_post = post_timeline[i]
            for i, p in enumerate(post_timeline[:-1]):
                p.prev_post = post_timeline[i + 1]
            self._scanned = True
            print("done!")

    def generic_page_renderer(self, lang, post, filters):
        """Render post fragments to final HTML pages."""
        context = {}
        deps = post.deps(lang) + self.template_system.template_deps(post.template_name)
        context["post"] = post
        context["lang"] = lang
        context["title"] = post.title(lang)
        context["description"] = post.description(lang)
        context["permalink"] = post.permalink(lang)
        context["page_list"] = self.pages
        if post.use_in_feeds:
            context["enable_comments"] = True
        else:
            context["enable_comments"] = self.config["COMMENTS_IN_STORIES"]
        output_name = os.path.join(self.config["OUTPUT_FOLDER"],
                                   post.destination_path(lang)).encode("utf8")
        # the post object itself is not hashable for uptodate purposes,
        # so the dependency dict carries links/config values instead
        deps_dict = copy(context)
        deps_dict.pop("post")
        if post.prev_post:
            deps_dict["PREV_LINK"] = [post.prev_post.permalink(lang)]
        if post.next_post:
            deps_dict["NEXT_LINK"] = [post.next_post.permalink(lang)]
        deps_dict["OUTPUT_FOLDER"] = self.config["OUTPUT_FOLDER"]
        deps_dict["TRANSLATIONS"] = self.config["TRANSLATIONS"]
        deps_dict["global"] = self.config["GLOBAL_CONTEXT"]
        deps_dict["comments"] = context["enable_comments"]

        task = {
            "name": output_name,
            "file_dep": deps,
            "targets": [output_name],
            "actions": [(self.render_template, [post.template_name, output_name, context])],
            "clean": True,
            "uptodate": [config_changed(deps_dict)],
        }

        yield utils.apply_filters(task, filters)

    def generic_post_list_renderer(self, lang, posts, output_name, template_name,
                                   filters, extra_context):
        """Renders pages with lists of posts."""
        # This is a name on disk, has to be bytes
        assert isinstance(output_name, bytes)
        deps = self.template_system.template_deps(template_name)
        for post in posts:
            deps += post.deps(lang)
        context = {}
        context["posts"] = posts
        context["title"] = self.config["BLOG_TITLE"]
        context["description"] = self.config["BLOG_DESCRIPTION"]
        context["lang"] = lang
        context["prevlink"] = None
        context["nextlink"] = None
        context.update(extra_context)
        deps_context = copy(context)
        # post objects are replaced by (title, permalink) pairs so the
        # uptodate check only depends on hashable values
        deps_context["posts"] = [(p.titles[lang], p.permalink(lang)) for p in posts]
        deps_context["global"] = self.config["GLOBAL_CONTEXT"]
        task = {
            "name": output_name,
            "targets": [output_name],
            "file_dep": deps,
            "actions": [(self.render_template, [template_name, output_name, context])],
            "clean": True,
            "uptodate": [config_changed(deps_context)],
        }

        return utils.apply_filters(task, filters)
#Load the Modules if args.modules != None: run_mods = [] #Creating our plugin manager manager = PluginManager() manager.setPluginPlaces([cwd + "/modules"]) manager.collectPlugins() #Get all plugins if "all" in args.modules: run_mods = manager.getAllPlugins() else: #Load plugins listed for i in args.modules: run_mods.append(manager.getPluginByName(i)) #Print Module info if (args.modinfo): for i in run_mods: if i.name not in args.modexcl: print(fg(8) + '---' + fg(1) + i.name + fg(8) + '---' + rs) print(fg(8) + 'Description: ' + rs + i.description) print(fg(8) + 'Author: ' + rs + i.author) print(fg(8) + 'Website: ' + rs + i.website) print('') #URLs to use with modules if ((args.url != None and args.modules != None) or (args.urlist != None and args.modules != None)): urls = []
class WeaponSystem(rpyc.Service):
    '''
    RPC Services: This is the code that does the actual password cracking
    and returns the results to orbital control. Currently only supports
    cracking using rainbow tables (RCrackPy)
    '''

    # Class-level state, shared across connections served by this process.
    is_initialized = False
    mutex = Lock()
    is_busy = False
    job_id = None

    def initialize(self):
        ''' Initializes variables, this should only be called once '''
        logging.info("Weapon system initializing ...")
        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces(["plugins/"])
        self.plugin_manager.setCategoriesFilter(FILTERS)
        self.plugin_manager.collectPlugins()
        self.plugins = {}
        logging.info(
            "Loaded %d plugin(s)" % len(self.plugin_manager.getAllPlugins())
        )
        self.__cpu__()
        logging.info("Weapon system online, good hunting.")

    @atomic
    def on_connect(self):
        ''' Called when successfully connected '''
        if not self.is_initialized:
            self.initialize()
            self.is_initialized = True
        logging.info("Uplink to orbital control active")

    def on_disconnect(self):
        ''' Called if the connection is lost/disconnected '''
        logging.info("Disconnected from orbital command server.")

    def __cpu__(self):
        ''' Detects the number of CPU cores on a system (including virtual cores) '''
        if cpu_count is not None:
            try:
                self.cpu_cores = cpu_count()
                logging.info("Detected %d CPU core(s)" % self.cpu_cores)
            except NotImplementedError:
                logging.error("Could not detect number of processors; assuming 1")
                self.cpu_cores = 1
        else:
            # Fall back to POSIX sysconf when multiprocessing.cpu_count is
            # unavailable on this platform.
            try:
                self.cpu_cores = int(sysconf("SC_NPROCESSORS_CONF"))
                logging.info("Detected %d CPU core(s)" % self.cpu_cores)
            except ValueError:
                logging.error("Could not detect number of processors; assuming 1")
                self.cpu_cores = 1

    ############################ [ EXPOSED METHODS ] ############################

    @atomic
    def exposed_crack(self, plugin_name, job_id, hashes, **kwargs):
        '''
        Exposes plugins calls.

        Activates the named plugin, runs it on `hashes`, then deactivates it.
        BUG FIX: previously a plugin failure left is_busy/job_id set forever
        and the plugin activated; the try/finally guarantees state is always
        reset so the weapon system does not report busy indefinitely.
        '''
        assert plugin_name in self.plugins
        self.is_busy = True
        self.job_id = job_id
        self.plugin_manager.activatePluginByName(plugin_name)
        try:
            plugin = self.plugin_manager.getPluginByName(plugin_name)
            # NOTE(review): yapsy's getPluginByName() normally returns a
            # PluginInfo whose callable lives on .plugin_object; this project
            # calls .execute() directly -- confirm against the local
            # PluginManager subclass before changing.
            results = plugin.execute(hashes, **kwargs)
        finally:
            self.plugin_manager.deactivatePluginByName(plugin_name)
            self.job_id = None
            self.is_busy = False
        return results

    def exposed_get_plugin_names(self):
        ''' Returns what algorithms can be cracked '''
        # Fixed log text: it previously said "exposed_get_capabilities"
        # (copy/paste from another method), which made logs misleading.
        logging.info("Method called: exposed_get_plugin_names")
        plugins = self.plugin_manager.getAllPlugins()
        return [plugin.name for plugin in plugins]

    def exposed_get_categories(self):
        ''' Return categories for which we have plugins '''
        categories = []
        for category in self.plugin_manager.getCategories():
            if 0 < len(self.plugin_manager.getPluginsOfCategory(category)):
                categories.append(category)
        return categories

    def exposed_get_category_plugins(self, category):
        ''' Get plugin names for a category '''
        plugins = self.plugin_manager.getPluginsOfCategory(category)
        return [plugin.name for plugin in plugins]

    def exposed_get_plugin_details(self, category, plugin_name):
        ''' Get plugin based on name details '''
        plugin = self.plugin_manager.getPluginByName(plugin_name, category)
        info = {'name': plugin.name}
        info['author'] = plugin.details.get('Documentation', 'author')
        info['website'] = plugin.details.get('Documentation', 'website')
        info['version'] = plugin.details.get('Documentation', 'version')
        info['description'] = plugin.details.get('Documentation', 'description')
        info['copyright'] = plugin.details.get('Documentation', 'copyright')
        info['precomputation'] = plugin.details.getboolean('Core', 'precomputation')
        return info

    def exposed_ping(self):
        ''' Returns a pong message '''
        return "PONG"

    def exposed_is_busy(self):
        ''' Returns True/False if the current system is busy (thread safe) '''
        return self.is_busy

    def exposed_current_job_id(self):
        ''' Returns the current job id (thread safe) '''
        return self.job_id

    def exposed_cpu_count(self):
        ''' Returns the number of detected cpu cores '''
        return self.cpu_cores
pathname = os.path.dirname(sys.argv[0]) path=os.path.abspath(pathname) log = logging.getLogger('yapsy') print "Plugins loading." pluginManager = PluginManager() pluginManager.setPluginPlaces(["%s/plugins/" % path]) pluginManager.collectPlugins() dictPlugin={} for plugin in pluginManager.getAllPlugins(): dictPlugin[plugin.plugin_object.name]=pluginManager.getPluginByName(plugin.plugin_object.name) xmpp.add_event_handler("run_%s" % plugin.plugin_object.name,plugin.plugin_object.execute, threaded=True) xmpp.plugins=pluginManager xmpp.dbpgsql=dbpgsql xmpp.plugins_path="%s/plugins/" % path xmpp.monitor_path="%s/monitor/" % path xmpp.monitor_scripts_path="%s/monitor/scripts/" % path xmpp.config=botconfig monitor=Monitor(xmpp,1) xmpp.monitor=monitor xmpp.started=str(time.time())
# NOTE(review): Python 2 class (uses dict.iteritems() and a bare `print`)
# managing yapsy monitoring modules loaded from MONITOR_MODULE_DIR with the
# ".plugin" info extension.  configForms() mirrors ProfilesManager.configForms
# earlier in this file: one module's view when module_name is given, else a
# {pluginInfo: form} dict for all modules.  prepare() initializes device
# partitions for every module whose params declare module_type == "monitor".
# preSession()/postSession() both pop "module_type" from params before
# delegating -- callers should be aware params is mutated in place.
class MonitorsManager(object): def __init__(self): self.plugin_location = [MONITOR_MODULE_DIR] self.pluginManager = PluginManager(plugin_info_ext="plugin") self.pluginManager.setPluginPlaces(self.plugin_location) self.pluginManager.collectPlugins() def configForms(self, device_serial, module_name=""): """ Get the configuration views of each modules to be displayed on web interface :return: dictionary of pluginInfo as key and form as value """ if module_name: plugin = self.pluginManager.getPluginByName(name=module_name) configForms = plugin.plugin_object.get_view() else: configForms = {} for pluginInfo in self.pluginManager.getAllPlugins(): form = pluginInfo.plugin_object.get_view() configForms[pluginInfo] = form return configForms def prepare(self, modules, session, device_serial): """ Initilizing the partition on android device according to the required settings of monitoring modules :param modules: module's configuration setting from user :param session: current session object :return: is successful """ for module_name, params in modules.iteritems(): #logs.debug("module name: %s" % module_name) #logs.debug("module parameters: %s" % modules[module_name].__str__()) print module_name if params["module_type"] == "monitor": plugin = self.pluginManager.getPluginByName(name=module_name) plugin.plugin_object.prepare(params=modules[module_name], session=session, device_serial=device_serial) return True def count_modules(self): count = 0 for pluginInfo in self.pluginManager.getAllPlugins(): #logs.info("Module %s loaded"% pluginInfo.name) count +=1 return count def modules_info(self, module_name=""): """ Get information with regards to each modules loaded. It includes author, category, copyright, description, details, name, version and website. 
# NOTE(review): the trailing ''' after daemons() opens a string/comment block
# that continues beyond this chunk -- do not treat daemons() as the last
# statement of the file.  daemons() only returns entries for the pluginInfo
# whose name matches `module`.
:return: information of all modules loaded """ modules_info = {} if module_name: plugin = self.pluginManager.getPluginByName(name=module_name) modules_info[plugin.name]= plugin else: for pluginInfo in self.pluginManager.getAllPlugins(): modules_info[pluginInfo.name] = pluginInfo return modules_info def preSession(self, module, params, session, device_serial): """ Get the baseline of device that is required by the specific monitoring module :param module: :param params: :return: """ plugin = self.pluginManager.getPluginByName(name=module) params.pop("module_type") return plugin.plugin_object.preSession(params=params, module=module, session=session, device_serial=device_serial) def postSession(self, module, params, session, device_serial): """ Get the post device monitor session's information that is required by the specific monitoring module :param module: :param params: :return: """ plugin = self.pluginManager.getPluginByName(name=module) params.pop("module_type") return plugin.plugin_object.postSession(params=params, module=module, session=session, device_serial=device_serial) def daemons(self, module, params): """ Get the daemon's path of module :return: dictionary with key as daemon name and daemon's path as value. """ daemons = {} for pluginInfo in self.pluginManager.getAllPlugins(): if pluginInfo.name == module: daemons_path = pluginInfo.plugin_object.daemons(module,False) daemons[pluginInfo.name] = daemons_path # module name : daemons path #logs.debug(daemons) return daemons '''
# NOTE(review): manages two plugin sources: yapsy plugins under
# <curr_directory>/plugins/ and YAML-declared plugins from `plugins_file`.
# get_plugin_by_name() prefers a yapsy plugin (looked up by lowercased name),
# falls back to the YAML config (wrapping the dict in a Plugin object), and
# aborts with HTTP 404 when neither source knows the name.
# SECURITY: load_plugin_config() calls yaml.load(stream) without an explicit
# Loader -- on untrusted files this can execute arbitrary Python; prefer
# yaml.safe_load (left unchanged here to preserve behavior).
# NOTE(review): the map(str.lower, ...) calls are Py2 idioms (return lists);
# under Py3 they would yield one-shot iterators -- verify target runtime.
class EfetchPluginManager(object): """This class manages and creates plugin objects""" def __init__(self, plugins_file, curr_directory): # Plugin Manager Setup self.plugin_manager = PluginManager() self.plugin_manager.setPluginPlaces([curr_directory + u'/plugins/']) self.plugins_file = plugins_file self.reload_plugins() def reload_plugins_file(self): """Reloads all plugins from the YAML file""" self.config_file_plugins = self.load_plugin_config(self.plugins_file) def reload_plugins(self): """Reloads all Yapsy and YAML file plugins""" self.plugin_manager.collectPlugins() for plugin in self.plugin_manager.getAllPlugins(): self.plugin_manager.activatePluginByName(plugin.name) self.reload_plugins_file() def load_plugin_config(self, plugins_file): """Loads the plugin config file""" if not os.path.isfile(plugins_file): logging.warn(u'Could not find Plugin Configuration File "' + plugins_file + u'"') return {} with open(plugins_file, 'r') as stream: try: return yaml.load(stream) except yaml.YAMLError as exc: logging.error(u'Failed to parse Plugin Configuration File') logging.error(exc) return {} def get_all_plugins(self): """Gets a list of all the plugins""" plugins = [] for plugin in self.plugin_manager.getAllPlugins(): plugins.append(plugin.name) for key in self.config_file_plugins: plugins.append(key) return plugins def get_plugin_by_name(self, name): """Gets an Efetch plugin by name""" plugin = self.plugin_manager.getPluginByName(str(name).lower()) if not plugin and name not in self.config_file_plugins: logging.warn(u'Request made for unknown plugin "' + name + u'"') abort(404, u'Could not find plugin "' + name + u'"') elif not plugin: plugin = self.config_file_plugins[name] return Plugin(plugin.get('name', 'None'), plugin.get('description', 'None'), plugin.get('cache', True), plugin.get('popularity', 5), plugin.get('fast', False), plugin.get('store', False), map(str.lower, plugin.get('mimetypes', [])), map(str.lower, plugin.get('extensions', [])), map(str.lower, 
# (continuation of the YAML-backed Plugin construction; the final branch
# returns the yapsy plugin's plugin_object directly.)
plugin.get('os', [])), plugin.get('command', False), plugin.get('format', 'Text'), plugin.get('file', False), plugin.get('openwith', False), plugin.get('icon', 'fa-file-o')) else: return plugin.plugin_object
# NOTE(review): the NEWER of the two Nikola class versions in this file (the
# older one's renderer methods appear near the top of this chunk).  __init__
# builds the default config, ports deprecated options (STRIP_INDEX_HTML ->
# STRIP_INDEXES, BLOG_URL -> SITE_URL), sets up the yapsy PluginManager with
# Command/Task/LateTask/TemplateSystem/PageCompiler/TaskMultiplier categories,
# activates non-disabled plugins, builds GLOBAL_CONTEXT for templates, loads
# the template system and compiler plugins.  Code left byte-identical; the
# lost original line breaks mean statement boundaries fall mid-expression at
# the edges of each physical line below.
class Nikola(object): """Class that handles site generation. Takes a site config as argument on creation. """ EXTRA_PLUGINS = [ 'planetoid', 'ipynb', 'local_search', 'render_mustache', ] def __init__(self, **config): """Setup proper environment for running tasks.""" self.global_data = {} self.posts_per_year = defaultdict(list) self.posts_per_month = defaultdict(list) self.posts_per_tag = defaultdict(list) self.post_per_file = {} self.timeline = [] self.pages = [] self._scanned = False if not config: self.configured = False else: self.configured = True # This is the default config self.config = { 'ADD_THIS_BUTTONS': True, 'ANALYTICS': '', 'ARCHIVE_PATH': "", 'ARCHIVE_FILENAME': "archive.html", 'CACHE_FOLDER': 'cache', 'CODE_COLOR_SCHEME': 'default', 'COMMENTS_IN_GALLERIES': False, 'COMMENTS_IN_STORIES': False, 'CONTENT_FOOTER': '', 'CREATE_MONTHLY_ARCHIVE': False, 'DATE_FORMAT': '%Y-%m-%d %H:%M', 'DEFAULT_LANG': "en", 'DEPLOY_COMMANDS': [], 'DISABLED_PLUGINS': (), 'DISQUS_FORUM': 'nikolademo', 'ENABLED_EXTRAS': (), 'EXTRA_HEAD_DATA': '', 'FAVICONS': {}, 'FILE_METADATA_REGEXP': None, 'FILES_FOLDERS': {'files': ''}, 'FILTERS': {}, 'GALLERY_PATH': 'galleries', 'GZIP_FILES': False, 'GZIP_EXTENSIONS': ('.txt', '.htm', '.html', '.css', '.js', '.json'), 'HIDE_UNTRANSLATED_POSTS': False, 'INDEX_DISPLAY_POST_COUNT': 10, 'INDEX_FILE': 'index.html', 'INDEX_TEASERS': False, 'INDEXES_TITLE': "", 'INDEXES_PAGES': "", 'INDEX_PATH': '', 'LICENSE': '', 'LINK_CHECK_WHITELIST': [], 'LISTINGS_FOLDER': 'listings', 'MARKDOWN_EXTENSIONS': ['fenced_code', 'codehilite'], 'MAX_IMAGE_SIZE': 1280, 'MATHJAX_CONFIG': '', 'OLD_THEME_SUPPORT': True, 'OUTPUT_FOLDER': 'output', 'post_compilers': { "rest": ('.txt', '.rst'), "markdown": ('.md', '.mdown', '.markdown'), "textile": ('.textile',), "txt2tags": ('.t2t',), "bbcode": ('.bb',), "wiki": ('.wiki',), "ipynb": ('.ipynb',), "html": ('.html', '.htm') }, 'POST_PAGES': ( ("posts/*.txt", "posts", "post.tmpl", True), ("stories/*.txt", "stories", 
# NOTE(review): 'THEME_REVEAL_CONGIF_*' below misspells CONFIG, but the same
# spelling is read back later (GLOBAL_CONTEXT['subtheme'/'transition']), so
# renaming it would break existing user configs -- leave as-is.
"story.tmpl", False), ), 'PRETTY_URLS': False, 'REDIRECTIONS': [], 'RSS_LINK': None, 'RSS_PATH': '', 'RSS_TEASERS': True, 'SEARCH_FORM': '', 'SLUG_TAG_PATH': True, 'STORY_INDEX': False, 'STRIP_INDEXES': False, 'SITEMAP_INCLUDE_FILELESS_DIRS': True, 'TAG_PATH': 'categories', 'TAG_PAGES_ARE_INDEXES': False, 'THEME': 'site', 'THEME_REVEAL_CONGIF_SUBTHEME': 'sky', 'THEME_REVEAL_CONGIF_TRANSITION': 'cube', 'THUMBNAIL_SIZE': 180, 'USE_BUNDLES': True, 'USE_CDN': False, 'USE_FILENAME_AS_TITLE': True, 'TIMEZONE': None, } self.config.update(config) # STRIP_INDEX_HTML config has been replaces with STRIP_INDEXES # Port it if only the oldef form is there if 'STRIP_INDEX_HTML' in config and 'STRIP_INDEXES' not in config: print("WARNING: You should configure STRIP_INDEXES instead of STRIP_INDEX_HTML") self.config['STRIP_INDEXES'] = config['STRIP_INDEX_HTML'] # PRETTY_URLS defaults to enabling STRIP_INDEXES unless explicitly disabled if config.get('PRETTY_URLS', False) and 'STRIP_INDEXES' not in config: self.config['STRIP_INDEXES'] = True self.config['TRANSLATIONS'] = self.config.get('TRANSLATIONS', {self.config['DEFAULT_' 'LANG']: ''}) self.THEMES = utils.get_theme_chain(self.config['THEME']) self.MESSAGES = utils.load_messages(self.THEMES, self.config['TRANSLATIONS'], self.config['DEFAULT_LANG']) # SITE_URL is required, but if the deprecated BLOG_URL # is available, use it and warn if 'SITE_URL' not in self.config: if 'BLOG_URL' in self.config: print("WARNING: You should configure SITE_URL instead of BLOG_URL") self.config['SITE_URL'] = self.config['BLOG_URL'] self.default_lang = self.config['DEFAULT_LANG'] self.translations = self.config['TRANSLATIONS'] # BASE_URL defaults to SITE_URL if 'BASE_URL' not in self.config: self.config['BASE_URL'] = self.config.get('SITE_URL') self.plugin_manager = PluginManager(categories_filter={ "Command": Command, "Task": Task, "LateTask": LateTask, "TemplateSystem": TemplateSystem, "PageCompiler": PageCompiler, "TaskMultiplier": TaskMultiplier, 
# BUG(review): in the "Activate all multiplier plugins" loop below,
# removePluginFromCategory(plugin_info, task_type) reuses `task_type` left
# over from the previous for-loop (its last value, "LateTask") instead of
# the literal "TaskMultiplier" -- disabled multiplier plugins are removed
# from the wrong category.  Flagged only; code left byte-identical.
}) self.plugin_manager.setPluginInfoExtension('plugin') if sys.version_info[0] == 3: places = [ os.path.join(os.path.dirname(__file__), 'plugins'), os.path.join(os.getcwd(), 'plugins'), ] else: places = [ os.path.join(os.path.dirname(__file__), utils.sys_encode('plugins')), os.path.join(os.getcwd(), utils.sys_encode('plugins')), ] self.plugin_manager.setPluginPlaces(places) self.plugin_manager.collectPlugins() self.commands = {} # Activate all command plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("Command"): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, "Command") continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) plugin_info.plugin_object.short_help = plugin_info.description self.commands[plugin_info.name] = plugin_info.plugin_object # Activate all task plugins for task_type in ["Task", "LateTask"]: for plugin_info in self.plugin_manager.getPluginsOfCategory(task_type): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all multiplier plugins for plugin_info in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): if (plugin_info.name in self.config['DISABLED_PLUGINS'] or (plugin_info.name in self.EXTRA_PLUGINS and plugin_info.name not in self.config['ENABLED_EXTRAS'])): self.plugin_manager.removePluginFromCategory(plugin_info, task_type) continue self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # Activate all required compiler plugins for plugin_info in 
self.plugin_manager.getPluginsOfCategory("PageCompiler"): if plugin_info.name in self.config["post_compilers"].keys(): self.plugin_manager.activatePluginByName(plugin_info.name) plugin_info.plugin_object.set_site(self) # set global_context for template rendering self.GLOBAL_CONTEXT = { } self.GLOBAL_CONTEXT['messages'] = self.MESSAGES self.GLOBAL_CONTEXT['_link'] = self.link self.GLOBAL_CONTEXT['set_locale'] = s_l self.GLOBAL_CONTEXT['rel_link'] = self.rel_link self.GLOBAL_CONTEXT['abs_link'] = self.abs_link self.GLOBAL_CONTEXT['exists'] = self.file_exists self.GLOBAL_CONTEXT['SLUG_TAG_PATH'] = self.config[ 'SLUG_TAG_PATH'] self.GLOBAL_CONTEXT['add_this_buttons'] = self.config[ 'ADD_THIS_BUTTONS'] self.GLOBAL_CONTEXT['index_display_post_count'] = self.config[ 'INDEX_DISPLAY_POST_COUNT'] self.GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES'] self.GLOBAL_CONTEXT['use_cdn'] = self.config.get("USE_CDN") self.GLOBAL_CONTEXT['favicons'] = self.config['FAVICONS'] self.GLOBAL_CONTEXT['date_format'] = self.config.get( 'DATE_FORMAT', '%Y-%m-%d %H:%M') self.GLOBAL_CONTEXT['blog_author'] = self.config.get('BLOG_AUTHOR') self.GLOBAL_CONTEXT['blog_title'] = self.config.get('BLOG_TITLE') self.GLOBAL_CONTEXT['blog_url'] = self.config.get('SITE_URL', self.config.get('BLOG_URL')) self.GLOBAL_CONTEXT['blog_desc'] = self.config.get('BLOG_DESCRIPTION') self.GLOBAL_CONTEXT['analytics'] = self.config.get('ANALYTICS') self.GLOBAL_CONTEXT['translations'] = self.config.get('TRANSLATIONS') self.GLOBAL_CONTEXT['license'] = self.config.get('LICENSE') self.GLOBAL_CONTEXT['search_form'] = self.config.get('SEARCH_FORM') self.GLOBAL_CONTEXT['disqus_forum'] = self.config.get('DISQUS_FORUM') self.GLOBAL_CONTEXT['mathjax_config'] = self.config.get( 'MATHJAX_CONFIG') self.GLOBAL_CONTEXT['subtheme'] = self.config.get('THEME_REVEAL_CONGIF_SUBTHEME') self.GLOBAL_CONTEXT['transition'] = self.config.get('THEME_REVEAL_CONGIF_TRANSITION') self.GLOBAL_CONTEXT['content_footer'] = self.config.get( 
'CONTENT_FOOTER') self.GLOBAL_CONTEXT['rss_path'] = self.config.get('RSS_PATH') self.GLOBAL_CONTEXT['rss_link'] = self.config.get('RSS_LINK') self.GLOBAL_CONTEXT['sidebar_links'] = utils.Functionary(list, self.config['DEFAULT_LANG']) for k, v in self.config.get('SIDEBAR_LINKS', {}).items(): self.GLOBAL_CONTEXT['sidebar_links'][k] = v self.GLOBAL_CONTEXT['twitter_card'] = self.config.get( 'TWITTER_CARD', {}) self.GLOBAL_CONTEXT['extra_head_data'] = self.config.get('EXTRA_HEAD_DATA') self.GLOBAL_CONTEXT.update(self.config.get('GLOBAL_CONTEXT', {})) # check if custom css exist and is not empty for files_path in list(self.config['FILES_FOLDERS'].keys()): custom_css_path = os.path.join(files_path, 'assets/css/custom.css') if self.file_exists(custom_css_path, not_empty=True): self.GLOBAL_CONTEXT['has_custom_css'] = True break else: self.GLOBAL_CONTEXT['has_custom_css'] = False # Load template plugin template_sys_name = utils.get_template_engine(self.THEMES) pi = self.plugin_manager.getPluginByName( template_sys_name, "TemplateSystem") if pi is None: sys.stderr.write("Error loading {0} template system " "plugin\n".format(template_sys_name)) sys.exit(1) self.template_system = pi.plugin_object lookup_dirs = ['templates'] + [os.path.join(utils.get_theme_path(name), "templates") for name in self.THEMES] self.template_system.set_directories(lookup_dirs, self.config['CACHE_FOLDER']) # Check consistency of USE_CDN and the current THEME (Issue #386) if self.config['USE_CDN']: bootstrap_path = utils.get_asset_path(os.path.join( 'assets', 'css', 'bootstrap.min.css'), self.THEMES) if bootstrap_path.split(os.sep)[-4] != 'site': warnings.warn('The USE_CDN option may be incompatible with your theme, because it uses a hosted version of bootstrap.') # Load compiler plugins self.compilers = {} self.inverse_compilers = {} for plugin_info in self.plugin_manager.getPluginsOfCategory( "PageCompiler"): self.compilers[plugin_info.name] = \ plugin_info.plugin_object def get_compiler(self, 
# (get_compiler: reverses the post_compilers extension map, caching results
# in self.inverse_compilers; ambiguous or unknown extensions exit() with a
# message.  render_template: renders via the template system and rewrites
# all internal links relative to the output file so the site is relocatable.)
source_name): """Get the correct compiler for a post from `conf.post_compilers` To make things easier for users, the mapping in conf.py is compiler->[extensions], although this is less convenient for us. The majority of this function is reversing that dictionary and error checking. """ ext = os.path.splitext(source_name)[1] try: compile_html = self.inverse_compilers[ext] except KeyError: # Find the correct compiler for this files extension langs = [lang for lang, exts in list(self.config['post_compilers'].items()) if ext in exts] if len(langs) != 1: if len(set(langs)) > 1: exit("Your file extension->compiler definition is" "ambiguous.\nPlease remove one of the file extensions" "from 'post_compilers' in conf.py\n(The error is in" "one of {0})".format(', '.join(langs))) elif len(langs) > 1: langs = langs[:1] else: exit("post_compilers in conf.py does not tell me how to " "handle '{0}' extensions.".format(ext)) lang = langs[0] compile_html = self.compilers[lang] self.inverse_compilers[ext] = compile_html return compile_html def render_template(self, template_name, output_name, context): local_context = {} local_context["template_name"] = template_name local_context.update(self.GLOBAL_CONTEXT) local_context.update(context) data = self.template_system.render_template( template_name, None, local_context) assert output_name.startswith( self.config["OUTPUT_FOLDER"]) url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1:] # Treat our site as if output/ is "/" and then make all URLs relative, # making the site "relocatable" src = os.sep + url_part src = os.path.normpath(src) # The os.sep is because normpath will change "/" to "\" on windows src = "/".join(src.split(os.sep)) parsed_src = urlsplit(src) src_elems = parsed_src.path.split('/')[1:] def replacer(dst): # Refuse to replace links that are full URLs. 
dst_url = urlparse(dst) if dst_url.netloc: if dst_url.scheme == 'link': # Magic link dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'), context['lang']) else: return dst # Normalize dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break # Now i is the longest common prefix result = '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) if not result: result = "." # Don't forget the fragment (anchor) part of the link if parsed_dst.fragment: result += "#" + parsed_dst.fragment assert result, (src, dst, i, src_elems, dst_elems) return result try: os.makedirs(os.path.dirname(output_name)) except: pass doc = lxml.html.document_fromstring(data) doc.rewrite_links(replacer) data = b'<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8') with open(output_name, "wb+") as post_file: post_file.write(data) def current_lang(self): # FIXME: this is duplicated, turn into a mixin """Return the currently set locale, if it's one of the available translations, or default_lang.""" lang = utils.LocaleBorg().current_lang if lang: if lang in self.translations: return lang lang = lang.split('_')[0] if lang in self.translations: return lang # whatever return self.default_lang def path(self, kind, name, lang=None, is_link=False): """Build the path to a certain kind of page. 
kind is one of: * tag_index (name is ignored) * tag (and name is the tag name) * tag_rss (name is the tag name) * archive (and name is the year, or None for the main archive index) * index (name is the number in index-number) * rss (name is ignored) * gallery (name is the gallery name) * listing (name is the source code file name) * post_path (name is 1st element in a post_pages tuple) The returned value is always a path relative to output, like "categories/whatever.html" If is_link is True, the path is absolute and uses "/" as separator (ex: "/archive/index.html"). If is_link is False, the path is relative to output and uses the platform's separator. (ex: "archive\\index.html") """ if lang is None: lang = self.current_lang() path = [] if kind == "tag_index": path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], self.config['INDEX_FILE']] if _f] elif kind == "tag": if self.config['SLUG_TAG_PATH']: name = utils.slugify(name) path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], name + ".html"] if _f] elif kind == "tag_rss": if self.config['SLUG_TAG_PATH']: name = utils.slugify(name) path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['TAG_PATH'], name + ".xml"] if _f] elif kind == "index": if name not in [None, 0]: path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['INDEX_PATH'], 'index-{0}.html'.format(name)] if _f] else: path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['INDEX_PATH'], self.config['INDEX_FILE']] if _f] elif kind == "post_path": path = [_f for _f in [self.config['TRANSLATIONS'][lang], os.path.dirname(name), self.config['INDEX_FILE']] if _f] elif kind == "rss": path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['RSS_PATH'], 'rss.xml'] if _f] elif kind == "archive": if name: path = [_f for _f in [self.config['TRANSLATIONS'][lang], self.config['ARCHIVE_PATH'], name, self.config['INDEX_FILE']] if _f] else: path = [_f for _f in 
[self.config['TRANSLATIONS'][lang], self.config['ARCHIVE_PATH'], self.config['ARCHIVE_FILENAME']] if _f] elif kind == "gallery": path = [_f for _f in [self.config['GALLERY_PATH'], name, self.config['INDEX_FILE']] if _f] elif kind == "listing": path = [_f for _f in [self.config['LISTINGS_FOLDER'], name + '.html'] if _f] if is_link: link = '/' + ('/'.join(path)) index_len = len(self.config['INDEX_FILE']) if self.config['STRIP_INDEXES'] and \ link[-(1 + index_len):] == '/' + self.config['INDEX_FILE']: return link[:-index_len] else: return link else: return os.path.join(*path) def link(self, *args): return self.path(*args, is_link=True) def abs_link(self, dst): # Normalize dst = urljoin(self.config['BASE_URL'], dst) return urlparse(dst).path def rel_link(self, src, dst): # Normalize src = urljoin(self.config['BASE_URL'], src) dst = urljoin(src, dst) # Avoid empty links. if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlsplit(src) parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split('/')[1:] dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. 
If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def gen_tasks(self, name, plugin_category): def flatten(task): if isinstance(task, dict): yield task else: for t in task: for ft in flatten(t): yield ft task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory(plugin_category): for task in flatten(pluginInfo.plugin_object.gen_tasks()): yield task for multi in self.plugin_manager.getPluginsOfCategory("TaskMultiplier"): flag = False for task in multi.plugin_object.process(task, name): flag = True yield task if flag: task_dep.append('{0}_{1}'.format(name, multi.plugin_object.name)) if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield { 'name': name, 'actions': None, 'clean': True, 'task_dep': task_dep } def scan_posts(self): """Scan all the posts.""" if self._scanned: return print("Scanning posts", end='') tzinfo = None if self.config['TIMEZONE'] is not None: tzinfo = pytz.timezone(self.config['TIMEZONE']) current_time = utils.current_time(tzinfo) targets = set([]) for wildcard, destination, template_name, use_in_feeds in \ self.config['post_pages']: print(".", end='') dirname = os.path.dirname(wildcard) for dirpath, _, _ in os.walk(dirname): dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) dest_dir = os.path.normpath(os.path.join(destination, os.path.relpath(dirpath, dirname))) full_list = glob.glob(dir_glob) # Now let's look for things that are not in default_lang for lang in self.config['TRANSLATIONS'].keys(): lang_glob = dir_glob + "." 
+ lang translated_list = glob.glob(lang_glob) for fname in translated_list: orig_name = os.path.splitext(fname)[0] if orig_name in full_list: continue full_list.append(orig_name) for base_path in full_list: post = Post( base_path, self.config['CACHE_FOLDER'], dest_dir, use_in_feeds, self.config['TRANSLATIONS'], self.config['DEFAULT_LANG'], self.config['BASE_URL'], self.MESSAGES, template_name, self.config['FILE_METADATA_REGEXP'], self.config['STRIP_INDEXES'], self.config['INDEX_FILE'], tzinfo, current_time, self.config['HIDE_UNTRANSLATED_POSTS'], self.config['PRETTY_URLS'], ) for lang, langpath in list( self.config['TRANSLATIONS'].items()): dest = (destination, langpath, dir_glob, post.meta[lang]['slug']) if dest in targets: raise Exception('Duplicated output path {0!r} ' 'in post {1!r}'.format( post.meta[lang]['slug'], base_path)) targets.add(dest) self.global_data[post.post_name] = post if post.use_in_feeds: self.posts_per_year[ str(post.date.year)].append(post.post_name) self.posts_per_month[ '{0}/{1:02d}'.format(post.date.year, post.date.month)].append(post.post_name) for tag in post.alltags: self.posts_per_tag[tag].append(post.post_name) else: self.pages.append(post) if self.config['OLD_THEME_SUPPORT']: post._add_old_metadata() self.post_per_file[post.destination_path(lang=lang)] = post self.post_per_file[post.destination_path(lang=lang, extension=post.source_ext())] = post for name, post in list(self.global_data.items()): self.timeline.append(post) self.timeline.sort(key=lambda p: p.date) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = post_timeline[i + 1] self._scanned = True print("done!") def generic_page_renderer(self, lang, post, filters): """Render post fragments to final HTML pages.""" context = {} deps = post.deps(lang) + \ self.template_system.template_deps(post.template_name) 
context['post'] = post context['lang'] = lang context['title'] = post.title(lang) context['description'] = post.description(lang) context['permalink'] = post.permalink(lang) context['page_list'] = self.pages if post.use_in_feeds: context['enable_comments'] = True else: context['enable_comments'] = self.config['COMMENTS_IN_STORIES'] extension = self.get_compiler(post.source_path).extension() output_name = os.path.join(self.config['OUTPUT_FOLDER'], post.destination_path(lang, extension)) deps_dict = copy(context) deps_dict.pop('post') if post.prev_post: deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)] deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER'] deps_dict['TRANSLATIONS'] = self.config['TRANSLATIONS'] deps_dict['global'] = self.GLOBAL_CONTEXT deps_dict['comments'] = context['enable_comments'] if post: deps_dict['post_translations'] = post.translated_to task = { 'name': os.path.normpath(output_name), 'file_dep': deps, 'targets': [output_name], 'actions': [(self.render_template, [post.template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" deps = self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config['BLOG_TITLE'] context["description"] = self.config['BLOG_DESCRIPTION'] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.meta[lang]['title'], p.permalink(lang)) for p in posts] deps_context["global"] = self.GLOBAL_CONTEXT task = { 'name': os.path.normpath(output_name), 'targets': [output_name], 'file_dep': deps, 'actions': 
[(self.render_template, [template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_context)] } return utils.apply_filters(task, filters)
class Validator(object):
    """
    Class responsible for validation of job input data.
    It is also responsible for preparation of PBS scripts.
    """

    def __init__(self):
        """
        Initialize defaults.
        """
        #: Min API level
        self.api_min = 1.0
        #: Max API level
        self.api_max = 2.99
        #: Current API level
        self.api_current = 2.0
        #: Current job instance
        # self.job = None
        #: PluginManager instance
        self.pm = PluginManager()

    def init(self):
        """
        Initialize Validator singleton.

        Collects per-service validation plugins from each service's
        ``plugins`` data directory.
        """
        _plugins = []
        # @TODO What about shared plugins??
        for _service_name, _service in G.SERVICE_STORE.items():
            # Plugin
            if _service.plugins is not None:
                _plugin_dir = os.path.join(conf.service_path_data,
                                           _service_name)
                _plugin_dir = os.path.join(_plugin_dir, 'plugins')
                _plugins.append(_plugin_dir)

        # Load plugins
        self.pm.setPluginPlaces(_plugins)
        self.pm.collectPlugins()

    def validate(self, job):
        """
        Validate job input data and update :py:class:`Job` instance with
        validated data.

        :param job: :py:class:`Job` instance
        :raises ValidatorError: on any malformed or unsupported input
        """
        # Do not validate jobs for the second time. This will conserve
        # resources in case scheduler queue is full and we try to resubmit
        if job.data:
            return

        # Make sure job.service is defined
        job.status.service = 'default'

        # Load job file from jobs directory
        _name = os.path.join(conf.gate_path_jobs, job.id())
        with open(_name) as _f:
            _data = json.load(_f)
        logger.debug(u'@Job - Loaded data file %s.', job.id())
        logger.log(VERBOSE, _data)

        # Check if data contains service attribute and that such service was
        # initialized
        if 'service' not in _data or \
                _data['service'] not in G.SERVICE_STORE or \
                _data['service'] == 'default':
            raise ValidatorError("Not supported service: %s." %
                                 _data['service'])

        job.status.service = _data['service']
        _service = G.SERVICE_STORE[_data['service']]
        job.status.scheduler = _service.config['scheduler']

        # Make sure that input dictionary exists
        if 'input' not in _data:
            _data['input'] = {}
        elif not isinstance(_data['input'], dict):
            raise ValidatorError("The 'input' section is not a dictionary")

        # Make sure API level is correct
        if 'api' not in _data:
            raise ValidatorError("Job did not specify API level.")
        if not self.validate_float(['api'], _data['api'],
                                   self.api_min, self.api_max):
            raise ValidatorError("API level %s is not supported." %
                                 _data['api'])
        elif float(_data['api']) < self.api_current:
            # Deprecated API requested. Mark as such
            job.set_flag(Jobs.JobState.FLAG_OLD_API)

        # Make sure no unsupported sections were passed
        for _k in _data:
            if _k not in conf.service_allowed_sections:
                raise ValidatorError("Section '%s' is not allowed in job "
                                     "definition." % _k)

        # Load defaults
        _variables = {
            _k: _v['default'] for _k, _v in _service.variables.items()
        }

        # Load sets
        # FIX: iterate over a snapshot of the items - matching keys are
        # deleted from _data['input'] inside the loop (items() returns a list
        # on Python 2, but mutating while iterating breaks on Python 3).
        for _k, _v in list(_data['input'].items()):
            if _k in _service.sets:
                if isinstance(_v, str) or isinstance(_v, unicode):
                    # Value specified as string - check the format using python
                    # builtin conversion
                    _v = int(_v)
                elif not isinstance(_v, int):
                    # Value specified neither as int nor string - raise error
                    raise ValidatorError(
                        "Set variables have to be of type int or "
                        "string. (%s: %s)" % (_k, _v)
                    )
                if _v != 1:
                    raise ValidatorError(
                        "Set variables only accept value of 1. "
                        "(%s: %s)" % (_k, _v)
                    )
                _variables.update(
                    {_kk: _vv for _kk, _vv in _service.sets[_k].items()}
                )
                del _data['input'][_k]

        # Load variables
        for _k, _v in _data['input'].items():
            if _k in conf.service_reserved_keys or _k.startswith('CIS_CHAIN'):
                raise ValidatorError(
                    "The '%s' variable name is restricted." % _k)
            elif _k in _service.variables:
                _variables[_k] = _v
            else:
                raise ValidatorError(
                    "Not supported variable: %s." % _k)

        # Check that all attribute names are defined in service configuration
        # Validate values of the attributes
        for _k, _v in _variables.items():
            if _k in _service.variables:
                _variables[_k] = self.validate_value(
                    [_k], _v, _service.variables[_k])
                logger.debug(
                    "Value passed validation: %s = %s", _k, _v
                )
            # Check for possible reserved attribute names like service, name,
            # date
            elif _k in conf.service_reserved_keys:
                _variables[_k] = self.validate_value(
                    [_k], _v, G.SERVICE_STORE['default'].variables[_k])
                logger.debug(
                    "Value passed validation: %s = %s", _k, _v
                )
            else:
                raise ValidatorError("Not supported variable: %s." % _k)

        # Validate job output chaining. Check if defined job IDs point to
        # existing jobs in 'done' state.
        if 'chain' in _data:
            if not isinstance(_data['chain'], list) and \
                    not isinstance(_data['chain'], tuple):
                raise ValidatorError("The 'chain' section is not a list")
            self.validate_chain(_data['chain'])
            _chain = []
            # Generate keywords for script substitutions
            _i = 0
            for _id in _data['chain']:
                _variables["CIS_CHAIN%s" % _i] = _id
                _i += 1
                _chain.append(Jobs.JobChain(id=_id))
            logger.debug(
                "Job chain IDs passed validation: %s" % _data['chain']
            )
            job.chain = _chain

        # Job scheduler
        if 'CIS_SCHEDULER' in _variables:
            job.status.scheduler = _variables['CIS_SCHEDULER']

        # Update job data with default values
        job.data = Jobs.JobData(data=_variables)
        logger.log(VERBOSE, 'Validated input data:')
        logger.log(VERBOSE, _variables)

    def validate_value(self, path, value, template, nesting_level=0):
        """
        Validate value for specified service attribute.

        :param path: a list of nested attribute names for logging, e.g.
            [object, list_attribute, 10]
        :param value: value to validate
        :param template: dictionary describing the variable
        :param nesting_level: current nesting level
        :return: validated variable

        The parameter **template** should be of the following form::

            {
                'type': 'float_array',
                'default': [1.0, 2.5],
                'values': [0, 100],
                'length': 10
            }

        - Allowed 'type's:

          * string
          * int
          * float
          * datetime
          * object
          * string_array
          * int_array
          * float_array
          * datetime_array
          * object_array

        - 'default' should be of an appropriate type.
        - 'values' defines allowed value of the variable:

          * string - white list of strings
          * int, float - [min, max]
          * datetime - strptime format string
          * object - dictionary with keys being attribute names and values
            dictionaries defining variable templates

        - 'length' is only mandatory for array types and defines maximum
          allowed length
        """
        # temporary cache variables
        _variable_type = template['type']
        _variable_allowed_values = template['values']

        # Run specific validate method based on type of the validated value
        if _variable_type == 'string':
            # Attribute of type string check the table of allowed values
            if value not in _variable_allowed_values:
                raise ValidatorError(
                    "%s = %s - Value not in the white list (%s)." %
                    (".".join(path), value, _variable_allowed_values))
            return value
        elif _variable_type == 'int':
            try:
                return self.validate_int(
                    path, value,
                    _variable_allowed_values[0], _variable_allowed_values[1]
                )
            except IndexError:
                raise ValidatorError(
                    "Wrong range definition for variable: %s" % ".".join(path)
                )
        elif _variable_type == 'float':
            try:
                return self.validate_float(
                    path, value,
                    _variable_allowed_values[0], _variable_allowed_values[1]
                )
            except IndexError:
                raise ValidatorError(
                    "Wrong range definition for variable: %s" % ".".join(path)
                )
        elif _variable_type == 'datetime':
            try:
                datetime.strptime(value, _variable_allowed_values)
            except ValueError:
                raise ValidatorError(
                    "%s = %s - value not in supported format (%s)" %
                    (".".join(path), value, _variable_allowed_values)
                )
            except TypeError:
                raise ValidatorError(
                    "%s = %s - date should be provided as a string (%s)" %
                    (".".join(path), value, _variable_allowed_values)
                )
            return value
        elif _variable_type == 'object':
            # prevent from infinite recurrence
            if nesting_level >= conf.service_max_nesting_level:
                raise ValidatorError(
                    "Unsupported object nesting level above %d : %s" %
                    (conf.service_max_nesting_level, ".".join(path))
                )
            return self.validate_object(
                path, value, _variable_allowed_values, nesting_level)
        elif _variable_type == 'string_array':
            return self.validate_array(
                path, value, template["length"],
                {'type': 'string', 'values': _variable_allowed_values},
                nesting_level
            )
        elif _variable_type == 'int_array':
            return self.validate_array(
                path, value, template["length"],
                {'type': 'int', 'values': _variable_allowed_values},
                nesting_level
            )
        elif _variable_type == 'float_array':
            return self.validate_array(
                path, value, template["length"],
                {'type': 'float', 'values': _variable_allowed_values},
                nesting_level
            )
        elif _variable_type == 'object_array':
            return self.validate_array(
                path, value, template["length"],
                {'type': 'object', 'values': _variable_allowed_values},
                nesting_level
            )
        # NOTE(review): 'datetime_array' is listed in the docstring but has no
        # dispatch branch, so it falls through here - confirm whether that is
        # intentional before adding it.
        raise ValidatorError("(%s)%s - Unknown variable type" %
                             (_variable_type, ".".join(path)))

    def validate_array(self, path, value, length, template, nesting_level=0):
        """
        Validate array types: string_array, int_array, float_array,
        datetime_array, object_array

        :param path: a list of nested attribute names for logging, e.g.
            [object, list_attribute, 10]
        :param value: value to validate
        :param length: maximum allowed length of the array
        :param template: template defining elements of the array
        :param nesting_level: current nesting level
        :return: validated array
        """
        if not isinstance(value, list) and not isinstance(value, tuple):
            raise ValidatorError(
                "%s is not a proper array" % ".".join(path)
            )
        try:
            if len(value) > length:
                raise ValidatorError(
                    "len(%s) = %s - array exceeds allowed length (%s)" %
                    (".".join(path), len(value), length)
                )
        except IndexError:
            raise ValidatorError(
                "%s has wrong range definition" % ".".join(path)
            )

        # FIX: use enumerate - the previous version never incremented the
        # index, so every element error was reported at position 0.
        _result = []
        for _i, _v in enumerate(value):
            _result.append(self.validate_value(
                path + [str(_i)], _v,
                {
                    "type": template["type"],
                    "values": template["values"]
                },
                nesting_level
            ))
        return _result

    def validate_object(self, path, value, template, nesting_level):
        """
        Validate object type

        :param path: a list of nested attribute names for logging, e.g.
            [object, list_attribute, 10]
        :param value: value to validate
        :param template: template defining the object structure
        :param nesting_level: current nesting level
        :return: validated object (dict of validated attributes with defaults)
        """
        # Check the value format
        if not isinstance(value, dict):
            raise ValidatorError(
                "Value is not a proper dictionary: %s" % ".".join(path)
            )

        # Increase recurrence level
        nesting_level += 1

        # Load defaults
        _inner_variables = {
            _k: _v['default'] for _k, _v in template.items()
        }

        for _k, _v in value.items():
            # Reserved keys
            if _k in conf.service_reserved_keys or _k.startswith('CIS_CHAIN'):
                raise ValidatorError(
                    "The attribute '%s' name of object '%s' is restricted." %
                    (_k, ".".join(path)))
            elif _k in template:
                # Validate value using reccurence
                _inner_variables[_k] = self.validate_value(
                    path + [_k], _v, template[_k], nesting_level)
            else:
                raise ValidatorError(
                    "Not supported attribute '%s' for object '%s'" %
                    (_k, ".".join(path)))

        return _inner_variables

    def validate_int(self, path, value, min, max):
        """
        Validate integer type

        :param path: a list of nested attribute names for logging, e.g.
            [object, list_attribute, 10]
        :param value: value to validate
        :param min: minimal allowed value
        :param max: maximum allowed value
        :return: validated int
        """
        # Attribute of type int - check the format
        if isinstance(value, str) or isinstance(value, unicode):
            # Value specified as string - check the format using python
            # builtin conversion
            try:
                _v = int(value)
            except ValueError:
                raise ValidatorError(
                    "%s = %s - value does not describe an integer" %
                    (".".join(path), value)
                )
        elif not isinstance(value, int):
            # Value specified neither as int nor string - raise error
            raise ValidatorError("%s = %s - value is not an int" %
                                 (".".join(path), value))
        else:
            _v = value

        # Check that attribute value falls in allowed range
        if _v < min or _v > max:
            raise ValidatorError(
                "%s = %s - value not in allowed range (%s)" %
                (".".join(path), _v, (min, max))
            )
        return _v

    def validate_float(self, path, value, min, max):
        """
        Validate float type

        :param path: a list of nested attribute names for logging, e.g.
            [object, list_attribute, 10]
        :param value: value to validate
        :param min: minimal allowed value
        :param max: maximum allowed value
        :return: validated float
        """
        # Attribute of type float - check the format
        if isinstance(value, str) or isinstance(value, unicode):
            # Value specified as string - check the format using python
            # builtin conversion
            try:
                _v = float(value)
            except ValueError:
                raise ValidatorError(
                    "%s = %s - value does not describe a float" %
                    (".".join(path), value)
                )
        elif not isinstance(value, float) and not isinstance(value, int):
            # Value specified neither as float nor string - raise error
            raise ValidatorError("%s = %s - value is not a float" %
                                 (".".join(path), value))
        else:
            _v = value

        # Check that attribute value falls in allowed range
        if _v < min or _v > max:
            raise ValidatorError(
                "%s = %s - value not in allowed range (%s)" %
                (".".join(path), _v, (min, max))
            )
        return _v

    def validate_file(self, key, job, service):
        """
        Validate a job input file using the service validation plugin.

        :param key: name of the input file variable
        :param job: :py:class:`Job` instance the file belongs to
        :param service: service definition providing the validation plugin
        :raises ValidatorInputFileError: when the input file is missing
        """
        # Extract input file name
        _input_dir = os.path.join(conf.gate_path_input, job.id())
        _input_file = os.path.join(_input_dir, key)

        # Check that input file is ready
        if not os.path.isfile(_input_file):
            raise ValidatorInputFileError("Missing input file %s." %
                                          _input_file)

        # Load plugin and run the validate method
        _plugin = self.pm.getPluginByName(service.variables[key]['plugin'],
                                          service.name)
        _plugin.plugin_object.validate(_input_file)

    def validate_file_csv(self, name, type, min, max):
        """
        Validate that every element of a CSV file is a number in range.

        :param name: path of the CSV file
        :param type: 'int' or 'float' - element type to enforce
        :param min: minimal allowed value
        :param max: maximum allowed value
        :return: False when an element fails validation, None otherwise
        """
        # FIX: 'with' guarantees the handle is closed even when validation
        # raises (the previous version leaked the open file), and the path
        # argument is now a list as validate_int/validate_float expect
        # (a bare string would be character-joined by ".".join).
        # NOTE(review): csv.reader yields whole rows, so each _v here is a
        # list - confirm whether per-cell validation was intended.
        with open(name) as _f:
            _csv = csv.reader(_f)
            for _v in _csv:
                if type == 'int':
                    if not self.validate_int(["CSV element"], _v, min, max):
                        return False
                if type == 'float':
                    if not self.validate_float(["CSV element"], _v, min, max):
                        return False

    def validate_chain(self, chain):
        """
        Validate input chains

        :param chain: list of job IDs this job depends on.
        :raises ValidatorError: when a chained job is not finished
        """
        # TODO check only if the chain exists not if its done. Make this job
        # wait if it is not finished
        _finished = []
        try:
            _session = G.STATE_MANAGER.new_session()
            _finished = list(
                _j.id() for _j in G.STATE_MANAGER.get_job_list(
                    'done', session=_session
                )
            )
            _session.close()
        # FIX: narrowed from a bare 'except:' which would also swallow
        # KeyboardInterrupt/SystemExit.
        except Exception:
            logger.error("@PBS - Unable to connect to DB.", exc_info=True)
        logger.debug('Finished jobs: %s', _finished)

        for _id in chain:
            # ID of type string check if it is listed among finished jobs
            if _id not in _finished:
                raise ValidatorError(
                    "Chain job %s did not finish or does not exist." % _id)
args = parser.parse_args() if not (args.port or args.list or args.info): parser.error('No action requested. Use `--port serial_port` to connect to the bord; `--list` to show available plugins or `--info [plugin_name]` to get more information.') # Print list of available plugins and exit if args.list: print "Available plugins:" for plugin in manager.getAllPlugins(): print "\t-", plugin.name exit() # User wants more info about a plugin... if args.info: plugin=manager.getPluginByName(args.info) if plugin == None: # eg: if an import fail inside a plugin, yapsy skip it print "Error: [", args.info, "] not found or could not be loaded. Check name and requirements." else: print plugin.description plugin.plugin_object.show_help() exit() print "\n------------SETTINGS-------------" print "Notch filtering:", args.filtering print "\n-------INSTANTIATING BOARD-------" board = bci.OpenBCIBoard(port=args.port, daisy=args.daisy, filter_data=args.filtering) # Info about effective number of channels and sampling rate
class Cobiv(App):
    """Kivy application entry point for COBIV.

    Discovers yapsy plugins under ``cobiv/modules``, wires them to the root
    widget via a YAML configuration file and dispatches application-wide
    events to registered observers.
    """

    root = None
    # NOTE(review): class-level mutable - every Cobiv instance shares this
    # dict. Harmless while the app is a singleton, but worth confirming.
    observers = {}
    logger = logging.getLogger(__name__)

    def __init__(self, **kwargs):
        """Collect all plugins and make sure ~/.cobiv exists."""
        super(Cobiv, self).__init__(**kwargs)

        self.plugin_manager = PluginManager()
        self.plugin_manager.setPluginPlaces(["cobiv/modules"])
        self.plugin_manager.setCategoriesFilter({
            "View": View,
            "Entity": Entity,
            "Hud": Hud,
            "TagReader": TagReader,
            "Datasource": Datasource,
            "SetManager": SetManager,
            "BookManager": BookManager,
            "Gesture": Gesture
        })

        self.plugin_manager.locatePlugins()
        self.plugin_manager.loadPlugins()

        for plugin in self.plugin_manager.getAllPlugins():
            print("Plugin found : {} {} {}".format(
                plugin.plugin_object, plugin.name, plugin.categories))

        # Make sure the per-user configuration directory exists.
        config_path = os.path.join(os.path.expanduser('~'), '.cobiv')
        if not os.path.exists(config_path):
            os.makedirs(config_path)

    def build_yaml_config(self):
        """Create cobiv.yml with defaults when missing, then load it and feed
        the configuration to the root widget and every plugin."""
        if not os.path.exists('cobiv.yml'):
            data = self.root.build_yaml_main_config()
            for plugin in self.plugin_manager.getAllPlugins():
                plugin.plugin_object.build_yaml_config(data)
            # FIX: 'with' guarantees the handle is closed even when yaml.dump
            # raises (the previous open/close pair could leak the file).
            with open('cobiv.yml', 'w') as f:
                yaml.dump(data, f)

        with open('cobiv.yml') as f:
            config = yaml.safe_load(f)

        self.root.configuration = config
        self.root.read_yaml_main_config(config)

        for plugin in self.plugin_manager.getAllPlugins():
            plugin.plugin_object.read_yaml_config(config)

    def build(self):
        """Build the root widget, register views, configure plugins and
        switch to the configured start view."""
        self.root = MainContainer()

        for plugin in self.plugin_manager.getPluginsOfCategory("View"):
            self.root.available_views[
                plugin.plugin_object.get_name()] = plugin.plugin_object

        self.build_yaml_config()

        for plugin in self.plugin_manager.getAllPlugins():
            plugin.plugin_object.ready()
            print('plugin {} ready'.format(plugin.name))
            self.logger.debug("plugin ready : " + str(plugin.name))

        self.root.switch_view(self.get_config_value('startview', 'help'))

        self.title = "COBIV"

        self.root.ready()

        return self.root

    def on_start(self):
        # self.profile = cProfile.Profile()
        # self.profile.enable()
        pass

    def on_stop(self):
        """Notify every plugin that the application is quitting."""
        for plugin in self.plugin_manager.getAllPlugins():
            plugin.plugin_object.on_application_quit()
        # self.profile.disable()
        # self.profile.dump_stats('cobiv.profile')

    def lookup(self, name, category):
        """Return the plugin object registered under *name* in *category*,
        or None when no such plugin exists."""
        plugin = self.plugin_manager.getPluginByName(name, category=category)
        return plugin.plugin_object if plugin is not None else None

    def lookups(self, category):
        """Return the plugin objects of every plugin in *category*."""
        return [
            plugin.plugin_object
            for plugin in self.plugin_manager.getPluginsOfCategory(category)
        ]

    def register_event_observer(self, evt_name, callback):
        """Subscribe *callback* to the event *evt_name*."""
        # FIX: idiomatic 'not in' instead of 'not x in'.
        if evt_name not in self.observers:
            self.observers[evt_name] = [callback]
        else:
            self.observers[evt_name].append(callback)

    def fire_event(self, evt_name, *args, **kwargs):
        """Invoke every callback subscribed to *evt_name* with the given
        arguments. Unknown events are silently ignored."""
        if evt_name in self.observers:
            for callback in self.observers[evt_name]:
                callback(*args, **kwargs)

    def get_user_path(self, *args):
        """Join *args* below the per-user ~/.cobiv directory."""
        return os.path.join(os.path.expanduser('~'), '.cobiv', *args)

    def get_config_value(self, key, default=None):
        """Look up a dotted *key* in the loaded YAML configuration.

        Returns *default* when the root widget is not built yet or any path
        segment is missing."""
        if self.root is None:
            return default
        keys = key.split('.')
        cfg = self.root.configuration
        for k in keys:
            if k in cfg:
                cfg = cfg.get(k)
            else:
                return default
        return cfg
class Nikola(object):
    """Class that handles site generation.

    Takes a site config as argument on creation.
    """
    # NOTE(review): Python 2 code (print statements, cmp-based sort,
    # list-returning filter()) - keep any edits py2-compatible.

    def __init__(self, **config):
        """Setup proper environment for running tasks."""
        # post_name -> Post for every scanned source file
        self.global_data = {}
        self.posts_per_year = defaultdict(list)
        self.posts_per_tag = defaultdict(list)
        # All posts/pages, newest first (filled by scan_posts)
        self.timeline = []
        self.pages = []
        self._scanned = False

        # This is the default config
        # TODO: fill it
        self.config = {
            'ARCHIVE_PATH': "",
            'ARCHIVE_FILENAME': "archive.html",
            'DEFAULT_LANG': "en",
            'OUTPUT_FOLDER': 'output',
            'FILES_FOLDERS': {
                'files': ''
            },
            'LISTINGS_FOLDER': 'listings',
            'ADD_THIS_BUTTONS': True,
            'INDEX_DISPLAY_POST_COUNT': 10,
            'INDEX_TEASERS': False,
            'MAX_IMAGE_SIZE': 1280,
            'USE_FILENAME_AS_TITLE': True,
            'SLUG_TAG_PATH': False,
            'INDEXES_TITLE': "",
            'INDEXES_PAGES': "",
            'FILTERS': {},
            'USE_BUNDLES': True,
            'TAG_PAGES_ARE_INDEXES': False,
            'THEME': 'default',
            'post_compilers': {
                "rest": ['.txt', '.rst'],
                "markdown": ['.md', '.mdown', '.markdown'],
                "html": ['.html', '.htm'],
            },
        }
        # User-supplied config overrides the defaults above.
        self.config.update(config)
        self.config['TRANSLATIONS'] = self.config.get(
            'TRANSLATIONS', {self.config['DEFAULT_LANG']: ''})

        # Template context shared by every rendered page.
        self.GLOBAL_CONTEXT = self.config.get('GLOBAL_CONTEXT', {})
        self.THEMES = utils.get_theme_chain(self.config['THEME'])
        self.MESSAGES = utils.load_messages(self.THEMES,
                                            self.config['TRANSLATIONS'])
        self.GLOBAL_CONTEXT['messages'] = self.MESSAGES
        self.GLOBAL_CONTEXT['_link'] = self.link
        self.GLOBAL_CONTEXT['rel_link'] = self.rel_link
        self.GLOBAL_CONTEXT['abs_link'] = self.abs_link
        self.GLOBAL_CONTEXT['exists'] = self.file_exists
        self.GLOBAL_CONTEXT['add_this_buttons'] = self.config[
            'ADD_THIS_BUTTONS']
        self.GLOBAL_CONTEXT['index_display_post_count'] = self.config[
            'INDEX_DISPLAY_POST_COUNT']
        self.GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES']

        # yapsy plugin manager: plugins ship with nikola and may also live in
        # a 'plugins' folder of the current working directory.
        self.plugin_manager = PluginManager(categories_filter={
            "Command": Command,
            "Task": Task,
            "LateTask": LateTask,
            "TemplateSystem": TemplateSystem,
            "PageCompiler": PageCompiler,
        })
        self.plugin_manager.setPluginInfoExtension('plugin')
        self.plugin_manager.setPluginPlaces([
            os.path.join(os.path.dirname(__file__), 'plugins'),
            os.path.join(os.getcwd(), 'plugins'),
        ])
        self.plugin_manager.collectPlugins()

        self.commands = {}
        # Activate all command plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Command"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)
            pluginInfo.plugin_object.short_help = pluginInfo.description
            self.commands[pluginInfo.name] = pluginInfo.plugin_object

        # Activate all task plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)

        # Load template plugin
        template_sys_name = utils.get_template_engine(self.THEMES)
        pi = self.plugin_manager.getPluginByName(template_sys_name,
                                                 "TemplateSystem")
        if pi is None:
            sys.stderr.write("Error loading %s template system plugin\n" %
                             template_sys_name)
            sys.exit(1)
        self.template_system = pi.plugin_object
        self.template_system.set_directories([
            os.path.join(utils.get_theme_path(name), "templates")
            for name in self.THEMES
        ])

        # Load compiler plugins
        self.compilers = {}
        # extension -> compile function cache, filled lazily by get_compiler
        self.inverse_compilers = {}

        for pluginInfo in self.plugin_manager.getPluginsOfCategory(
                "PageCompiler"):
            self.compilers[pluginInfo.name] = \
                pluginInfo.plugin_object.compile_html

    def get_compiler(self, source_name):
        """Get the correct compiler for a post from `conf.post_compilers`.

        To make things easier for users, the mapping in conf.py is
        compiler->[extensions], although this is less convenient for us. The
        majority of this function is reversing that dictionary and error
        checking.
        """
        ext = os.path.splitext(source_name)[1]
        try:
            compile_html = self.inverse_compilers[ext]
        except KeyError:
            # Find the correct compiler for this files extension
            langs = [
                lang for lang, exts in self.config['post_compilers'].items()
                if ext in exts
            ]
            if len(langs) != 1:
                if len(set(langs)) > 1:
                    # Same extension mapped to two different compilers.
                    exit("Your file extension->compiler definition is"
                         "ambiguous.\nPlease remove one of the file extensions"
                         "from 'post_compilers' in conf.py\n(The error is in"
                         "one of %s)" % ', '.join(langs))
                elif len(langs) > 1:
                    # Duplicate listing of the same compiler - dedupe.
                    langs = langs[:1]
                else:
                    exit("post_compilers in conf.py does not tell me how to "
                         "handle '%s' extensions." % ext)

            lang = langs[0]
            compile_html = self.compilers[lang]
            # Cache so the reverse lookup only happens once per extension.
            self.inverse_compilers[ext] = compile_html

        return compile_html

    def render_template(self, template_name, output_name, context):
        """Render `template_name` with `context` into `output_name`,
        rewriting all internal links to be relative to the output page."""
        local_context = {}
        local_context["template_name"] = template_name
        local_context.update(self.config['GLOBAL_CONTEXT'])
        local_context.update(context)
        data = self.template_system.render_template(
            template_name, None, local_context)

        assert output_name.startswith(self.config["OUTPUT_FOLDER"])
        url_part = output_name[len(self.config["OUTPUT_FOLDER"]) + 1:]

        # This is to support windows paths
        url_part = "/".join(url_part.split(os.sep))

        src = urlparse.urljoin(self.config["BLOG_URL"], url_part)

        parsed_src = urlparse.urlsplit(src)
        src_elems = parsed_src.path.split('/')[1:]

        def replacer(dst):
            # Refuse to replace links that are full URLs.
            dst_url = urlparse.urlparse(dst)
            if dst_url.netloc:
                if dst_url.scheme == 'link':  # Magic link
                    dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'),
                                    context['lang'])
                else:
                    return dst

            # Normalize
            dst = urlparse.urljoin(src, dst)

            # Avoid empty links.
            if src == dst:
                return "#"

            # Check that link can be made relative, otherwise return dest
            parsed_dst = urlparse.urlsplit(dst)
            if parsed_src[:2] != parsed_dst[:2]:
                return dst

            # Now both paths are on the same site and absolute
            dst_elems = parsed_dst.path.split('/')[1:]

            i = 0
            for (i, s), d in zip(enumerate(src_elems), dst_elems):
                if s != d:
                    break
            # Now i is the longest common prefix
            result = '/'.join(['..'] * (len(src_elems) - i - 1) +
                              dst_elems[i:])

            if not result:
                result = "."

            # Don't forget the fragment (anchor) part of the link
            if parsed_dst.fragment:
                result += "#" + parsed_dst.fragment

            assert result, (src, dst, i, src_elems, dst_elems)

            return result

        try:
            os.makedirs(os.path.dirname(output_name))
        # NOTE(review): bare except deliberately swallows "directory already
        # exists"; it also hides real OSErrors - consider narrowing.
        except:
            pass
        doc = lxml.html.document_fromstring(data)
        doc.rewrite_links(replacer)
        data = '<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8')
        with open(output_name, "w+") as post_file:
            post_file.write(data)

    def path(self, kind, name, lang, is_link=False):
        """Build the path to a certain kind of page.

        kind is one of:

        * tag_index (name is ignored)
        * tag (and name is the tag name)
        * tag_rss (name is the tag name)
        * archive (and name is the year, or None for the main archive index)
        * index (name is the number in index-number)
        * rss (name is ignored)
        * gallery (name is the gallery name)
        * listing (name is the source code file name)

        The returned value is always a path relative to output, like
        "categories/whatever.html"

        If is_link is True, the path is absolute and uses "/" as separator
        (ex: "/archive/index.html").
        If is_link is False, the path is relative to output and uses the
        platform's separator. (ex: "archive\\index.html")
        """
        path = []

        # filter(None, ...) drops empty path components (py2: returns a list)
        if kind == "tag_index":
            path = filter(None, [self.config['TRANSLATIONS'][lang],
                                 self.config['TAG_PATH'], 'index.html'])
        elif kind == "tag":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = filter(None, [self.config['TRANSLATIONS'][lang],
                                 self.config['TAG_PATH'], name + ".html"])
        elif kind == "tag_rss":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = filter(None, [self.config['TRANSLATIONS'][lang],
                                 self.config['TAG_PATH'], name + ".xml"])
        elif kind == "index":
            if name > 0:
                path = filter(None, [self.config['TRANSLATIONS'][lang],
                                     self.config['INDEX_PATH'],
                                     'index-%s.html' % name])
            else:
                path = filter(None, [self.config['TRANSLATIONS'][lang],
                                     self.config['INDEX_PATH'],
                                     'index.html'])
        elif kind == "rss":
            path = filter(None, [self.config['TRANSLATIONS'][lang],
                                 self.config['RSS_PATH'], 'rss.xml'])
        elif kind == "archive":
            if name:
                path = filter(None, [self.config['TRANSLATIONS'][lang],
                                     self.config['ARCHIVE_PATH'], name,
                                     'index.html'])
            else:
                path = filter(None, [self.config['TRANSLATIONS'][lang],
                                     self.config['ARCHIVE_PATH'],
                                     self.config['ARCHIVE_FILENAME']])
        elif kind == "gallery":
            path = filter(None, [self.config['GALLERY_PATH'], name,
                                 'index.html'])
        elif kind == "listing":
            path = filter(None, [self.config['LISTINGS_FOLDER'],
                                 name + '.html'])
        if is_link:
            return '/' + ('/'.join(path))
        else:
            return os.path.join(*path)

    def link(self, *args):
        """Absolute variant of path() - see path() for the argument list."""
        return self.path(*args, is_link=True)

    def abs_link(self, dst):
        """Return the site-absolute path for `dst`."""
        # Normalize
        dst = urlparse.urljoin(self.config['BLOG_URL'], dst)

        return urlparse.urlparse(dst).path

    def rel_link(self, src, dst):
        """Return `dst` as a link relative to `src` (both site paths)."""
        # Normalize
        src = urlparse.urljoin(self.config['BLOG_URL'], src)
        dst = urlparse.urljoin(src, dst)
        # Avoid empty links.
        if src == dst:
            return "#"
        # Check that link can be made relative, otherwise return dest
        parsed_src = urlparse.urlsplit(src)
        parsed_dst = urlparse.urlsplit(dst)
        if parsed_src[:2] != parsed_dst[:2]:
            return dst
        # Now both paths are on the same site and absolute
        src_elems = parsed_src.path.split('/')[1:]
        dst_elems = parsed_dst.path.split('/')[1:]
        i = 0
        for (i, s), d in zip(enumerate(src_elems), dst_elems):
            if s != d:
                break
        else:
            i += 1
        # Now i is the longest common prefix
        return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:])

    def file_exists(self, path, not_empty=False):
        """Returns True if the file exists. If not_empty is True,
        it also has to be not empty."""
        exists = os.path.exists(path)
        if exists and not_empty:
            exists = os.stat(path).st_size > 0
        return exists

    def gen_tasks(self):
        """Yield the doit tasks of every Task/LateTask plugin, plus a final
        'all' umbrella task depending on all default tasks."""
        task_dep = []
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"):
            for task in pluginInfo.plugin_object.gen_tasks():
                yield task
            if pluginInfo.plugin_object.is_default:
                task_dep.append(pluginInfo.plugin_object.name)
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"):
            for task in pluginInfo.plugin_object.gen_tasks():
                yield task
            if pluginInfo.plugin_object.is_default:
                task_dep.append(pluginInfo.plugin_object.name)
        yield {
            'name': 'all',
            'actions': None,
            'clean': True,
            'task_dep': task_dep
        }

    def scan_posts(self):
        """Scan all the posts."""
        if not self._scanned:
            print "Scanning posts ",
            targets = set([])
            for wildcard, destination, _, use_in_feeds in \
                    self.config['post_pages']:
                print ".",
                for base_path in glob.glob(wildcard):
                    post = Post(base_path, destination, use_in_feeds,
                                self.config['TRANSLATIONS'],
                                self.config['DEFAULT_LANG'],
                                self.config['BLOG_URL'],
                                self.MESSAGES)
                    # Two posts mapping to the same output file is an error.
                    for lang, langpath in self.config['TRANSLATIONS'].items():
                        dest = (destination, langpath, post.pagenames[lang])
                        if dest in targets:
                            raise Exception(
                                'Duplicated output path %r in post %r' %
                                (post.pagenames[lang], base_path))
                        targets.add(dest)
                    self.global_data[post.post_name] = post
                    if post.use_in_feeds:
                        self.posts_per_year[str(post.date.year)].append(
                            post.post_name)
                        for tag in post.tags:
                            self.posts_per_tag[tag].append(post.post_name)
                    else:
                        self.pages.append(post)
            for name, post in self.global_data.items():
                self.timeline.append(post)
            # Py2-only cmp sort: newest post first after reverse().
            self.timeline.sort(cmp=lambda a, b: cmp(a.date, b.date))
            self.timeline.reverse()
            # Link each feed post to its chronological neighbours.
            post_timeline = [p for p in self.timeline if p.use_in_feeds]
            for i, p in enumerate(post_timeline[1:]):
                p.next_post = post_timeline[i]
            for i, p in enumerate(post_timeline[:-1]):
                p.prev_post = post_timeline[i + 1]
            self._scanned = True
            print "done!"

    def generic_page_renderer(self, lang, wildcard, template_name,
                              destination, filters):
        """Render post fragments to final HTML pages."""
        for post in glob.glob(wildcard):
            post_name = os.path.splitext(post)[0]
            context = {}
            # `post` is rebound from the glob path to the Post object.
            post = self.global_data[post_name]
            deps = post.deps(lang) + \
                self.template_system.template_deps(template_name)
            context['post'] = post
            context['lang'] = lang
            context['title'] = post.title(lang)
            context['description'] = post.description(lang)
            context['permalink'] = post.permalink(lang)
            context['page_list'] = self.pages
            output_name = os.path.join(
                self.config['OUTPUT_FOLDER'],
                self.config['TRANSLATIONS'][lang],
                destination,
                post.pagenames[lang] + ".html")
            # Hashable dependency context for doit's uptodate check.
            deps_dict = copy(context)
            deps_dict.pop('post')
            if post.prev_post:
                deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)]
            if post.next_post:
                deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)]
            deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER']
            deps_dict['TRANSLATIONS'] = self.config['TRANSLATIONS']
            deps_dict['global'] = self.config['GLOBAL_CONTEXT']
            task = {
                'name': output_name.encode('utf-8'),
                'file_dep': deps,
                'targets': [output_name],
                'actions': [(self.render_template,
                             [template_name, output_name, context])],
                'clean': True,
                'uptodate': [config_changed(deps_dict)],
            }
            yield utils.apply_filters(task, filters)

    def generic_post_list_renderer(self, lang, posts, output_name,
                                   template_name, filters, extra_context):
        """Renders pages with lists of posts."""
        deps = self.template_system.template_deps(template_name)
        for post in posts:
            deps += post.deps(lang)
        context = {}
        context["posts"] = posts
        context["title"] = self.config['BLOG_TITLE']
        context["description"] = self.config['BLOG_DESCRIPTION']
        context["lang"] = lang
        context["prevlink"] = None
        context["nextlink"] = None
        context.update(extra_context)
        # Posts reduced to (title, permalink) pairs so config_changed can
        # hash the dependency context.
        deps_context = copy(context)
        deps_context["posts"] = [(p.titles[lang], p.permalink(lang))
                                 for p in posts]
        deps_context["global"] = self.config['GLOBAL_CONTEXT']
        task = {
            'name': output_name.encode('utf8'),
            'targets': [output_name],
            'file_dep': deps,
            'actions': [(self.render_template,
                         [template_name, output_name, context])],
            'clean': True,
            'uptodate': [config_changed(deps_context)]
        }

        yield utils.apply_filters(task, filters)
class Config(object):
    """
    The :class:`Config <Config>` is responsible for managing the plugins
    and executing given tasks.
    """

    # Registry of every Config created through make(), in creation order.
    _configs = []
    # Highest logging verbosity configured so far (shared class-wide).
    logging_verbosity = 0

    def __init__(self, settings, name):
        """Summary

        Args:
            settings (dict): the dictionary of settings
            name (str): the config name
        """
        assert settings is not None
        assert name is not None
        self._name = name
        self._settings = settings
        self._inspect_analyzer = None
        self._default_analyzer = None
        self._locator = None
        self._categories = None
        # BUGFIX: copy the builtin path list.  The original aliased the
        # module-level _BUILTIN_PATHS list, so the extend() in
        # _configure_plugins leaked every Config's extension paths into
        # all subsequently created Configs.
        self._paths = list(_BUILTIN_PATHS)
        self._plugins = None
        self._stacks = []
        validate_config(settings)
        if Config.logging_verbosity < settings['verbosity']:
            # BUGFIX: log before mutating Config.logging_verbosity;
            # previously both placeholders printed the *new* value.
            logger.info(
                'Increased logging verbosity from %s to %s with the new config...',
                Config.logging_verbosity, settings['verbosity']
            )
            configure_logging(settings['verbosity'])
            Config.logging_verbosity = settings['verbosity']
        self._configure_plugins()

    def _configure_plugins(self):
        """
        Handles initialization of the :class:`Config <Config>`. This method
        shouldn't be called outside of this class.
        """
        logger.debug('Configuring Config')
        # Setup the locators
        # Inspection analyzer, mostly for builtin plugins
        # (resources, tasks, etc)
        self._inspect_analyzer = PluginFileAnalyzerInspection(
            'inspector', _BUILTIN_PATHS
        )
        # The default analyzer for any extension paths that we don't trust.
        self._default_analyzer = PluginFileAnalyzerWithInfoFile(
            'default', extensions='plugin'
        )
        # The order of the analyzers could matter.
        self._locator = PluginFileLocator(
            analyzers=[
                self._inspect_analyzer,
                self._default_analyzer,
            ]
        )
        # Create the categories filter dict
        self._categories = {
            "Resource": Resource,
            "Action": Action,
            "Storage": Storage,
            "Parser": Parser,
        }
        # Setup the search paths
        if self._settings and "extension_paths" in self._settings:
            self._paths.extend(self._settings["extension_paths"])
        # Actually create the PluginManager
        self._plugins = PluginManager(
            categories_filter=self._categories,
            directories_list=self._paths,
            plugin_locator=self._locator
        )
        # Collect the plugins
        self._plugins.collectPlugins()

    @property
    def name(self):
        return self._name

    @property
    def settings(self):
        return self._settings

    @classmethod
    def make(cls, settings=None, name=""):
        """
        When first setting up the Config you should call this class method.

        Args:
            settings (dict, optional): desire settings values overriding the
                defaults.
            name (str, optional): the name of the config

        Returns:
            the created config obj
        """
        logger.debug('Creating Config named "%s"', name)
        # BUGFIX: copy the defaults instead of mutating the module-level
        # _DEFAULT_SETTINGS dict -- previously each make() call's
        # overrides leaked into every later Config's defaults.
        config_settings = dict(_DEFAULT_SETTINGS)
        if settings:
            config_settings.update(settings)
        assert config_settings is not None
        new_config = Config(config_settings, name)
        # Replace an existing Config of the same name, otherwise append.
        for index, config in enumerate(cls._configs):
            if config.name == name:
                # warn() is deprecated in favor of warning()
                logger.warning('Recreating Config named %s', name)
                cls._configs[index] = new_config
                break
        else:
            cls._configs.append(new_config)
        return new_config

    @classmethod
    def make_from_file(cls, filename, name=""):
        """
        Loads the settings dict from a file and passes it to Config.make.

        Args:
            filename (str): name of the file to load
            name (str, optional): the name of the config

        Returns:
            Config: the created config obj
        """
        settings = anyconfig.load(filename, safe=True)
        return cls.make(settings=settings, name=name)

    @classmethod
    def get(cls, name=""):
        """
        Use this to access your desired Config.

        Args:
            name (str, optional): the unique name of the config you want
                returned.

        Returns:
            the config obj

        Raises:
            KeyError: if a config by that name does't exist.
        """
        logger.debug('Retrieving Config named "%s"', name)
        # for/else: raise only after *all* configs have been checked.
        for config in cls._configs:
            if config.name == name:
                return config
        else:
            raise KeyError('No config with the name {} exists'.format(name))

    def get_plugins(self, category_name=None, plugin_name=None):
        """
        get_plugins returns fresh instances of all the plugins fitting the
        search criteria.  While this isn't very memory efficient our plugins
        should be small and few between enough that it'll be worth getting
        independent copies of them. For example we will likely want to work
        with multiple copies of the Same Resource plugin.

        Args:
            category_name (str, optional): a category to search for plugins
                in.
            plugin_name (str, optional): the name of the plugin to look for.

        Returns:
            list: of the plugins that match the criteria.
        """
        results = []
        if category_name and plugin_name:
            plugin_info = self._plugins.getPluginByName(
                plugin_name,
                category=category_name
            )
            if plugin_info:
                # Instantiate a new object of the plugin's class so the
                # caller gets an independent copy.
                results.append(plugin_info.plugin_object.__class__())
        elif category_name and not plugin_name:
            plugin_infos = self._plugins.getPluginsOfCategory(category_name)
            for plugin_info in plugin_infos:
                results.append(plugin_info.plugin_object.__class__())
        elif plugin_name and not category_name:
            # Search every category for a plugin with the given name.
            for category in self._plugins.getCategories():
                plugin_info = self._plugins.getPluginByName(
                    plugin_name, category=category
                )
                if plugin_info:
                    results.append(plugin_info.plugin_object.__class__())
        elif not category_name and not plugin_name:
            plugin_infos = self._plugins.getAllPlugins()
            for plugin_info in plugin_infos:
                results.append(plugin_info.plugin_object.__class__())
        return results
def run(self, output_file, folder_to_analyse, header, dicom_tags,
        file_attributes, custom_plugins):
    """Walk *folder_to_analyse* and write one CSV line per DICOM file.

    For each readable DICOM file the line contains, in order: the
    requested file attributes, the requested DICOM tag values, and the
    output of each named custom plugin.  Emits self.current_file after
    every file (including skipped ones) and self.finished at the end.
    """
    with open(output_file, 'w') as f:
        f.write(header + '\n')
        system_location = os.path.dirname(os.path.realpath(__file__))
        plugin_locations = [os.path.join(system_location, 'Plugins')]
        # We make a new plugin manager here to insure they are running
        # on the new thread
        plugin_manager = PluginManager()
        plugin_manager.setPluginPlaces(plugin_locations)
        plugin_manager.collectPlugins()
        count = 0
        for dirpath, _, filenames in os.walk(folder_to_analyse):
            for filename in filenames:
                output_line = ''
                full_path = os.path.join(dirpath, filename)
                try:
                    ds = pydicom.read_file(full_path)
                # If it isn't a valid DICOM file or we can't load the
                # file, we'll just skip over it
                except (pydicom.errors.InvalidDicomError, FileNotFoundError,
                        OSError, PermissionError):
                    count += 1
                    self.current_file.emit(count)
                    continue
                # Commented-out earlier variant kept for reference:
                # except OSError as e:
                #     if e.args[0] != 6:  # No such device or address
                #         raise
                #     else:
                #         count += 1
                #         self.current_file.emit(count)
                #         continue
                # List the file attributes
                for attribute in file_attributes:
                    try:
                        if attribute == FileOptions.FILE_NAME.value:
                            output_line += filename + ','
                        elif attribute == FileOptions.FILE_PATH.value:
                            output_line += full_path + ','
                        elif attribute == FileOptions.FILE_SIZE.value:
                            # Size in megabytes (decimal), 3 decimals.
                            output_line += str(
                                round(
                                    os.path.getsize(full_path) /
                                    (1000 * 1000), 3)) + ','
                        else:
                            raise NotImplementedError
                    except (FileNotFoundError, OSError, PermissionError):
                        # Attribute unavailable; leave it out of the line.
                        pass
                for tag in dicom_tags:
                    output_line += get_dicom_value_from_tag(ds, tag) + ','
                for plugin_name in custom_plugins:
                    # NOTE(review): getPluginByName returns None for an
                    # unknown plugin name, which would raise here.
                    plugin = plugin_manager.getPluginByName(
                        plugin_name).plugin_object
                    output_line += plugin.generate_values(full_path, ds)
                output_line = output_line[0:-1]  # Remove the last comma
                f.write(output_line + '\n')
                count += 1
                self.current_file.emit(count)
    self.finished.emit()
class app():
    """Verum application object: loads plugins/minions, tracks their
    configuration in an in-memory sqlite database, and dispatches
    enrichment / storage / scoring / minion operations to them."""
    db = None  # the sqlite database of plugins
    plugins = None  # Configured plugins
    storage = None  # The plugin to use for storage
    PluginFolder = None  # Folder where the plugins are
    MinionFolder = None  # Folder where the minions are
    score = None  # the plugin to use for scoring
    classify = None  # the classification plugin
    helper = None  # The verum helper functions
    loc = None  # The verum location

    def __init__(self, PluginFolder=PluginFolder, MinionFolder=MinionFolder):
        #global PluginFolder
        self.PluginFolder = PluginFolder
        #global MinionsFolder
        self.MinionFolder = MinionFolder

        # Load enrichments database
        self.db = self.set_db()

        # LOAD HELPER FROM SAME DIRECTORY
        # NOTE(review): `loc` is a module-level constant (not visible in
        # this chunk) pointing at the verum package directory.
        fp, pathname, description = imp.find_module("helper", [loc])
        self.helper = imp.load_module("helper", fp, pathname, description)

        # Save the verum location
        self.loc = loc[:-6]  # -6 removed the trailing "verum/" from the location.

        # Load the plugins Directory
        if self.PluginFolder:
            self.load_plugins()
        else:
            logging.warning("Plugin folder doesn't exist. Plugins not configured.  Please run set_plugin_folder(<PluginFolder>) to set the plugin folder and then load_plugins() to load plugins.")

    ## PLUGIN FUNCTIONS
    def set_plugin_folder(self, PluginFolder):
        self.PluginFolder = PluginFolder

    def get_plugin_folder(self):
        return self.PluginFolder

    # Load the plugins from the plugin directory.
    def load_plugins(self):
        print "Configuring Plugin manager."
        self.plugins = PluginManager()
        if self.MinionFolder is None:
            self.plugins.setPluginPlaces([self.PluginFolder])
        else:
            self.plugins.setPluginPlaces([self.PluginFolder, self.MinionFolder])
        #self.plugins.collectPlugins()
        self.plugins.locatePlugins()
        self.plugins.loadPlugins()
        print "Plugin manager configured."

        # Loop round the plugins and print their names.
        cur = self.db.cursor()

        # Clear tables
        cur.execute("""DELETE FROM enrichments""")
        cur.execute("""DELETE FROM inputs""")
        cur.execute("""DELETE FROM storage""")
        cur.execute("""DELETE FROM score""")
        cur.execute("""DELETE FROM minion""")

        # Ask each plugin to describe itself, and record it in the
        # matching table keyed on its type (plugin_config[0]).
        for plugin in self.plugins.getAllPlugins():
            plugin_config = plugin.plugin_object.configure()

            # Insert enrichment
            if plugin_config[0] == 'enrichment':  # type
                cur.execute('''INSERT INTO enrichments VALUES (?, ?, ?, ?, ?)''',
                            (plugin_config[2],  # Name
                             int(plugin_config[1]),  # Enabled
                             plugin_config[3],  # Description
                             plugin_config[5],  # Cost
                             plugin_config[6])  # Speed
                            )
                for inp in plugin_config[4]:  # inputs
                    # Insert into inputs table
                    cur.execute('''INSERT INTO inputs VALUES (?,?)''',
                                (plugin_config[2], inp))
                self.db.commit()
            elif plugin_config[0] == 'interface':  # type
                cur.execute('''INSERT INTO storage VALUES (?, ?)''',
                            (plugin_config[2], int(plugin_config[1])))
            elif plugin_config[0] == 'score':
                cur.execute('''INSERT INTO score VALUES (?, ?, ?, ?, ?)''',
                            (plugin_config[2],  # Name
                             int(plugin_config[1]),  # Enabled
                             plugin_config[3],  # Description
                             plugin_config[4],  # Cost
                             plugin_config[5])  # Speed
                            )
            if plugin_config[0] == 'minion':
                # Minions get re-configured with a reference to this app.
                plugin_config = plugin.plugin_object.configure(self)
                cur.execute('''INSERT INTO minion VALUES (?, ?, ?, ?)''',
                            (plugin_config[2],  # Name
                             int(plugin_config[1]),  # Enabled
                             plugin_config[3],  # Description
                             plugin_config[4])  # Speed
                            )

            if plugin.name == "classify":
                # Classify is a unique name.  TODO: figure out if handling
                # multiple 'classify' plugins is necessary
                self.classify = plugin.plugin_object

            print "Configured {2} plugin {0}. Success: {1}".format(plugin.name, plugin_config[1], plugin_config[0])

    def set_db(self):
        """ Sets up the enrichment sqlite in memory database
        """
        conn = sqlite3.connect(":memory:")
        cur = conn.cursor()
        # Create enrichments table
        cur.execute('''CREATE TABLE enrichments (name text NOT NULL PRIMARY KEY,
                                                 configured int,
                                                 description text,
                                                 cost int,
                                                 speed int);''')
        # Create inputs table
        cur.execute('''CREATE TABLE inputs (name text NOT NULL,
                                            input text NOT NULL,
                                            PRIMARY KEY (name, input),
                                            FOREIGN KEY (name) REFERENCES enrichments(name));''')
        # Create interface table
        cur.execute('''CREATE TABLE storage (name text NOT NULL PRIMARY KEY,
                                             configured int );''')
        # Create score table
        cur.execute('''CREATE TABLE score (name text NOT NULL PRIMARY KEY,
                                           configured int,
                                           description text,
                                           cost int,
                                           speed int);''')
        # Create minion table
        cur.execute('''CREATE TABLE minion (name text NOT NULL PRIMARY KEY,
                                            configured int,
                                            description text,
                                            cost int);''')
        conn.commit()
        return conn

    ## ENRICHMENT FUNCTIONS
    def get_inputs(self):
        """ NoneType -> list of strings

        :return: A list of the potential enrichment inputs (ip, domain, etc)
        """
        inputs = list()
        cur = self.db.cursor()
        for row in cur.execute('''SELECT DISTINCT input FROM inputs;'''):
            inputs.append(row[0])
        return inputs

    def get_enrichments(self, inputs, cost=10000, speed=10000, configured=True):
        """

        :param inputs: list of input types.  (e.g. ["ip", "domain"])  All enrichments that match at least 1 input type will be returned.
        :param cost: integer 1-10 of resource cost of running the enrichment.  (1 = cheapest)
        :param speed: integer 1-10 speed of enrichment.  (1 = fastest)
        :param enabled: Plugin is correctly configured.  If false, plugin may not run correctly.
        :return: list of tuples of (names, type) of enrichments matching the criteria
        """
        cur = self.db.cursor()

        if type(inputs) == str:
            inputs = [inputs]

        plugins = list()
        names = list()
        # Dynamically build one "?" placeholder per requested input type.
        for row in cur.execute('''SELECT DISTINCT e.name, i.input
                                  FROM enrichments e, inputs i
                                  WHERE e.name = i.name
                                    AND e.cost <= ?
                                    AND e.speed <= ?
                                    AND configured = ?
                                    AND i.input IN ({0})'''.format(("?," * len(inputs))[:-1]),
                               [cost, speed, int(configured)] + inputs
                               ):
            plugins.append(tuple(row))
        return plugins

    def run_enrichments(self, topic, topic_type, names=None, cost=10, speed=10, start_time=""):
        """

        :param topic: topic to enrich (e.g. "1.1.1.1", "www.google.com")
        :param topic_type: type of topic (e.g. "ip", "domain")
        :param cost: integer 1-10 of resource cost of running the enrichment.  (1 = cheapest)
        :param speed: integer 1-10 speed of enrichment.  (1 = fastest)
        :param names: a name (as string) or a list of names of enrichments to use
        :return: None if storage configured (networkx graph representing the enrichment of the topic
        """
        enrichments = self.get_enrichments([topic_type], cost, speed, configured=True)
        enrichments = [e[0] for e in enrichments]
        #print enrichments  # DEBUG

        g = nx.MultiDiGraph()

        # IF a name(s) are given, subset to them
        if names:
            enrichments = set(enrichments).intersection(set(names))

        for enrichment in enrichments:
            # get the plugin
            plugin = self.plugins.getPluginByName(enrichment)
            # run the plugin
            g2 = plugin.plugin_object.run(topic, start_time)
            # merge the graphs
            for node, props in g2.nodes(data=True):
                g.add_node(node, props)
            for edge in g2.edges(data=True):
                g.add_edge(edge[0], edge[1], attr_dict=edge[2])
        return g

    ## INTERFACE FUNCTIONS
    def get_interfaces(self, configured=None):
        """

        :return: list of strings of names of interface plugins
        """
        cur = self.db.cursor()
        interfaces = list()
        if configured is None:
            for row in cur.execute('''SELECT DISTINCT name FROM storage;'''):
                interfaces.append(row[0])
        else:
            for row in cur.execute('''SELECT DISTINCT name from storage WHERE configured=?;''',
                                   (int(configured),)):
                interfaces.append(row[0])
        return interfaces

    def get_default_interface(self):
        return self.storage

    def set_interface(self, interface):
        """

        :param interface: The name of the plugin to use for storage.

        Sets the storage backend to use.  It must have been configured through a plugin prior to setting.
        """
        cur = self.db.cursor()
        configured_storage = list()
        for row in cur.execute('''SELECT DISTINCT name FROM storage WHERE configured=1;'''):
            configured_storage.append(row[0])
        if interface in configured_storage:
            self.storage = interface
        else:
            raise ValueError("Requested interface {0} not configured. Options are {1}.".format(interface, configured_storage))

    def get_minions(self, cost=10000, configured=None):
        """

        :param cost: a maximum cost of running the minion
        :param configured: True, False, or None (for both).
        :return: list of strings of tuples of (name, description) of minion plugins
        """
        cur = self.db.cursor()
        minions = list()
        if configured is None:
            for row in cur.execute('''SELECT DISTINCT name, description FROM minion WHERE cost <= ?;''',
                                   [int(cost)]):
                minions.append(tuple(row[0:2]))
        else:
            for row in cur.execute('''SELECT DISTINCT name, description FROM minion WHERE cost <= ?
                                      AND configured=?;''',
                                   [int(cost), int(configured)]):
                minions.append(tuple(row[0:2]))
        return minions

    def start_minions(self, names=None, cost=10000):
        """

        :param names: a list of names of minions to run
        :param cost: a maximum cost for minions
        """
        minions = self.get_minions(cost=cost, configured=True)
        minions = [m[0] for m in minions]

        # IF a name(s) are given, subset to them
        if names:
            minions = set(minions).intersection(set(names))

        for minion in minions:
            # get the plugin
            plugin = self.plugins.getPluginByName(minion)
            # start the plugin
            plugin.plugin_object.start()

    def get_running_minions(self):
        """

        :return: A set of names of minions which are running
        """
        minions = self.get_minions(cost=10000, configured=True)
        minions = [m[0] for m in minions]
        running_minions = set()

        # Iterate Through the minions
        for minion in minions:
            plugin = self.plugins.getPluginByName(minion)
            if plugin.plugin_object.isAlive():
                running_minions.add(minion)
        return running_minions

    def stop_minions(self, names=None):
        minions = self.get_running_minions()

        if names is not None:
            minions = set(minions).intersection(set(names))

        for minion in minions:
            # get the plugin
            plugin = self.plugins.getPluginByName(minion)
            # stop the plugin
            plugin.plugin_object.stop()

    def run_query(self, topic, max_depth=4, dont_follow=['enrichment', 'classification'], storage=None):
        """

        :param storage: the storage plugin to use
        :return: a networkx subgraph surrounded around the topic
        """
        # NOTE(review): mutable default argument `dont_follow` -- safe only
        # as long as callers never mutate it.
        if not storage:
            storage = self.storage
        if not storage:
            raise ValueError("No storage set. run set_storage() to set or provide directly. Storage must be a configured plugin.")
        else:
            # get the plugin
            plugin = self.plugins.getPluginByName(self.storage)
            return plugin.plugin_object.query(topic, max_depth=max_depth, dont_follow=dont_follow)

    def store_graph(self, g, storage=None):
        """

        :param g: a networkx graph to merge with the set storage
        """
        if not storage:
            storage = self.storage
        if not storage:
            raise ValueError("No storage set. run set_storage() to set or provide directly. Storage must be a configured plugin.")
        else:
            # get the plugin
            plugin = self.plugins.getPluginByName(self.storage)
            # merge the graph
            plugin.plugin_object.enrich(g)

    ## SCORE FUNCTIONS
    def get_scoring_plugins(self, cost=10000, speed=10000, names=None, configured=True):
        """

        :param cost: integer 1-10 of resource cost of running the enrichment.  (1 = cheapest)
        :param speed: integer 1-10 speed of enrichment.  (1 = fastest)
        :param enabled: Plugin is correctly configured.  If false, plugin may not run correctly.
        :return: list of names of scoring plugins matching the criteria
        """
        cur = self.db.cursor()

        plugins = list()
        if names is None:
            for row in cur.execute('''SELECT DISTINCT name FROM score
                                      WHERE cost <= ?
                                        AND speed <= ?
                                        AND configured = ?''',
                                   [cost, speed, int(configured)]
                                   ):
                plugins.append(row[0])
        else:
            # One "?" placeholder per requested plugin name.
            for row in cur.execute('''SELECT DISTINCT name FROM score
                                      WHERE cost <= ?
                                        AND speed <= ?
                                        AND configured = ?
                                        AND name IN ({0});'''.format(("?," * len(names))[:-1]),
                                   [cost, speed, int(configured)] + names
                                   ):
                plugins.append(row[0])
        return plugins

    def score_subgraph(self, topic, sg, plugin_name=None):
        if plugin_name is None:
            plugin_name = self.score
        score_plugin = self.plugins.getPluginByName(plugin_name)
        return score_plugin.plugin_object.score(sg, topic)

    def set_scoring_plugin(self, plugin):
        """

        :param interface: The name of the plugin to use for storage.

        Sets the storage backend to use.  It must have been configured through a plugin prior to setting.
        """
        cur = self.db.cursor()
        configured_scoring_plugins = list()
        for row in cur.execute('''SELECT DISTINCT name FROM score WHERE configured=1;'''):
            configured_scoring_plugins.append(row[0])
        if plugin in configured_scoring_plugins:
            self.score = plugin
        else:
            raise ValueError("Requested scoring plugin {0} is not configured. Options are {1}.".format(plugin, configured_scoring_plugins))

    def get_default_scoring_plugin(self):
        return self.score
class ModuleManager():
    """Discovers assembly plugins (*.asm-plugin), validates their
    third-party binaries, and runs them as stages of a wasp pipeline."""

    def __init__(self, threads, kill_list, kill_list_lock, job_list, binpath,
                 modulebin):
        self.threads = threads
        self.kill_list = kill_list
        self.kill_list_lock = kill_list_lock
        self.job_list = job_list  # Running jobs
        self.binpath = binpath
        self.module_bin_path = modulebin
        self.root_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '..', '..'))
        self.plugin_path = os.path.join(self.root_path, "lib", "assembly",
                                        "plugins")

        self.pmanager = PluginManager()
        locator = self.pmanager.getPluginLocator()
        locator.setPluginInfoExtension('asm-plugin')
        self.pmanager.setPluginPlaces([self.plugin_path])
        self.pmanager.collectPlugins()
        self.pmanager.locatePlugins()

        # 'none' is a valid no-op module name in pipelines.
        self.plugins = ['none']
        num_plugins = len(self.pmanager.getAllPlugins())
        if num_plugins == 0:
            raise Exception("No Plugins Found!")

        plugins = []
        self.executables = {}
        for plugin in self.pmanager.getAllPlugins():
            plugin.threads = threads
            self.plugins.append(plugin.name)
            plugin.plugin_object.setname(plugin.name)
            ## Check for installed binaries
            try:
                version = plugin.details.get('Documentation', 'Version')
                executables = plugin.details.items('Executables')
                full_execs = [(k, self.get_executable_path(v))
                              for k, v in executables]
                for binary in full_execs:
                    if not os.path.exists(binary[1]):
                        # Beta plugins (version < 1) only warn; stable
                        # plugins hard-fail on a missing binary.
                        if float(version) < 1:
                            # warn() is deprecated in favor of warning()
                            logger.warning(
                                'Third-party binary does not exist for beta plugin: {} (v{}) -- {}'
                                .format(plugin.name, version, binary[1]))
                        else:
                            raise Exception(
                                'ERROR: Third-party binary does not exist for beta plugin: {} (v{}) -- {}'
                                .format(plugin.name, version, binary[1]))
                self.executables[plugin.name] = full_execs
            except ConfigParser.NoSectionError:
                # Plugin declares no executables; that's fine.
                pass
            plugins.append(plugin.name)
        logger.info("Plugins found [{}]: {}".format(num_plugins,
                                                    sorted(plugins)))

    def run_proc(self, module, wlink, job_data, parameters):
        """ Run module adapter for wasp interpreter
        To support the Job_data mechanism, injects wlink """
        if not self.has_plugin(module):
            raise Exception("No plugin named {}".format(module))
        plugin = self.pmanager.getPluginByName(module)
        config_settings = plugin.details.items('Settings')
        config_settings = update_settings(config_settings, parameters)
        try:
            # Start from the resolved executable paths, then layer on any
            # config settings that don't collide.
            settings = {k: v for k, v in self.executables[module]}
            for k, v in config_settings:
                ## Don't override
                if k not in settings:
                    settings[k] = v
            settings = settings.items()
        except Exception:
            # was a bare "except:"; narrowed so Ctrl-C still propagates
            # settings = config_settings
            raise Exception("Plugin Config not updated: {}!".format(module))

        #### Check input/output type compatibility
        if wlink['link']:
            for link in wlink['link']:
                if not link:
                    continue
                if link['module']:
                    try:
                        assert (self.output_type(link['module']) ==
                                self.input_type(module) or
                                self.output_type(link['module']) in
                                self.input_type(module))
                    except AssertionError:
                        raise Exception(
                            '{} and {} have mismatched input/output types'.
                            format(module, link['module']))

        #### Run
        job_data['wasp_chain'] = wlink
        output = plugin.plugin_object.base_call(settings, job_data, self)
        ot = self.output_type(module)
        wlink.insert_output(output, ot, plugin.name)
        if not wlink.output:
            raise Exception('"{}" module failed to produce {}'.format(
                module, ot))

        ### Store any output values in job_data
        data = {'module': module, 'module_output': output}
        job_data['plugin_output'].append(data)

    def output_type(self, module):
        """Return the plugin's declared OUTPUT type."""
        return self.pmanager.getPluginByName(module).plugin_object.OUTPUT

    def input_type(self, module):
        """Return the plugin's declared INPUT type."""
        return self.pmanager.getPluginByName(module).plugin_object.INPUT

    def get_short_name(self, module):
        """Return the plugin's 'short_name' setting, or None if the
        plugin is unknown or declares no short_name."""
        try:
            plugin = self.pmanager.getPluginByName(module)
            settings = plugin.details.items('Settings')
            for kv in settings:
                if kv[0] == 'short_name':
                    sn = kv[1]
                    break
            return sn
        except Exception:
            # includes UnboundLocalError when no short_name is declared
            return None

    def get_executable(self, module):
        # FIXME(review): this is a byte-for-byte copy of get_short_name()
        # and returns the short_name, not an executable.  Behavior kept
        # unchanged in case callers depend on it; see self.executables
        # for the resolved binary paths.
        try:
            plugin = self.pmanager.getPluginByName(module)
            settings = plugin.details.items('Settings')
            for kv in settings:
                if kv[0] == 'short_name':
                    sn = kv[1]
                    break
            return sn
        except Exception:
            return None

    def verify_file(self, filename):
        """Raise if *filename* does not exist on disk."""
        if not os.path.exists(filename):
            raise Exception("File not found: %s" % filename)

    def get_executable_path(self, filename, verify=False):
        """Resolve *filename* against module_bin_path, falling back to
        binpath; optionally verify the resolved file exists."""
        guess1 = os.path.join(self.module_bin_path, filename)
        guess2 = os.path.join(self.binpath, filename)
        fullname = guess1 if os.path.exists(guess1) else guess2
        if verify:
            # BUGFIX: was a bare `verify_file(fullname)` which raised
            # NameError whenever verify=True (no module-level function
            # of that name exists).
            self.verify_file(fullname)
        return fullname

    def has_plugin(self, plugin):
        """True if *plugin* (case-insensitive) was discovered at init."""
        if not plugin.lower() in self.plugins:
            logger.error("{} plugin not found".format(plugin))
            return False
        return True

    def valid_modules(self, l):
        """ Return filtered list of available modules """
        return [m for m in l if not m.startswith('?') and self.has_plugin(m)]

    def validate_pipe(self, pipe):
        """Raise unless every word in the pipeline is a parameter (?...)
        or a known plugin."""
        for stage in pipe:
            for word in stage.replace('+', ' ').split(' '):
                if not (word.startswith('?') or self.has_plugin(word)):
                    raise Exception('Invalid pipeline command')

    def split_pipe(self, l):
        """ Splits a multi-module string in to bins
        Ex: 'kiki ?k=29 velvet' -> [[kiki, ?k=29], [velvet]]
        """
        bins = []
        for word in l:
            if not word.startswith('?'):
                bins.append([word])
            elif word.startswith('?'):
                # Parameter words attach to the most recent module.
                bins[-1].append(word)
        return bins

    def parse_input(self, pipe):
        """ Parses inital pipe and separates branching bins
        Ex: ['sga', '?p=True', 'kiki ?k=31 velvet', 'sspace']
        """
        stages = phelper.parse_branches(pipe)
        return stages
class Nikola(object):
    """Class that handles site generation.

    Takes a site config as argument on creation.
    """

    def __init__(self, **config):
        """Setup proper environment for running tasks."""
        self.global_data = {}
        self.posts_per_year = defaultdict(list)
        self.posts_per_tag = defaultdict(list)
        self.timeline = []
        self.pages = []
        self._scanned = False

        # This is the default config
        # TODO: fill it
        self.config = {
            'ADD_THIS_BUTTONS': True,
            'ANALYTICS': '',
            'ARCHIVE_PATH': "",
            'ARCHIVE_FILENAME': "archive.html",
            'CACHE_FOLDER': 'cache',
            'COMMENTS_IN_GALLERIES': False,
            'COMMENTS_IN_STORIES': False,
            'CONTENT_FOOTER': '',
            'DATE_FORMAT': '%Y-%m-%d %H:%M',
            'DEFAULT_LANG': "en",
            'DEPLOY_COMMANDS': [],
            'DISQUS_FORUM': 'nikolademo',
            'FAVICONS': {},
            'FILE_METADATA_REGEXP': None,
            'FILES_FOLDERS': {'files': ''},
            'FILTERS': {},
            'GALLERY_PATH': 'galleries',
            'INDEX_DISPLAY_POST_COUNT': 10,
            'INDEX_TEASERS': False,
            'INDEXES_TITLE': "",
            'INDEXES_PAGES': "",
            'INDEX_PATH': '',
            'LICENSE': '',
            'LISTINGS_FOLDER': 'listings',
            'MAX_IMAGE_SIZE': 1280,
            'MATHJAX_CONFIG': '',
            'OUTPUT_FOLDER': 'output',
            # compiler name -> source-file extensions it handles
            'post_compilers': {
                "rest": ('.txt', '.rst'),
                "markdown": ('.md', '.mdown', '.markdown'),
                "textile": ('.textile',),
                "txt2tags": ('.t2t',),
                "bbcode": ('.bb',),
                "wiki": ('.wiki',),
                "ipynb": ('.ipynb',),
                "html": ('.html', '.htm')
            },
            'POST_PAGES': (
                ("posts/*.txt", "posts", "post.tmpl", True),
                ("stories/*.txt", "stories", "story.tmpl", False),
            ),
            'REDIRECTIONS': [],
            'RSS_LINK': None,
            'RSS_PATH': '',
            'RSS_TEASERS': True,
            'SEARCH_FORM': '',
            'SLUG_TAG_PATH': True,
            'STORY_INDEX': False,
            'TAG_PATH': 'categories',
            'TAG_PAGES_ARE_INDEXES': False,
            'THEME': 'site',
            'THUMBNAIL_SIZE': 180,
            'USE_BUNDLES': True,
            'USE_CDN': False,
            'USE_FILENAME_AS_TITLE': True,
            'TIMEZONE': None,
        }
        self.config.update(config)
        # Default to a single translation rooted at '' for DEFAULT_LANG.
        self.config['TRANSLATIONS'] = self.config.get('TRANSLATIONS',
                                                      {self.config['DEFAULT_'
                                                       'LANG']: ''})

        self.THEMES = utils.get_theme_chain(self.config['THEME'])
        self.MESSAGES = utils.load_messages(self.THEMES,
                                            self.config['TRANSLATIONS'])

        self.plugin_manager = PluginManager(categories_filter={
            "Command": Command,
            "Task": Task,
            "LateTask": LateTask,
            "TemplateSystem": TemplateSystem,
            "PageCompiler": PageCompiler,
        })
        self.plugin_manager.setPluginInfoExtension('plugin')
        # Plugins ship with nikola and may also live in the working dir.
        self.plugin_manager.setPluginPlaces([
            str(os.path.join(os.path.dirname(__file__), 'plugins')),
            str(os.path.join(os.getcwd(), 'plugins')),
        ])
        self.plugin_manager.collectPlugins()

        self.commands = {}
        # Activate all command plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Command"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)
            pluginInfo.plugin_object.short_help = pluginInfo.description
            self.commands[pluginInfo.name] = pluginInfo.plugin_object

        # Activate all task plugins
        for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)

        for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"):
            self.plugin_manager.activatePluginByName(pluginInfo.name)
            pluginInfo.plugin_object.set_site(self)

        # set global_context for template rendering
        self.GLOBAL_CONTEXT = {
        }
        self.GLOBAL_CONTEXT['messages'] = self.MESSAGES
        self.GLOBAL_CONTEXT['_link'] = self.link
        self.GLOBAL_CONTEXT['rel_link'] = self.rel_link
        self.GLOBAL_CONTEXT['abs_link'] = self.abs_link
        self.GLOBAL_CONTEXT['exists'] = self.file_exists
        self.GLOBAL_CONTEXT['SLUG_TAG_PATH'] = self.config[
            'SLUG_TAG_PATH']
        self.GLOBAL_CONTEXT['add_this_buttons'] = self.config[
            'ADD_THIS_BUTTONS']
        self.GLOBAL_CONTEXT['index_display_post_count'] = self.config[
            'INDEX_DISPLAY_POST_COUNT']
        self.GLOBAL_CONTEXT['use_bundles'] = self.config['USE_BUNDLES']
        self.GLOBAL_CONTEXT['use_cdn'] = self.config.get("USE_CDN")
        self.GLOBAL_CONTEXT['favicons'] = self.config['FAVICONS']
        self.GLOBAL_CONTEXT['date_format'] = self.config.get('DATE_FORMAT',
                                                             '%Y-%m-%d %H:%M')
        self.GLOBAL_CONTEXT['blog_author'] = self.config.get('BLOG_AUTHOR')
        self.GLOBAL_CONTEXT['blog_title'] = self.config.get('BLOG_TITLE')
        self.GLOBAL_CONTEXT['blog_url'] = self.config.get('BLOG_URL')
        self.GLOBAL_CONTEXT['blog_desc'] = self.config.get('BLOG_DESCRIPTION')
        self.GLOBAL_CONTEXT['analytics'] = self.config.get('ANALYTICS')
        self.GLOBAL_CONTEXT['translations'] = self.config.get('TRANSLATIONS')
        self.GLOBAL_CONTEXT['license'] = self.config.get('LICENSE')
        self.GLOBAL_CONTEXT['search_form'] = self.config.get('SEARCH_FORM')
        self.GLOBAL_CONTEXT['disqus_forum'] = self.config.get('DISQUS_FORUM')
        self.GLOBAL_CONTEXT['mathjax_config'] = self.config.get(
            'MATHJAX_CONFIG')
        self.GLOBAL_CONTEXT['content_footer'] = self.config.get(
            'CONTENT_FOOTER')
        self.GLOBAL_CONTEXT['rss_path'] = self.config.get('RSS_PATH')
        self.GLOBAL_CONTEXT['rss_link'] = self.config.get('RSS_LINK')
        self.GLOBAL_CONTEXT['sidebar_links'] = self.config.get('SIDEBAR_LINKS')
        self.GLOBAL_CONTEXT['twitter_card'] = self.config.get(
            'TWITTER_CARD', {})
        self.GLOBAL_CONTEXT.update(self.config.get('GLOBAL_CONTEXT', {}))

        # check if custom css exist and is not empty
        # (for/else: the else arm runs only when no folder had one)
        for files_path in list(self.config['FILES_FOLDERS'].keys()):
            custom_css_path = os.path.join(files_path, 'assets/css/custom.css')
            if self.file_exists(custom_css_path, not_empty=True):
                self.GLOBAL_CONTEXT['has_custom_css'] = True
                break
        else:
            self.GLOBAL_CONTEXT['has_custom_css'] = False

        # Load template plugin
        template_sys_name = utils.get_template_engine(self.THEMES)
        pi = self.plugin_manager.getPluginByName(
            template_sys_name, "TemplateSystem")
        if pi is None:
            sys.stderr.write("Error loading {0} template system "
                             "plugin\n".format(template_sys_name))
            sys.exit(1)
        self.template_system = pi.plugin_object
        lookup_dirs = [os.path.join(utils.get_theme_path(name), "templates")
                       for name in self.THEMES]
        self.template_system.set_directories(lookup_dirs,
                                             self.config['CACHE_FOLDER'])

        # Load compiler plugins
        self.compilers = {}
        self.inverse_compilers = {}

        for pluginInfo in self.plugin_manager.getPluginsOfCategory(
                "PageCompiler"):
            self.compilers[pluginInfo.name] = \
                pluginInfo.plugin_object.compile_html

    def get_compiler(self, source_name):
        """Get the correct compiler for a post from `conf.post_compilers`

        To make things easier for users, the mapping in conf.py is
        compiler->[extensions], although this is less convenient for us.
        The majority of this function is reversing that dictionary and
        error checking.
        """
        ext = os.path.splitext(source_name)[1]
        try:
            compile_html = self.inverse_compilers[ext]
        except KeyError:
            # Find the correct compiler for this files extension
            langs = [lang for lang, exts in
                     list(self.config['post_compilers'].items())
                     if ext in exts]
            if len(langs) != 1:
                if len(set(langs)) > 1:
                    exit("Your file extension->compiler definition is"
                         "ambiguous.\nPlease remove one of the file extensions"
                         "from 'post_compilers' in conf.py\n(The error is in"
                         "one of {0})".format(', '.join(langs)))
                elif len(langs) > 1:
                    # Same compiler listed more than once: keep one.
                    langs = langs[:1]
                else:
                    exit("post_compilers in conf.py does not tell me how to "
                         "handle '{0}' extensions.".format(ext))

            lang = langs[0]
            compile_html = self.compilers[lang]
            # Cache the reverse lookup for next time.
            self.inverse_compilers[ext] = compile_html

        return compile_html

    def render_template(self, template_name, output_name, context):
        local_context = {}
        local_context["template_name"] = template_name
        local_context.update(self.GLOBAL_CONTEXT)
        local_context.update(context)
        data = self.template_system.render_template(
            template_name, None, local_context)

        assert isinstance(output_name, bytes)
        assert output_name.startswith(
            self.config["OUTPUT_FOLDER"].encode('utf8'))
        url_part = output_name.decode('utf8')[len(self.config["OUTPUT_FOLDER"])
                                              + 1:]

        # This is to support windows paths
        url_part = "/".join(url_part.split(os.sep))

        src = urljoin(self.config["BLOG_URL"], url_part)

        parsed_src = urlsplit(src)
        src_elems = parsed_src.path.split('/')[1:]

        def replacer(dst):
            # Refuse to replace links that are full URLs.
            dst_url = urlparse(dst)
            if dst_url.netloc:
                if dst_url.scheme == 'link':  # Magic link
                    dst = self.link(dst_url.netloc, dst_url.path.lstrip('/'),
                                    context['lang'])
                else:
                    return dst

            # Normalize
            dst = urljoin(src, dst)

            # Avoid empty links.
            if src == dst:
                return "#"

            # Check that link can be made relative, otherwise return dest
            parsed_dst = urlsplit(dst)
            if parsed_src[:2] != parsed_dst[:2]:
                return dst

            # Now both paths are on the same site and absolute
            dst_elems = parsed_dst.path.split('/')[1:]

            # Find the longest common path prefix between src and dst.
            i = 0
            for (i, s), d in zip(enumerate(src_elems), dst_elems):
                if s != d:
                    break
            # Now i is the longest common prefix
            result = '/'.join(['..'] * (len(src_elems) - i - 1) +
                              dst_elems[i:])

            if not result:
                result = "."

            # Don't forget the fragment (anchor) part of the link
            if parsed_dst.fragment:
                result += "#" + parsed_dst.fragment

            assert result, (src, dst, i, src_elems, dst_elems)

            return result

        try:
            os.makedirs(os.path.dirname(output_name))
        except:
            pass
        doc = lxml.html.document_fromstring(data)
        doc.rewrite_links(replacer)
        data = b'<!DOCTYPE html>' + lxml.html.tostring(doc, encoding='utf8')
        with open(output_name, "wb+") as post_file:
            post_file.write(data)

    def path(self, kind, name, lang, is_link=False):
        """Build the path to a certain kind of page.

        kind is one of:

        * tag_index (name is ignored)
        * tag (and name is the tag name)
        * tag_rss (name is the tag name)
        * archive (and name is the year, or None for the main archive index)
        * index (name is the number in index-number)
        * rss (name is ignored)
        * gallery (name is the gallery name)
        * listing (name is the source code file name)
        * post_path (name is 1st element in a post_pages tuple)

        The returned value is always a path relative to output, like
        "categories/whatever.html"

        If is_link is True, the path is absolute and uses "/" as separator
        (ex: "/archive/index.html").
        If is_link is False, the path is relative to output and uses the
        platform's separator.
        (ex: "archive\\index.html")
        """

        path = []

        # Each branch builds the path pieces and drops empty components
        # (the `if _f` filter) so joins never produce double slashes.
        if kind == "tag_index":
            path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                  self.config['TAG_PATH'], 'index.html']
                    if _f]
        elif kind == "tag":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                  self.config['TAG_PATH'], name + ".html"]
                    if _f]
        elif kind == "tag_rss":
            if self.config['SLUG_TAG_PATH']:
                name = utils.slugify(name)
            path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                  self.config['TAG_PATH'], name + ".xml"]
                    if _f]
        elif kind == "index":
            if name not in [None, 0]:
                path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                      self.config['INDEX_PATH'],
                                      'index-{0}.html'.format(name)]
                        if _f]
            else:
                path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                      self.config['INDEX_PATH'],
                                      'index.html'] if _f]
        elif kind == "post_path":
            path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                  os.path.dirname(name), "index.html"] if _f]
        elif kind == "rss":
            path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                  self.config['RSS_PATH'], 'rss.xml'] if _f]
        elif kind == "archive":
            if name:
                path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                      self.config['ARCHIVE_PATH'], name,
                                      'index.html'] if _f]
            else:
                path = [_f for _f in [self.config['TRANSLATIONS'][lang],
                                      self.config['ARCHIVE_PATH'],
                                      self.config['ARCHIVE_FILENAME']] if _f]
        elif kind == "gallery":
            path = [_f for _f in [self.config['GALLERY_PATH'], name,
                                  'index.html'] if _f]
        elif kind == "listing":
            path = [_f for _f in [self.config['LISTINGS_FOLDER'],
                                  name + '.html'] if _f]
        if is_link:
            return '/' + ('/'.join(path))
        else:
            return os.path.join(*path)

    def link(self, *args):
        return self.path(*args, is_link=True)

    def abs_link(self, dst):
        # Normalize
        dst = urljoin(self.config['BLOG_URL'], dst)

        return urlparse(dst).path

    def rel_link(self, src, dst):
        # Normalize
        src = urljoin(self.config['BLOG_URL'], src)
        dst = urljoin(src, dst)
        # Avoid empty links.
        # NOTE(review): this chunk is truncated here; rel_link continues
        # past the end of the visible source.
if src == dst: return "#" # Check that link can be made relative, otherwise return dest parsed_src = urlsplit(src) parsed_dst = urlsplit(dst) if parsed_src[:2] != parsed_dst[:2]: return dst # Now both paths are on the same site and absolute src_elems = parsed_src.path.split('/')[1:] dst_elems = parsed_dst.path.split('/')[1:] i = 0 for (i, s), d in zip(enumerate(src_elems), dst_elems): if s != d: break else: i += 1 # Now i is the longest common prefix return '/'.join(['..'] * (len(src_elems) - i - 1) + dst_elems[i:]) def file_exists(self, path, not_empty=False): """Returns True if the file exists. If not_empty is True, it also has to be not empty.""" exists = os.path.exists(path) if exists and not_empty: exists = os.stat(path).st_size > 0 return exists def gen_tasks(self): task_dep = [] for pluginInfo in self.plugin_manager.getPluginsOfCategory("Task"): for task in pluginInfo.plugin_object.gen_tasks(): yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) for pluginInfo in self.plugin_manager.getPluginsOfCategory("LateTask"): for task in pluginInfo.plugin_object.gen_tasks(): yield task if pluginInfo.plugin_object.is_default: task_dep.append(pluginInfo.plugin_object.name) yield { 'name': b'all', 'actions': None, 'clean': True, 'task_dep': task_dep } def scan_posts(self): """Scan all the posts.""" if not self._scanned: print("Scanning posts", end='') tzinfo = None if self.config['TIMEZONE'] is not None: tzinfo = pytz.timezone(self.config['TIMEZONE']) targets = set([]) for wildcard, destination, template_name, use_in_feeds in \ self.config['post_pages']: print(".", end='') base_len = len(destination.split(os.sep)) dirname = os.path.dirname(wildcard) for dirpath, _, _ in os.walk(dirname): dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) dest_dir = os.path.join(*([destination] + dirpath.split( os.sep)[base_len:])) for base_path in glob.glob(dir_glob): post = Post( base_path, self.config['CACHE_FOLDER'], dest_dir, 
use_in_feeds, self.config['TRANSLATIONS'], self.config['DEFAULT_LANG'], self.config['BLOG_URL'], self.MESSAGES, template_name, self.config['FILE_METADATA_REGEXP'], tzinfo, ) for lang, langpath in list( self.config['TRANSLATIONS'].items()): dest = (destination, langpath, dir_glob, post.pagenames[lang]) if dest in targets: raise Exception('Duplicated output path {0!r} ' 'in post {1!r}'.format( post.pagenames[lang], base_path)) targets.add(dest) self.global_data[post.post_name] = post if post.use_in_feeds: self.posts_per_year[ str(post.date.year)].append(post.post_name) for tag in post.tags: self.posts_per_tag[tag].append(post.post_name) else: self.pages.append(post) for name, post in list(self.global_data.items()): self.timeline.append(post) self.timeline.sort(key=lambda p: p.date) self.timeline.reverse() post_timeline = [p for p in self.timeline if p.use_in_feeds] for i, p in enumerate(post_timeline[1:]): p.next_post = post_timeline[i] for i, p in enumerate(post_timeline[:-1]): p.prev_post = post_timeline[i + 1] self._scanned = True print("done!") def generic_page_renderer(self, lang, post, filters): """Render post fragments to final HTML pages.""" context = {} deps = post.deps(lang) + \ self.template_system.template_deps(post.template_name) context['post'] = post context['lang'] = lang context['title'] = post.title(lang) context['description'] = post.description(lang) context['permalink'] = post.permalink(lang) context['page_list'] = self.pages if post.use_in_feeds: context['enable_comments'] = True else: context['enable_comments'] = self.config['COMMENTS_IN_STORIES'] output_name = os.path.join(self.config['OUTPUT_FOLDER'], post.destination_path(lang)).encode('utf8') deps_dict = copy(context) deps_dict.pop('post') if post.prev_post: deps_dict['PREV_LINK'] = [post.prev_post.permalink(lang)] if post.next_post: deps_dict['NEXT_LINK'] = [post.next_post.permalink(lang)] deps_dict['OUTPUT_FOLDER'] = self.config['OUTPUT_FOLDER'] deps_dict['TRANSLATIONS'] = 
self.config['TRANSLATIONS'] deps_dict['global'] = self.GLOBAL_CONTEXT deps_dict['comments'] = context['enable_comments'] task = { 'name': output_name, 'file_dep': deps, 'targets': [output_name], 'actions': [(self.render_template, [post.template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_dict)], } yield utils.apply_filters(task, filters) def generic_post_list_renderer(self, lang, posts, output_name, template_name, filters, extra_context): """Renders pages with lists of posts.""" # This is a name on disk, has to be bytes assert isinstance(output_name, bytes) deps = self.template_system.template_deps(template_name) for post in posts: deps += post.deps(lang) context = {} context["posts"] = posts context["title"] = self.config['BLOG_TITLE'] context["description"] = self.config['BLOG_DESCRIPTION'] context["lang"] = lang context["prevlink"] = None context["nextlink"] = None context.update(extra_context) deps_context = copy(context) deps_context["posts"] = [(p.titles[lang], p.permalink(lang)) for p in posts] deps_context["global"] = self.GLOBAL_CONTEXT task = { 'name': output_name, 'targets': [output_name], 'file_dep': deps, 'actions': [(self.render_template, [template_name, output_name, context])], 'clean': True, 'uptodate': [config_changed(deps_context)] } return utils.apply_filters(task, filters)
# Collect yapsy plugins from the default location plus any user-supplied
# paths (``args`` and ``manager`` are created earlier in this script).
plugins_paths = ["openbci/plugins"]
if args.plugins_path:
    plugins_paths += args.plugins_path
manager.setPluginPlaces(plugins_paths)
manager.collectPlugins()

# Print list of available plugins and exit
if args.list:
    print("Available plugins:")
    for plugin in manager.getAllPlugins():
        print("\t- " + plugin.name)
    exit()

# User wants more info about a plugin...
if args.info:
    plugin = manager.getPluginByName(args.info)
    # ``is None`` instead of ``== None``: identity check is the correct
    # (and PEP 8 mandated) comparison against the None singleton.
    if plugin is None:
        # eg: if an import fail inside a plugin, yapsy skip it
        print(
            "Error: [ " + args.info +
            " ] not found or could not be loaded. Check name and requirements."
        )
    else:
        print(plugin.description)
        plugin.plugin_object.show_help()
    exit()

print("\n------------SETTINGS-------------")
print("Notch filtering:" + str(args.filtering))

# Logging
class NimbusPI(object):
    """The NimbusPi Weather Station"""

    # Current NimbusPI Version
    VERSION = "0.1.0-rc1"

    def __init__(self, config='nimbus.cfg'):
        """Initializes the NimbusPI Weather Station

        :param config: path to the INI-style configuration file; missing
            files are tolerated and the built-in defaults are used.
        """
        # name -> yapsy plugin wrapper for every *active* plugin
        self.sensors = dict()
        self.broadcasters = dict()
        # worker threads contributed by activated plugins
        self.threads = []

        # Initialize a named logger
        self.__logger = logging.getLogger('nimbuspi')

        # Load our config defaults
        self.config = configparser.ConfigParser(allow_no_value=True)
        self.config.add_section('station')
        self.config.set('station', 'name', 'N/A')
        self.config.set('station', 'location', 'N/A')
        self.config.set('station', 'longitude', '0.000000')
        self.config.set('station', 'latitude', '0.000000')
        self.config.set('station', 'altitude', '0')
        self.config.add_section('sensors')
        self.config.add_section('broadcasters')

        # Load the provided config file
        if not os.path.isfile(config):
            # logger.warn() is a deprecated alias of warning()
            self.__logger.warning('Configuration file "%s" not found!',
                                  config)
        else:
            self.__logger.debug('Loading configuration from "%s"', config)
            self.config.read(config)

        # Get our station details
        self.__logger.debug('  name     :: %s',
                            self.config.get('station', 'name'))
        self.__logger.debug('  location :: %s',
                            self.config.get('station', 'location'))
        self.__logger.debug('  latitude :: %s',
                            self.config.get('station', 'latitude'))
        self.__logger.debug('  longitude :: %s',
                            self.config.get('station', 'longitude'))
        self.__logger.debug('  altitude :: %s feet',
                            self.config.get('station', 'altitude'))

        self.__logger.debug('Sensors Configured:')
        for sensor in self.config.options('sensors'):
            self.__logger.debug('  %s', sensor)

        self.__logger.debug('Broadcasters Configured:')
        for broadcaster in self.config.options('broadcasters'):
            self.__logger.debug('  %s', broadcaster)

        # Search for available plugins
        self.__logger.debug("Searching for available plugins...")
        self.__plugins = PluginManager(plugin_info_ext='info')
        self.__plugins.setPluginPlaces([
            './sensors', './broadcasters',
            './nimbuspi/sensors', './nimbuspi/broadcasters'
        ])
        self.__plugins.setCategoriesFilter({
            plugins.ISensorPlugin.CATEGORY: plugins.ISensorPlugin,
            plugins.IBroadcasterPlugin.CATEGORY: plugins.IBroadcasterPlugin
        })
        self.__plugins.collectPlugins()

        for plugin in self.__plugins.getAllPlugins():
            self.__logger.debug("  %s (%s)", plugin.name, plugin.path)
            plugin.plugin_object.set_nimbus(self)

        self.__logger.debug("%d plugins available",
                            len(self.__plugins.getAllPlugins()))

    def run(self):
        """Runs the NimbusPI Weather Station loop"""
        self.__logger.debug('-' * 80)
        self.__logger.info('NimbusPI Weather Station v%s', self.VERSION)
        self.__logger.info('-' * 80)

        # Load all configured sensor plugins
        self.__logger.info("Activating sensor plugins...")
        for sensor in self.config.options('sensors'):
            try:
                self.activate_sensor(sensor)
            except LookupError:
                self.__logger.error("Could not load sensor '%s'", sensor)
                return
        if len(self.sensors) <= 0:
            self.__logger.error('Cannot continue - no sensors configured')
            return

        # Load all configured broadcaster plugins
        self.__logger.info("Activating broadcaster plugins...")
        for broadcaster in self.config.options('broadcasters'):
            try:
                self.activate_broadcaster(broadcaster)
            except LookupError:
                self.__logger.error("Could not load broadcaster '%s'",
                                    broadcaster)
                return
        if len(self.broadcasters) <= 0:
            self.__logger.error(
                'Cannot continue - no broadcasters configured')
            return

        # Thread run loop until keyboard interrupt
        self.__logger.debug("Entering thread loop")
        while len(self.threads) > 0:
            try:
                # BUG FIX: the original rebound self.threads to the
                # results of t.join(30) -- join() always returns None,
                # so the list filled with Nones and the loop exited
                # after a single pass. Join first, then keep only the
                # threads that are still alive. (isAlive() was also
                # removed in Python 3.9; is_alive() is the survivor.)
                for thread in self.threads:
                    if thread is not None:
                        thread.join(30)
                self.threads = [t for t in self.threads
                                if t is not None and t.is_alive()]
            except (KeyboardInterrupt, SystemExit):
                self.__logger.info(
                    "Shutting down plugins (this may take a minute)...")
                for thread in self.threads:
                    thread.stop()
        self.__logger.debug("Exiting thread loop")

        # Deactivate plugins. Snapshot the key sets with list() --
        # deactivate_* deletes entries, and mutating a dict while
        # iterating its live .keys() view raises RuntimeError.
        self.__logger.debug("Deactivating sensors")
        for sensor in list(self.sensors.keys()):
            self.deactivate_sensor(sensor)

        self.__logger.debug("Deactivating broadcasters")
        for broadcaster in list(self.broadcasters.keys()):
            self.deactivate_broadcaster(broadcaster)

    def activate_sensor(self, sensor):
        """Activates a sensor on the service

        :raises LookupError: when no plugin with that name is found.
        :returns: True on activation, False when already active.
        """
        if sensor in self.sensors:
            self.__logger.warning(
                "Cannot activate sensor '%s' - sensor already active",
                sensor)
            return False

        self.__logger.debug("Activating sensor '%s'", sensor)
        self.sensors[sensor] = self.__plugins.getPluginByName(
            sensor, plugins.ISensorPlugin.CATEGORY)
        if not self.sensors[sensor]:
            raise LookupError
        self.__plugins.activatePluginByName(
            sensor, plugins.ISensorPlugin.CATEGORY)
        self.threads.append(self.sensors[sensor].plugin_object.thread)
        return True

    def deactivate_sensor(self, sensor):
        """Deactivates a sensor on the service

        :returns: True on deactivation, False when not active.
        """
        if sensor not in self.sensors:
            self.__logger.warning(
                "Cannot deactivate sensor '%s' - sensor not active",
                sensor)
            return False

        self.__logger.debug("Deactivating sensor '%s'", sensor)
        # Stop the worker thread (if the plugin started one) before
        # yapsy deactivation.
        if self.sensors[sensor].plugin_object.thread:
            self.sensors[sensor].plugin_object.thread.stop()
        self.__plugins.deactivatePluginByName(
            sensor, plugins.ISensorPlugin.CATEGORY)
        del self.sensors[sensor]
        return True

    def activate_broadcaster(self, broadcaster):
        """Activates a broadcaster on the service

        :raises LookupError: when no plugin with that name is found.
        :returns: True on activation, False when already active.
        """
        if broadcaster in self.broadcasters:
            self.__logger.warning(
                "Cannot activate broadcaster '%s' - broadcaster already "
                "active", broadcaster)
            return False

        self.__logger.debug("Activating broadcaster '%s'", broadcaster)
        self.broadcasters[broadcaster] = self.__plugins.getPluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY
        )
        if not self.broadcasters[broadcaster]:
            raise LookupError
        self.__plugins.activatePluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY)
        self.threads.append(
            self.broadcasters[broadcaster].plugin_object.thread)
        return True

    def deactivate_broadcaster(self, broadcaster):
        """Deactivates a broadcaster on the service

        :returns: True on deactivation, False when not active.
        """
        if broadcaster not in self.broadcasters:
            self.__logger.warning(
                "Cannot deactivate broadcaster '%s' - broadcaster not "
                "active", broadcaster)
            return False

        self.__logger.debug("Deactivating broadcaster '%s'", broadcaster)
        if self.broadcasters[broadcaster].plugin_object.thread:
            self.broadcasters[broadcaster].plugin_object.thread.stop()
        self.__plugins.deactivatePluginByName(
            broadcaster, plugins.IBroadcasterPlugin.CATEGORY)
        del self.broadcasters[broadcaster]
        return True

    def get_states(self):
        """Returns the current state of all sensors"""
        states = dict()

        # Add our station configuration information as well
        states['config'] = dict()
        for option in self.config.options('station'):
            states['config'][option] = self.config.get('station', option)

        # Add all current plugin states
        for sensor in self.sensors:
            states[sensor] = self.sensors[sensor].plugin_object.get_state()

        return states