def get_handler(logger, conf):
    """
    Build a logging handler from its configuration.

    :param logger: logger used to report configuration problems.
    :param conf: Data element with keys: type (required, one of _HANDLERS),
                 level (optional), format (optional string or list of strings
                 that are joined together).
    :return: the configured handler, or None when the configuration is invalid.
    """
    if not Data.is_element(conf):
        # BUGFIX: was "{1}" with a single format() argument -> IndexError
        logger.error("Malformed log handler:\n{0}".format(repr(conf)))
        return

    handler_type = conf.get("type")
    if handler_type is None or not isinstance(handler_type, basestring) \
            or handler_type.lower() not in _HANDLERS:
        logger.error("Unknown or unsupported handler type: {0}\n{1}".format(
            handler_type, repr(conf)))
        return

    # BUGFIX: look up with the lowercased key, consistently with the
    # membership check above (the raw key raised KeyError for e.g. "SMTP")
    handler = _HANDLERS[handler_type.lower()](conf)

    level = conf.get("level")
    if level is not None:
        handler.setLevel(get_level(level))

    fmt = conf.get("format", _DEFAULT_FORMAT)
    if fmt is not None:
        if Data.is_list(fmt):
            fmt = "".join(fmt.to_native())
        handler.setFormatter(logging.Formatter(fmt))

    return handler
def __init__(self, conf):
    """Create a SAGA-based job manager configured from *conf*."""
    JobManager.__init__(self, "saga", conf)

    get = self._conf.get

    # File transfer endpoint and local/remote directory layout
    self._file_url = get("files_url", "file://")
    self._remote_path = get("remote_path", self._work_path)
    self._output_path = os.path.join(self._work_path, "output")
    self._remote_output_path = os.path.join(self._remote_path, "output")

    # Scheduler submission options
    self._pe = get("pe")
    self._cpu_count = get("cpu_count", 1)
    self._queue = get("queue")
    self._project = get("project")
    self._working_directory = get("working_directory")
    self._state_check_interval = get("state_check_interval", 5)

    # Security contexts: a single element or a list of elements is accepted
    ctx_conf = get("context")
    if ctx_conf is not None:
        if not (Data.is_element(ctx_conf) or Data.is_list(ctx_conf)):
            raise ConfigTypeError("context", ctx_conf)

    # Created lazily on start
    self._session = None
    self._job_service = None

    self._queued_count = 0
    self._max_queued = get("max_queued", 0)

    self._running = False
    self._run_thread = None
    self._join_thread = None
def _plain_conf(self, value, path=None):
    """Yield (dotted_key, leaf_value) pairs for every leaf under *value*."""
    if path is None:
        path = []

    if Data.is_element(value):
        # Recurse into nested elements, extending the dotted key path
        for key in value.keys():
            for pair in self._plain_conf(value[key], path + [key]):
                yield pair
    else:
        # Leaf reached: emit the accumulated dotted key and its value
        yield (".".join(path), value)
def __init__(self, rule, base_path=None, platform=None):
    """
    Parse a configuration rule.

    :param rule: rule definition with optional keys: on (match criteria),
                 del (list of keys to delete), set (list of [key, value]
                 pairs) and merge (dictionary or path to a config file).
    :param base_path: base path used to resolve a relative merge path.
    :param platform: when given, added as the 'platform' match criterion.
    :raises Exception: when any of the rule sections has the wrong type.
    """
    rule = Data.create(rule)

    self.on = rule.get("on", {})
    if isinstance(self.on, basestring):
        # A bare string is shorthand for matching a task name
        self.on = dict(task=self.on)
    if platform is not None:
        self.on["platform"] = platform

    self.dels = rule.get("del", default=Data.list)
    if not Data.is_list(self.dels):
        raise Exception("Expected a list of strings for del operations of rule: {}".format(repr(rule)))
    for k in self.dels:
        if not isinstance(k, basestring):
            raise Exception("Expected a list of strings for del operations of rule: {}".format(repr(rule)))

    self.set = rule.get("set", default=Data.list)
    # BUGFIX: the original validated self.dels here instead of self.set,
    # so a non-list "set" section slipped through this check
    if not Data.is_list(self.set):
        raise Exception("Expected a list of tuples [key, value] for set operations of rule: {}".format(repr(rule)))
    for s in self.set:
        if not Data.is_list(s) or len(s) != 2:
            raise Exception("Expected a list of tuples [key, value] for set operations of rule: {}".format(repr(rule)))

    self.merge = rule.get("merge")
    if isinstance(self.merge, basestring):
        # A string merge is a path to a configuration file to load
        if not os.path.isabs(self.merge):
            if base_path is None:
                raise Exception("Configuration rule merge path should be absolute path: {}".format(self.merge))
            else:
                self.merge = os.path.join(base_path, self.merge)
        if not os.path.isfile(self.merge):
            raise Exception("Configuration rule merge path not found: {}".format(self.merge))
        # join() with an absolute second argument returns it unchanged
        self.merge = ConfigLoader(os.path.join(base_path or "", self.merge)).load()
    if self.merge is not None and not Data.is_element(self.merge):
        raise Exception("Expected a dictionary for merge operation of rule: {}".format(repr(rule)))
def _create_platforms(self):
    """
    Creates the platforms according to the configuration.

    :return: list of Platform
    :raises errors.ConfigTypeError: when a platform entry is not an element.
    """
    platform_confs = self._conf.get("platforms")
    if platform_confs is None:
        platform_confs = Data.list()
    elif not Data.is_list(platform_confs):
        self._log.error("Wrong configuration type for 'platforms': {}".format(platform_confs))
        platform_confs = Data.list()

    # Default to a single local platform when none is configured
    if len(platform_confs) == 0:
        platform_confs += [Data.element(dict(type="local"))]

    platforms = []
    names = {}
    for pidx, platform_conf in enumerate(platform_confs):
        # A string entry is a path to a platform configuration file
        if isinstance(platform_conf, basestring):
            if not os.path.isabs(platform_conf) and self._conf_base_path is not None:
                platform_conf = os.path.join(self._conf_base_path, platform_conf)
            platform_conf = ConfigLoader(platform_conf).load()

        if not Data.is_element(platform_conf):
            # BUGFIX: the conf was passed into format() (where it was ignored)
            # instead of to ConfigTypeError as the offending value
            raise errors.ConfigTypeError("wok.platforms[{}]".format(pidx), platform_conf)

        ptype = platform_conf.get("type", "local")

        # Make platform names unique by appending an increasing counter
        name = platform_conf.get("name", ptype)
        if name in names:
            # BUGFIX: the counter was incremented under the suffixed name
            # (names["local-2"] += 1 -> KeyError); bump the base name instead
            count = names[name]
            names[name] = count + 1
            name = "{}-{}".format(name, count)
        else:
            names[name] = 2
        platform_conf["name"] = name

        if "work_path" not in platform_conf:
            platform_conf["work_path"] = os.path.join(self._work_path, "platform_{}".format(name))

        self._log.info("Creating '{}' platform ...".format(name))
        self._log.debug("Platform configuration: {}".format(repr(platform_conf)))

        platforms += [create_platform(ptype, platform_conf)]

    return platforms
def initialize(self):
    """Load every configured project and initialize them ordered by name."""
    self._log.info("Initializing projects ...")

    if Data.is_element(self.conf):
        descriptors = self._iter_dict(self.conf)
    elif Data.is_list(self.conf):
        descriptors = self._iter_list(self.conf)
    else:
        descriptors = iter([])

    for _, desc in descriptors:
        # A bare string is a path to a project descriptor to be loaded
        if isinstance(desc, basestring):
            desc = self._load_project_desc(desc, self.base_path)
        self._add_project_desc(desc, self.base_path)

    for _, proj in sorted(self._projects.items(), key=lambda item: item[0]):
        proj.initialize()
def _load_project_desc(self, path, base_path=None):
    """
    Load a project descriptor from *path*.

    :param path: path to a project.conf file or to a directory containing one;
                 relative paths are resolved against base_path (or the cwd).
    :param base_path: optional base used to resolve a relative *path*.
    :return: the project descriptor as a Data element, with its 'path',
             'conf' and 'conf_rules' fields resolved/loaded.
    :raises Exception: when the path or the configuration file is missing.
    """
    # Resolve the descriptor location to an absolute path
    if not os.path.isabs(path):
        if base_path is not None:
            path = os.path.join(base_path, path)
        else:
            path = os.path.abspath(path)

    if not os.path.exists(path):
        raise Exception("Project path not found: {}".format(path))

    # A directory is expected to contain a project.conf file
    if os.path.isdir(path):
        path = os.path.join(path, "project.conf")
    if not os.path.isfile(path):
        raise Exception("Project configuration not found: {}".format(path))

    project = Data.element()
    project.merge(ConfigLoader(path).load())

    # From here on, relative references are resolved against the
    # directory that contains the descriptor file
    base_path = os.path.dirname(path)

    if "path" not in project:
        project["path"] = base_path

    if not os.path.isabs(project["path"]):
        project["path"] = os.path.normpath(
            os.path.join(base_path, project["path"]))

    # A string 'conf' is a path to an external configuration to inline
    if "conf" in project and isinstance(project["conf"], basestring):
        conf_path = os.path.join(base_path, project["conf"])
        project["conf"] = ConfigLoader(conf_path).load()

    # A string 'conf_rules' is a path to an external rules file to inline
    if "conf_rules" in project and isinstance(project["conf_rules"], basestring):
        base_path = os.path.dirname(path)  # no-op: base_path already holds this
        conf_path = os.path.join(base_path, project["conf_rules"])
        project["conf_rules"] = ConfigLoader(conf_path).load()

    # Within each rule, a string 'merge' is also a path to a config file
    if "conf_rules" in project and Data.is_list(project["conf_rules"]):
        for rule in project["conf_rules"]:
            if Data.is_element(rule) and "merge" in rule and isinstance(
                    rule["merge"], basestring):
                rule["merge"] = ConfigLoader(
                    os.path.join(base_path, rule["merge"])).load()

    return project
def initialize(conf=None, format=None, datefmt=None, level=None):
    """
    Initialize the logging system.

    If conf is a dictionary then the parameters considered for configuration are:
    - format: Logger format
    - datefmt: Date format
    - loggers: list of tuples (name, conf) to configure loggers

    If conf is a list then only the loggers are configured.
    If conf is an string then the default logger is configured for the logging level.
    """
    global _initialized

    # Normalize conf: None -> empty element, non-string -> Data wrapper,
    # a plain string passes through and is handled below
    if conf is None:
        conf = Data.element()
    elif not isinstance(conf, basestring):
        conf = Data.create(conf)

    # Extract the per-logger configurations; branch order matters
    if Data.is_list(conf):
        loggers_conf, conf = conf, Data.element()
    elif Data.is_element(conf):
        loggers_conf = conf.get("loggers", default=Data.list)
    elif isinstance(conf, basestring):
        loggers_conf, conf = Data.list([["", conf]]), Data.element()

    # Explicit arguments take precedence over the configuration
    format = format or conf.get("format", _DEFAULT_FORMAT)
    datefmt = datefmt or conf.get("datefmt", _DEFAULT_DATEFMT)

    logging.basicConfig(format=format, datefmt=datefmt)

    for log_name, log_conf in loggers_conf:
        init_logger(log_name, conf=log_conf)

    if level is not None:
        init_logger("", conf=level)

    _initialized = True
def _start(self):
    """
    Start the SAGA job manager: create the session with its security
    contexts, the job service and the remote directory, then launch the
    run/join worker threads.
    """
    self._log.debug("Creating session ...")

    self._session = saga.Session()

    # Attach the configured security contexts (a single element is
    # normalized into a one-element list)
    ctxs_conf = self._conf.get("context")
    if ctxs_conf is not None:
        if Data.is_element(ctxs_conf):
            ctxs_conf = Data.list([ctxs_conf])
        for ctx_conf in ctxs_conf:
            try:
                ctx = saga.Context(ctx_conf["type"])
                # Copy every matching configuration key onto the context
                for key in ctx_conf:
                    if hasattr(ctx, key):
                        setattr(ctx, key, ctx_conf[key])
                self._session.add_context(ctx)
            except Exception as ex:
                # Best effort: a bad context is reported but does not
                # prevent the manager from starting
                self._log.error("Wrong context configuration: {}".format(repr(ctx_conf)))
                self._log.exception(ex)

    self._log.debug("Creating job service ...")

    url = self._conf.get("service_url", "fork://localhost", dtype=str)
    self._job_service = saga.job.Service(url, session=self._session)

    self._remote_dir = saga.filesystem.Directory(self._file_url, session=self._session)

    # FIXME Use the logging configuration mechanisms of SAGA
    from wok import logger
    logger.init_logger("SGEJobService", conf=Data.element(dict(level=self._conf.get("saga_log.level", "error"))))

    # TODO count the number of previously queued jobs
    # TODO clean output files ?

    # Spawn the submission and completion worker threads
    self._running = True
    self._run_thread = threading.Thread(target=self._run_handler, name="{}-run".format(self._name))
    self._join_thread = threading.Thread(target=self._join_handler, name="{}-join".format(self._name))
    self._run_thread.start()
    self._join_thread.start()
def _load_project_desc(self, path, base_path=None):
    """
    Load a project descriptor and resolve the paths it references.

    *path* may point to a project.conf file or to a directory containing
    one; when relative it is resolved against *base_path* (or the cwd).
    Returns the descriptor as a Data element.
    """
    # Make the descriptor path absolute
    if not os.path.isabs(path):
        path = os.path.join(base_path, path) if base_path is not None else os.path.abspath(path)

    if not os.path.exists(path):
        raise Exception("Project path not found: {}".format(path))

    # A directory must contain a project.conf file
    if os.path.isdir(path):
        path = os.path.join(path, "project.conf")
    if not os.path.isfile(path):
        raise Exception("Project configuration not found: {}".format(path))

    desc = Data.element()
    desc.merge(ConfigLoader(path).load())

    # Relative references inside the descriptor resolve against its folder
    base_path = os.path.dirname(path)

    if "path" not in desc:
        desc["path"] = base_path
    if not os.path.isabs(desc["path"]):
        desc["path"] = os.path.normpath(os.path.join(base_path, desc["path"]))

    # A string 'conf' names an external configuration file to inline
    if "conf" in desc and isinstance(desc["conf"], basestring):
        desc["conf"] = ConfigLoader(os.path.join(base_path, desc["conf"])).load()

    # A string 'conf_rules' names an external rules file to inline
    if "conf_rules" in desc and isinstance(desc["conf_rules"], basestring):
        desc["conf_rules"] = ConfigLoader(os.path.join(base_path, desc["conf_rules"])).load()

    # Within each rule, a string 'merge' also names a config file to load
    if "conf_rules" in desc and Data.is_list(desc["conf_rules"]):
        for rule in desc["conf_rules"]:
            if Data.is_element(rule) and "merge" in rule and isinstance(rule["merge"], basestring):
                rule["merge"] = ConfigLoader(os.path.join(base_path, rule["merge"])).load()

    return desc
def init_logger(logger, conf):
    """
    Initialize a logger from configuration. Configuration can be:
    - An string referring to the log level
    - A dictionary with the following parameters:
      - level: log level
      - handlers: List of log handlers or just a handler. Each handler
        have the following parameters:
        - type
        - ...: each handler type has a set of parameters

    Supported handlers:
    - smtp: Send logs by email. Parameters:
      - host
      - port (optional)
      - user
      - pass
      - from
      - to
      - subject
      - level
      - format: can be a simple string or a list of strings that will be joint
    """
    if isinstance(logger, basestring):
        logger = get_logger(logger)

    # A bare string is shorthand for the log level
    if isinstance(conf, basestring):
        conf = Data.element(dict(level=conf))
    else:
        conf = Data.create(conf)

    level = conf.get("level")
    if level is not None:
        logger.setLevel(get_level(level))

    # Normalize a single handler element into a one-element list
    handlers_conf = conf.get("handlers", default=Data.list)
    if Data.is_element(handlers_conf):
        handlers_conf = Data.list([handlers_conf])

    for handler_conf in handlers_conf:
        handler = get_handler(logger, handler_conf)
        # BUGFIX: get_handler returns None for an invalid configuration;
        # the original passed that None to addHandler, breaking later
        # logging calls. Skip invalid handlers instead.
        if handler is not None:
            logger.addHandler(handler)
def get_handler(logger, conf):
    """
    Build a logging handler from its configuration.

    :param logger: logger used to report configuration problems.
    :param conf: Data element with keys: type (required, one of _HANDLERS),
                 level (optional), format (optional string or list of strings
                 that are joined together).
    :return: the configured handler, or None when the configuration is invalid.
    """
    if not Data.is_element(conf):
        # BUGFIX: was "{1}" with a single format() argument -> IndexError
        logger.error("Malformed log handler:\n{0}".format(repr(conf)))
        return

    handler_type = conf.get("type")
    if handler_type is None or not isinstance(handler_type, basestring) \
            or handler_type.lower() not in _HANDLERS:
        logger.error("Unknown or unsupported handler type: {0}\n{1}".format(
            handler_type, repr(conf)))
        return

    # BUGFIX: look up with the lowercased key, consistently with the
    # membership check above (the raw key raised KeyError for e.g. "SMTP")
    handler = _HANDLERS[handler_type.lower()](conf)

    level = conf.get("level")
    if level is not None:
        handler.setLevel(get_level(level))

    fmt = conf.get("format", _DEFAULT_FORMAT)
    if fmt is not None:
        if Data.is_list(fmt):
            fmt = "".join(fmt.to_native())
        handler.setFormatter(logging.Formatter(fmt))

    return handler
def __init__(self, rule, base_path=None, platform=None):
    """
    Parse a configuration rule.

    :param rule: rule definition with optional keys: on (match criteria),
                 del (list of keys to delete), set (list of [key, value]
                 pairs) and merge (dictionary or path to a config file).
    :param base_path: base path used to resolve a relative merge path.
    :param platform: when given, added as the 'platform' match criterion.
    :raises Exception: when any of the rule sections has the wrong type.
    """
    rule = Data.create(rule)

    self.on = rule.get("on", {})
    if isinstance(self.on, basestring):
        # A bare string is shorthand for matching a task name
        self.on = dict(task=self.on)
    if platform is not None:
        self.on["platform"] = platform

    self.dels = rule.get("del", default=Data.list)
    if not Data.is_list(self.dels):
        raise Exception("Expected a list of strings for del operations of rule: {}".format(repr(rule)))
    for k in self.dels:
        if not isinstance(k, basestring):
            raise Exception("Expected a list of strings for del operations of rule: {}".format(repr(rule)))

    self.set = rule.get("set", default=Data.list)
    # BUGFIX: the original validated self.dels here instead of self.set,
    # so a non-list "set" section slipped through this check
    if not Data.is_list(self.set):
        raise Exception("Expected a list of tuples [key, value] for set operations of rule: {}".format(repr(rule)))
    for s in self.set:
        if not Data.is_list(s) or len(s) != 2:
            raise Exception("Expected a list of tuples [key, value] for set operations of rule: {}".format(repr(rule)))

    self.merge = rule.get("merge")
    if isinstance(self.merge, basestring):
        # A string merge is a path to a configuration file to load
        if not os.path.isabs(self.merge):
            if base_path is None:
                raise Exception("Configuration rule merge path should be absolute path: {}".format(self.merge))
            else:
                self.merge = os.path.join(base_path, self.merge)
        if not os.path.isfile(self.merge):
            raise Exception("Configuration rule merge path not found: {}".format(self.merge))
        # join() with an absolute second argument returns it unchanged
        self.merge = ConfigLoader(os.path.join(base_path or "", self.merge)).load()
    if self.merge is not None and not Data.is_element(self.merge):
        raise Exception("Expected a dictionary for merge operation of rule: {}".format(repr(rule)))
def get_project_conf(conf, project, key, default=None, dtype=None):
    """
    Look up *key* in the project configuration, falling back to the value
    found in the global *conf* (which itself falls back to *default*).
    """
    fallback = conf.get(key, default=default, dtype=dtype)
    if not Data.is_element(project):
        project = Data.create(project)
    return project.get(key, default=fallback, dtype=dtype)
def init_project(logger, config, paths, storage, project):
    """
    Prepare a project for analysis: optionally purge previous results,
    create the project directories, normalize the project definition for
    backward compatibility, validate referenced storage objects and save
    the resulting project definition.

    :param logger: logger for progress messages.
    :param config: global configuration (reads config.results.purge_on_start).
    :param paths: helper used to locate the results/project directories.
    :param storage: object storage used to purge and check referenced objects.
    :param project: project definition (Data element-like with at least 'id',
                    'files' and 'storage_objects').
    :return: the normalized project definition converted to native types.
    :raises InternalError: when referenced storage objects are missing.
    """
    project_id = project["id"]

    results_path = paths.results_path()
    project_path = paths.project_path(project_id)
    project_temp_path = paths.project_temp_path(project_path)

    # Remove results of previous runs when configured to do so
    if config.results.purge_on_start:
        logger.info(" Purging previous results ...")
        if os.path.isdir(project_path):
            logger.info(" {} ...".format(os.path.relpath(project_path, results_path)))
            shutil.rmtree(project_path)
        #if os.path.isdir(project_temp_path):
        # logger.info(" {} ...".format(os.path.relpath(project_temp_path, results_path)))
        # shutil.rmtree(project_temp_path)
        for obj_name in storage.list_objects(prefix="results/"):
            logger.info(" {} ...".format(obj_name))
            storage.delete_object("results/{}".format(obj_name))

    ensure_path_exists(project_path)
    ensure_path_exists(project_temp_path)

    projdb_path = os.path.join(project_path, "project.db")

    # Normalize the annotations field: must be a dictionary element
    if "annotations" in project:
        annotations = project["annotations"]
        if not Data.is_element(annotations):
            logger.warn("Overriding project annotations field with an empty dictionary")
            project["annotations"] = annotations = Data.element()
    else:
        project["annotations"] = annotations = Data.element()

    # for backward compatibility: move unknown top-level keys into annotations
    # (keys() returns a list here, so deleting while iterating is safe)
    for key in project.keys():
        if key not in ["id", "assembly", "files", "storage_objects", "annotations", "conf", "oncodriveclust", "oncodrivefm"]:
            value = project[key]
            del project[key]
            annotations[key] = value

    # Normalize the conf field: must be a dictionary element
    project["conf"] = pconf = project.get("conf") or Data.element()
    if not Data.is_element(pconf):
        logger.warn("Overriding project conf field with an empty dictionary")
        project["conf"] = pconf = Data.element()

    # for backward compatibility: move legacy tool settings under conf
    for key in project.keys():
        if key in ["oncodriveclust", "oncodrivefm"]:
            value = project[key]
            del project[key]
            pconf[key] = value

    project["path"] = project_path
    project["temp_path"] = project_temp_path
    project["db"] = projdb_path
    if "assembly" not in project:
        project["assembly"] = DEFAULT_ASSEMBLY

    # Fail early when referenced storage objects do not exist
    missing_objects = []
    for obj_name in project["storage_objects"]:
        if not storage.exists_object(obj_name):
            missing_objects += [obj_name]

    if len(missing_objects) > 0:
        raise InternalError("Project {0} references some missing objects:\n{1}".format(project_id, "\n".join(missing_objects)))

    project["files"] = [str(f) for f in project["files"]] #unicode is not json serializable
    project["storage_objects"] = [str(f) for f in project["storage_objects"]] #unicode is not json serializable

    project = project.to_native()

    # save project.conf
    projres = ProjectResults(project)
    projres.save_def()

    return project