def download(self, target_path, prefix=None, delimiter=None, include=None,
             exclude=None, start_callback=None):
    """
    Download the selected objects in the container to a local path.

    :param prefix: download only the objects whose name starts with this prefix.
           Useful for pseudo-hierarchical navigation.
    :param delimiter: download distinct objects with names up to either the first
           delimiter or the end. Useful for pseudo-hierarchical navigation.
    :param include: optional patterns; only matching object names are downloaded.
    :param exclude: optional patterns; matching object names are skipped.
    :param start_callback: optional callable invoked with the object name
           before each download starts.
    """
    for obj_name in self.list_objects(prefix, delimiter):
        if (include is not None and not self.__filename_match(obj_name, include)) \
                or (exclude is not None and self.__filename_match(obj_name, exclude)):
            continue

        path = os.path.normpath("{}/{}".format(target_path, obj_name))
        if os.path.relpath(path, target_path).startswith(".."):
            get_logger(__name__).warn(
                "Object output path goes outside the target path: {}".format(path))
            continue

        base_path = os.path.dirname(path)
        if not os.path.exists(base_path):
            os.makedirs(base_path)

        prefixed_obj_name = (prefix or "") + obj_name
        obj = self.get_object(prefixed_obj_name)
        if obj.exists():
            if start_callback is not None:
                start_callback(obj_name)
            obj.get_data(path)
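# Hedged usage sketch (not from the source): assumes `container` is an instance
# of the class above and that include/exclude take fnmatch-style patterns,
# which the name __filename_match suggests but this section does not confirm.
def print_progress(obj_name):
    print("downloading {} ...".format(obj_name))

container.download("/tmp/mirror",
                   prefix="results/",        # only objects under results/
                   include=["*.tsv"],        # keep tabular outputs
                   exclude=["*.tmp"],        # skip temporary files
                   start_callback=print_progress)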
def new_function(*args, **kw):
    from wok.logger import get_logger
    get_logger(None, "wok").debug("acquire %s" % f.__name__)
    lock.acquire()
    try:
        return f(*args, **kw)
    finally:
        lock.release()
        get_logger(None, "wok").debug("release %s" % f.__name__)
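# Hedged sketch (assumption): new_function above is the inner wrapper of a
# module-level synchronization decorator, with `f` and `lock` coming from the
# enclosing scope. One plausible shape of that enclosing scope:
import threading
from functools import wraps

lock = threading.Lock()

def synchronized(f):
    @wraps(f)
    def new_function(*args, **kw):
        from wok.logger import get_logger
        get_logger(None, "wok").debug("acquire %s" % f.__name__)
        lock.acquire()
        try:
            return f(*args, **kw)
        finally:
            lock.release()
            get_logger(None, "wok").debug("release %s" % f.__name__)
    return new_function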
def load(self):
    try:
        with open(self.path, "r") as f:
            v = json.load(f)
        return Data.create(v)
    except Exception as e:
        from wok.logger import get_logger
        msg = ["Error loading configuration from ", self.path, ":\n\n", str(e), "\n"]
        get_logger(__name__).error("".join(msg))
        raise
def sync_function(obj, *args, **kw):
    #log.debug("<ACQUIRE %s>" % f.__name__)
    obj._acquire()
    try:
        return f(obj, *args, **kw)
    finally:
        try:
            #log.debug("<RELEASE %s>" % f.__name__)
            obj._release()
        except ThreadError:
            from wok.logger import get_logger
            get_logger(__name__).warn("<RELEASE ERROR {}.{}>".format(
                obj.__class__.__name__, f.__name__))
def merge_into(self, conf):
    try:
        # use a context manager so the file is closed even if parsing fails
        with open(self.path, "r") as f:
            v = json.load(f)
        cf = DataFactory.from_native(v)
        conf.merge(cf)
    except Exception as e:
        from wok import logger
        msg = ["Error loading configuration from ", self.path, ":\n\n", str(e), "\n"]
        logger.get_logger("config").error("".join(msg))
        raise
def sync_function(obj, *args, **kw):
    #log.debug("<ACQUIRE %s>" % f.__name__)
    obj._acquire()
    try:
        return f(obj, *args, **kw)
    finally:
        try:
            #log.debug("<RELEASE %s>" % f.__name__)
            obj._release()
        except:
            from wok.logger import get_logger
            get_logger(name="synchronized").error(
                "<RELEASE ERROR %s.%s>" % (obj.__class__.__name__, f.__name__))
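# Hedged sketch (assumption): the sync_function variants above are the inner
# wrapper of a method-level @synchronized decorator, where `f` is the wrapped
# method and the object provides _acquire()/_release() around a threading lock.
from functools import wraps
from threading import ThreadError

def synchronized(f):
    @wraps(f)
    def sync_function(obj, *args, **kw):
        obj._acquire()
        try:
            return f(obj, *args, **kw)
        finally:
            try:
                obj._release()
            except ThreadError:
                from wok.logger import get_logger
                get_logger(__name__).warn("<RELEASE ERROR {}.{}>".format(
                    obj.__class__.__name__, f.__name__))
    return sync_function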
def get_annotations_to_save(self, keys, annotations, names=None, values=None):
    assert (names is None and values is None) or \
           (names is not None and values is not None and len(names) == len(values))

    if names is None:
        names = []
        values = []
    else:
        names = list(names)
        values = list(values)

    ann_keys = keys
    if ann_keys is None:
        ann_keys = []
    elif Data.is_list(ann_keys):
        ann_keys = ann_keys.to_native()
    else:
        log = logger.get_logger(__name__)
        log.warn("Wrong type for 'project.annotations', expecting a list"
                 " but found:\n{0}".format(repr(ann_keys)))
        ann_keys = []

    for ann_key in ann_keys:
        default = None
        if isinstance(ann_key, basestring):
            key = name = ann_key
        elif isinstance(ann_key, list) and len(ann_key) == 2:
            key = ann_key[0]
            name = ann_key[1]
        else:
            # skip malformed entries instead of failing with an unbound 'key'
            continue
        value = annotations[key] if key in annotations else default
        names += [name]
        values += [value]

    return names, values
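# Hedged usage sketch (names and data are illustrative, not from the source):
# keys may mix plain strings and [key, display_name] pairs; annotations that
# are missing fall back to None.
keys = ["sample_id", ["tumour_type", "Tumour type"]]
annotations = {"sample_id": "S001"}

names, values = obj.get_annotations_to_save(keys, annotations)
# names  -> ["sample_id", "Tumour type"]
# values -> ["S001", None]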
def __init__(self, desc, name=None, logger=None, base_path=None):
    self._log = logger or woklogger.get_logger(_LOGGER_NAME)

    self.name = name or desc.get("name")

    self._flow_loader = None

    platform = desc.get("platform")
    self.path = desc.get("path")

    self.conf = desc.get("conf", default=Data.element)
    if isinstance(self.conf, basestring):
        self.conf = ConfigLoader(os.path.join(base_path or "", self.conf)).load()

    self.conf_rules = []
    if self.path is not None:
        self.conf_rules += [
            ConfRule(dict(set=[[rtconf.PROJECT_PATH, self.path]]), base_path, platform)]
    self.conf_rules += [
        ConfRule(rule, base_path, platform)
        for rule in desc.get("conf_rules", default=Data.list)]

    self.flows = desc.get("flows", default=Data.list)
def main():
    conf = OptionsConfig()

    if "wok" not in conf:
        print("Missing wok configuration")
        exit(-1)

    conf.expand_vars()

    logger.initialize(conf.get("wok.logging"))
    log = logger.get_logger("wok.server.start")

    # run the server
    retcode = 0
    try:
        server = WokServer(conf, app)
        server.init()
        server.run()
    except Exception as e:
        log.exception(e)
        retcode = -1

    exit(retcode)
def __init__(self, conf):
    if "sources" in conf:
        self.sources = conf["sources"]
    else:
        self.sources = DataElement()

    self.log = logger.get_logger(conf.get("log"), name="em_file")
def __log(msg, level=None):
    from wok.logger import get_logger
    log = get_logger("wok.data")
    if level is None:
        import logging
        level = logging.WARNING
    log.log(level, msg)
def load_conf(self, conf_files=None, path_env="WEB_CONF_PATH",
              files_env="WEB_CONF_FILES", default_ext=".cfg"):
    logger = get_logger("wok.server", level="info")
    logger.info("Loading Flask configuration ...")

    conf_path = os.environ.get(path_env, os.getcwd())
    conf_files = _get_conf_files(conf_path, conf_files or [],
                                 os.environ.get(files_env), default_ext)

    for cfg_path in conf_files:
        logger.info("+++ {}".format(cfg_path))
        self.config.from_pyfile(cfg_path)
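# Hedged sketch (assumption): _get_conf_files is referenced above and in the
# _init_conf snippets below but not defined in this section. One plausible
# implementation, given how it is called (base path, explicit file list,
# space-separated names from the environment, default extension):
import os

def _get_conf_files(conf_path, conf_files, env_files, default_ext):
    names = list(conf_files)
    if env_files:
        names += env_files.strip().split(" ")
    paths = []
    for name in names:
        if not os.path.splitext(name)[1]:
            name += default_ext                   # apply the default extension
        if not os.path.isabs(name):
            name = os.path.join(conf_path, name)  # resolve relative to conf_path
        paths.append(name)
    return paths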
def __init__(self, conf, base_path=None):
    self.conf = conf
    self.base_path = base_path or os.getcwd()
    self._log = woklogger.get_logger(_LOGGER_NAME)
    self._projects = {}
def __init__(self, args_usage="", epilog="", logger_name=None):
    self.args_usage = args_usage
    self.epilog = epilog

    if logger_name is None:
        if hasattr(self, "cmd_name"):
            logger_name = self.cmd_name
        else:
            logger_name = os.path.splitext(os.path.basename(sys.argv[0]))[0]

    # Determine binary path
    script_path = os.path.dirname(sys.argv[0])
    if script_path:
        self.launchers_path = os.path.abspath(script_path)
    else:
        self.launchers_path = os.getcwd()

    # Determine default paths
    self.root_path = os.path.normpath(os.path.join(self.launchers_path, ".."))
    self.bin_path = os.path.join(self.root_path, "bin")
    self.data_path = os.path.join(self.root_path, "data")
    self.conf_path = os.path.join(self.root_path, "conf")
    self.workflows_path = os.path.join(self.root_path, "workflows")
    self.runtime_path = os.path.join(self.root_path, "runtime")

    # Parse arguments
    parser = ArgumentParser(
        prog="run " + self.cmd_name,
        epilog=self.epilog,
        formatter_class=RawDescriptionHelpFormatter)

    parser.add_argument('--version', action='version', version='%(prog)s ' + VERSION)

    self.add_arguments(parser)

    parser.add_argument("-L", "--log-level", dest="log_level", default=None,
                        choices=["debug", "info", "warn", "error", "critical", "notset"],
                        help="Define the logging level")

    self.args = parser.parse_args()

    # Create preliminary logger
    level = self.args.log_level or "info"
    logger.initialize(level=level)
    self.log = logger.get_logger(logger_name, level=level)
def main():
    conf = Config()

    initialize({"log": {"level": "debug"}})
    log = get_logger("mrna_preproc_counts")
    log.setLevel(logging.DEBUG)

    log.info("Querying log2r tumour units ...")

    es = EntityServer(conf["entities"])
    em = es.manager()
def __init__(self, conf, logger_name=None):
    self._conf = conf

    if hasattr(self, "plugin_required_conf"):
        missing_conf = []
        for arg in self.plugin_required_conf:
            if arg not in conf:
                missing_conf += [arg]
        if len(missing_conf) > 0:
            raise MissingConfigParamsError(missing_conf)

    if logger_name is not None:
        self._log = logger.get_logger(logger_name)
def __init__(self, config, paths, logger=None):
    """
    :param config: intogensm.config.OncodriveClustConfig
    :param paths: PathsConfig
    :param logger: optional logger; defaults to the "oncodriveclust" logger
    """
    self.config = config
    self.paths = paths
    self.logger = logger or get_logger("oncodriveclust")
    self._load_config()
def run(self):
    "Build the configuration and execute the command"

    self.build_conf()

    # Expand configuration variables
    self.conf = self.conf_builder.get_conf()
    self.conf.expand_vars()
    #self.log.debug(repr(self.conf))

    # Validate and process configuration
    self.process_conf()

    # Regenerate configuration
    self.conf = self.conf_builder.get_conf()

    # Final logging configuration
    log = logger.get_logger("")
    log.removeHandler(log.handlers[0])
    logging_conf = self.conf.get("wok.logging")
    logger.initialize(logging_conf)

    # Show some debugging information
    self.log.debug("Root path = {}".format(self.root_path))
    self.log.debug("Conf path = {}".format(self.conf_path))
    self.log.debug("Data path = {}".format(self.data_path))
    self.log.debug("Workflows path = {}".format(self.workflows_path))
    self.log.debug("Runtime path = {}".format(self.runtime_path))
    self.log.debug("Results path = {}".format(self.results_path))
    self.log.debug("Temp path = {}".format(self.temp_path))

    if len(self.user_conf_files) > 0:
        self.log.debug("User defined configuration files:\n{}".format(
            "\n".join("  {}".format(cf) for cf in self.user_conf_files)))

    self.log.debug("Effective configuration: " + str(self.conf))

    # Execute
    try:
        self.execute()
    except Exception as ex:
        self.log.exception(ex)
        return -1

    return 0
def __init__(self, app=None, start_engine=True, **kwargs):
    self.app = app
    self._start_engine = start_engine
    self.engine = None
    self.lock = Lock()
    self._initialized = False

    self.logger = logger.get_logger("wok.server", level="info")

    if app is not None:
        self.init_app(app, **kwargs)
def load(self):
    log = get_logger(__name__)

    log.info("Loading project data definitions ...")
    log.debug("Paths:\n  {0}".format("\n  ".join(self.paths)))
    log.debug("Includes:\n  {0}".format("\n  ".join(self.includes)))
    log.debug("Excludes:\n  {0}".format("\n  ".join(self.excludes)))

    # compile regular expressions
    includes = [re.compile(inc) for inc in self.includes]
    excludes = [re.compile(exc) for exc in self.excludes]

    # scan paths
    project_ids = set()
    projects = []
    for scan_path in self.paths:
        for path, project in list_projects(log, scan_path):
            if "id" not in project:
                log.warn("Discarding project that doesn't have 'id': {0}".format(path))
                continue
            if "files" not in project:
                log.warn("Discarding project that doesn't have 'files': {0}".format(path))
                continue

            project_id = project["id"]
            project_name = (": " + project["name"]) if "name" in project else ""

            if match_id(project_id, includes) and not match_id(project_id, excludes):
                if project_id in project_ids:
                    msg = "Duplicated project id at {0}".format(path)
                    if self.ignore_duplicates:
                        log.warn(msg)
                        continue
                    else:
                        raise Exception(msg)

                # remember the id so later occurrences are detected as duplicates
                project_ids.add(project_id)

                log.info("Included {0}{1}".format(project_id, project_name))
                self.__format_project(log, project, base_path=os.path.dirname(path))
                projects += [project]
            else:
                log.info("Excluded {0}{1}".format(project_id, project_name))

    return projects
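# Hedged sketch (assumption): match_id is used above but not defined in this
# section. A minimal version testing an id against compiled regular
# expressions; how empty pattern lists are handled upstream (e.g. a default
# include of ".*") is an assumption.
def match_id(project_id, patterns):
    return any(p.match(project_id) is not None for p in patterns)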
def __init__(self, conf):
    Synchronizable.__init__(self)

    self.conf = conf

    wok_conf = conf["wok"]

    self._log = logger.get_logger(wok_conf.get("log"), "wok-engine")

    self._work_path = wok_conf.get("work_path", os.path.join(os.getcwd(), "wok"))

    self._flow_path = wok_conf.get("flow_path")
    if self._flow_path is None:
        self._flow_path = [os.curdir]
    elif not isinstance(self._flow_path, (list, DataList)):
        raise Exception('wok.flow_path: A list of paths expected. Example ["path1", "path2"]')

    self._flow_loader = FlowLoader(self._flow_path)
    sb = ["wok.flow_path:\n"]
    for uri, ff in self._flow_loader.flow_files.items():
        sb += ["\t", uri, "\t", ff[0], "\n"]
    self._log.debug("".join(sb))

    self._instances = []
    self._instances_map = {}

    #self._lock = Lock()
    self._cvar = Condition(self._lock)

    self._run_thread = None
    self._running = False

    self._job_task_map = {}

    self._logs_threads = []
    self._logs_queue = Queue()

    self._join_thread = None
    self._join_queue = Queue()

    self._notified = False

    self._job_mgr = self._create_job_manager(wok_conf)

    self._storage = self._create_storage(wok_conf)

    #TODO self._db = SqliteEngineDB(self)

    self._restore_state()
def __init__(self, name, conf_builder, project, flow, container_name, engine):
    self.name = name
    self.conf_builder = conf_builder
    self.project = project
    self.root_flow = flow
    self.container_name = container_name
    self.engine = engine

    self.title = flow.title
    self.created = datetime.now()

    self.flow_uri = "{}:{}".format(project.name, flow.name)  # FIXME flow.uri

    self._log = logger.get_logger("wok.case")

    self._initialize_conf()

    # reset state
    self._state = runstates.PAUSED

    # components by name
    self._component_by_cname = None

    # create nodes tree
    self.root_node = self._create_tree(self.root_flow)

    # calculate components configuration
    self._apply_component_conf()

    # connect ports and create dependencies
    self._connect_ports_and_create_deps()

    # calculate the list of tasks in dependency order
    self.tasks = self._tasks_in_dependency_order()

    # list of used platforms
    self.platforms = self._used_platforms()

    # calculate priorities
    for node in self.root_node.children:
        self._calculate_priorities(node)

    # Track the number of active workitems
    self.num_active_workitems = 0

    # Whether this case is marked for removal when all the jobs finish
    self.removed = False
def __init__(self, name, engine, conf):
    self.name = name
    self.engine = engine
    self.conf = conf

    self.__next_id = 1

    self._log = logger.get_logger(conf.get("log"), name)

    if "work_path" not in self.conf:
        raise Exception("Missing 'work_path' in configuration")

    self._work_path = conf["work_path"]
    self._output_path = self.conf.get("output_path", os.path.join(self._work_path, "output"))
def run(self):
    "Build the configuration and execute the command"

    self.build_conf()

    # Expand configuration variables
    self.engine_conf = self.engine_conf_args.validated_conf()
    self.case_conf = self.case_conf_args.validated_conf(expand_vars=False)

    # Validate and process configuration
    self.process_conf()

    # Regenerate configuration
    self.engine_conf = self.engine_conf_builder.get_conf()
    self.case_conf = self.case_conf_builder.get_conf()

    # Final logging configuration
    log = logger.get_logger("")
    log.removeHandler(log.handlers[0])
    logging_conf = self.engine_conf.get("wok.logging")
    logger.initialize(logging_conf)

    # Show some debugging information
    self.log.debug("Root path = {}".format(self.root_path))
    self.log.debug("Conf path = {}".format(self.conf_path))
    #self.log.debug("Runtime path = {}".format(self.runtime_path))
    #self.log.debug("Results path = {}".format(self.results_path))
    #self.log.debug("Temp path = {}".format(self.temp_path))

    self.engine_conf_args.log_debug()
    self.case_conf_args.log_debug()

    # Execute
    try:
        self.execute()
    except BaseException as ex:
        self.log.error(str(ex))
        from traceback import format_exc
        self.log.debug(format_exc())
        return -1

    return 0
def _init_conf(self, app, conf_files, path_var, files_var, args_var):
    self.logger.info("Checking Wok configuration files ...")

    args = []

    conf_path = app.config.get(path_var, os.environ.get(path_var, os.getcwd()))
    wok_conf_files = _get_conf_files(
        conf_path, conf_files or [],
        app.config.get(files_var, os.environ.get(files_var)), ".conf")

    for path in wok_conf_files:
        if not os.path.exists(path):
            self.logger.error("--- {} (not found)".format(path))
            continue
        self.logger.info("+++ {}".format(path))
        args += ["-c", path]

    env_args = os.environ.get(args_var)
    if env_args is not None:
        env_args = env_args.strip().split(" ")
    wok_conf_args = app.config.get(args_var, env_args)
    if wok_conf_args is not None:
        args += wok_conf_args
        self.logger.debug("Arguments: {}".format(" ".join(wok_conf_args)))

    self.logger.info("Loading Wok configuration ...")

    parser = ArgumentParser()
    wok_args = Arguments(parser, case_name_args=False, logger=self.logger)
    wok_args.initialize(parser.parse_args(args))

    self.conf_builder = wok_args.engine_conf_builder
    self.conf = wok_args.engine_conf.expand_vars()
    self.case_conf_builder = wok_args.case_conf_builder
    self.case_conf = wok_args.case_conf.expand_vars()

    # initialize logging according to the configuration
    log = logger.get_logger("")
    log.removeHandler(log.handlers[0])
    logging_conf = self.conf.clone().expand_vars().get("wok.logging")
    logger.initialize(logging_conf)

    self.logger.debug(repr(self.conf))
def __init__(self, conf, flow=None):
    self.conf = conf

    wok_conf = conf["wok"]

    self._log = logger.get_logger(wok_conf.get("log"), "engine")

    self._instance_name = wok_conf["__instance.name"]

    self._work_path = wok_conf.get("work_path", os.path.join(os.getcwd(), "wok"))
    self._output_path = os.path.join(self._work_path, "output")
    self._ports_path = os.path.join(self._work_path, "ports")
    self._tasks_path = os.path.join(self._work_path, "tasks")

    if "port_map" in wok_conf:
        self._port_data_conf = wok_conf["port_map"]
    else:
        self._port_data_conf = wok_conf.create_element()

    self._autorm_task = wok_conf.get("auto_remove.task", False, dtype=bool)

    self._clean = wok_conf.get("clean", True, dtype=bool)

    self._stop_on_errors = wok_conf.get("stop_on_errors", True, dtype=bool)

    self._maxpar = wok_conf.get("defaults.maxpar", 0, dtype=int)
    self._wsize = wok_conf.get("defaults.wsize", 0, dtype=int)

    self._start_module = wok_conf.get("start_module")

    self._flow = None

    self._state = WokEngine.S_UNINITIALIZED

    self._run_thread = None

    self._job_sched = self._create_job_scheduler(wok_conf)

    self._run_lock = Lock()

    if flow is not None:
        self._initialize(flow)
def __init__(self, name, conf):
    self._conf = conf

    self._name = conf.get("name", name)

    self._log = logger.get_logger("wok.platform.{}".format(name))

    if "work_path" not in self._conf:
        raise MissingConfigParamError("work_path")

    self._work_path = conf["work_path"]
    if not os.path.exists(self._work_path):
        os.makedirs(self._work_path)

    self._data = self._create_data_provider()

    self._storage = self._create_storage()

    self._job_manager = self._create_job_manager()

    self._callbacks = CallbackManager(
        delegates=[(events.JOB_UPDATE, self._job_manager.callbacks)])
def db_update(engine, session, logger=None):
    if logger is None:
        logger = woklogger.get_logger(__name__)

    version_param = session.query(DbParam).filter(DbParam.name == "version").first()
    if version_param is None:
        version_param = DbParam(name="version", value=1)
        session.add(version_param)

    if version_param.value < DB_VERSION:
        logger.info("Updating the server database ...")
        for i in range(version_param.value, DB_VERSION):
            logger.info("  {} --> {} ...".format(i, i + 1))
            sts = __CHANGES[i - 1]
            if isinstance(sts, basestring):
                sts = [sts]
            for sql in sts:
                logger.debug("  " + sql)
                engine.execute(sql)
        version_param.value = DB_VERSION
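# Hedged sketch (assumption): __CHANGES is referenced above but not shown in
# this section. Given the indexing (__CHANGES[i - 1] migrates version i to
# i + 1), it is presumably a list where each entry is one SQL statement or a
# list of statements for one migration step. The statements below are purely
# illustrative:
DB_VERSION = 3

__CHANGES = [
    # version 1 -> 2
    "ALTER TABLE cases ADD COLUMN removed BOOLEAN",
    # version 2 -> 3: several statements in one migration step
    ["ALTER TABLE cases ADD COLUMN created TIMESTAMP",
     "UPDATE cases SET removed = 0"],
]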
def __init__(self, name, conf):
    self._name = name
    self._conf = conf

    self._log = logger.get_logger("wok.jobs.{}".format(name))

    if "work_path" not in self._conf:
        raise MissingConfigParamError("work_path")

    self._work_path = conf["work_path"]
    if not os.path.exists(self._work_path):
        os.makedirs(self._work_path)

    db_path = os.path.join(self._work_path, "jobs.db")
    # TODO if not in recover_mode then delete jobs.db
    self._db = create_engine("sqlite:///{}".format(db_path))
    self._session_factory = create_session_factory(self._db)

    self._callbacks = CallbackManager(valid_events=[events.JOB_UPDATE])

    # Lock for database jobs state changes
    self._lock = threading.Lock()
def initialize(self):
    self.conf = self.conf_builder()

    wok_conf = self.conf["wok"]
    inst_conf = wok_conf["__instance"]

    self._log = logger.get_logger(wok_conf.get("log"), self.name)

    self._work_path = inst_conf["work_path"]  # TODO deprecated

    # TODO deprecated
    self._port_data_conf = wok_conf.create_element()

    self.default_maxpar = wok_conf.get("defaults.maxpar", 0, dtype=int)
    self.default_wsize = wok_conf.get("defaults.wsize", 1, dtype=int)

    self.root_flow = self.engine.flow_loader.load_from_file(self.flow_file)

    wok_conf["__flow.name"] = self.root_flow.name
    wok_conf["__flow.path"] = os.path.dirname(os.path.abspath(self.flow_file))
    wok_conf["__flow.file"] = os.path.basename(self.flow_file)

    # self._log.debug("\n" + repr(self.root_flow))

    # create nodes tree
    self.root_node = self._create_tree(self.root_flow, namespace="")

    # connect ports
    self._connect_ports(self.root_node, namespace=self.root_node.name)

    # calculate dependencies
    self._calculate_dependencies(self.root_node)

    # calculate priorities
    for m in self.root_node.modules:
        self._calculate_priorities(m)
def __init__(self):
    # Get task key and storage configuration
    cmd_conf = OptionsConfig(
        required=["case", "task", "index", "data.type", "storage.type"])

    # Register signals
    self._signal_names = {}
    for signame in [x for x in dir(signal) if x.startswith("SIG")]:
        try:
            signum = getattr(signal, signame)
            signal.signal(signum, self.__signal_handler)
            self._signal_names[signum] = signame
        except:
            pass

    # command line configuration
    case_name = cmd_conf["case"]
    task_cname = cmd_conf["task"]
    workitem_index = cmd_conf["index"]

    # initialize the data provider
    provider_conf = cmd_conf["data"]
    self._provider = data_provider_factory.create(provider_conf)
    self._provider.start()

    # initialize storage
    storage_conf = cmd_conf["storage"]
    self.storage = storage_factory.create(storage_conf)
    self.storage = self.storage.get_container(case_name)

    # load the module and task descriptors
    task_desc = self._provider.load_task(case_name, task_cname)
    workitem_desc = self._provider.load_workitem(case_name, task_cname, workitem_index)
    partition = workitem_desc["partition"]

    # setup task configuration
    self.conf = Data.create(task_desc["conf"])
    self.conf["__task_index"] = workitem_index
    self.conf.expand_vars()

    # setup task attributes
    self.case = workitem_desc["case"]
    self.task = workitem_desc["task"]
    self.id = workitem_desc["cname"]
    self.name = workitem_desc["name"]
    self.index = workitem_index

    # initialize decorators
    self._main = None
    self._sources = []
    self._foreach = None
    self._begin = None
    self._end = None

    self._start_time = 0
    self._end_time = self._start_time

    # initialize task logging
    log_conf = self.conf.get("logging")
    logger.initialize(log_conf)
    self.logger = logger.get_logger(self.name)

    self.logger.debug("Task descriptor: {}".format(Data.create(task_desc)))
    self.logger.debug("WorkItem descriptor: {}".format(Data.create(workitem_desc)))

    # Initialize input stream
    self._stream = Stream(self._provider, task_desc["stream"])

    # Initialize ports
    self._ports = {}
    self._in_ports = []
    self._out_ports = []
    self._open_ports = {}
    if "ports" in task_desc and "ports" in partition:
        port_descriptors = Data.create(task_desc["ports"])

        for port_desc in port_descriptors.get("in", default=list):
            port_desc["mode"] = PORT_MODE_IN
            self._ports[port_desc["name"]] = port_desc
            self._in_ports += [port_desc]

        for port_desc in port_descriptors.get("out", default=list):
            port_desc["mode"] = PORT_MODE_OUT
            self._ports[port_desc["name"]] = port_desc
            self._out_ports += [port_desc]

        port_descriptors = Data.create(partition["ports"])

        for port_desc in port_descriptors.get("in", default=list):
            task_port_desc = self._ports[port_desc["name"]]
            task_port_desc["data"] = port_desc["data"]

        for port_desc in port_descriptors.get("out", default=list):
            task_port_desc = self._ports[port_desc["name"]]
            task_port_desc["data"] = port_desc["data"]

    # Get hostname
    try:
        import socket
        self.hostname = socket.gethostname()
    except:
        self.hostname = "unknown"

    # The context field is free to be used by the task user to save variables
    # related with the whole task life cycle. By default it is initialized
    # with a dictionary, but it can be overwritten by the user with any value.
    # Wok will never use it.
    self.context = {}
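# Hedged sketch (assumption): neither descriptor format is defined in this
# section. From the fields read above, a workitem descriptor presumably looks
# roughly like this; all names, the index "0003" and the port entries are
# illustrative, and "data" holds some provider-specific reference.
workitem_desc = {
    "case": "case1",
    "task": "variant_calling",
    "cname": "case1.variant_calling-0003",
    "name": "variant_calling-0003",
    "partition": {
        "ports": {
            "in": [{"name": "mutations", "data": None}],
            "out": [{"name": "results", "data": None}],
        },
    },
}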
def missing_fields(self, keys):
    from wok.logger import get_logger
    get_logger(__name__).warn("{0}.missing_fields() is deprecated, "
                              "use {0}.missing_keys() instead".format(self.__class__.__name__))
    return self.missing_keys(keys)