Example #1
File: model.py Project: bbglab/wok
	def __init__(self, name, title = "", desc = "", enabled = True,
					serializer = None, wsize = None,
					maxpar = None, conf = None, in_ports = None, out_ports = None,
					resources = None, params = None):
		_BasePort.__init__(self, name, title, desc, enabled, serializer, wsize)

		self.maxpar = maxpar
		self.conf = conf

		if in_ports is None:
			self.in_ports = []
			self.in_port_map = {}
		else:
			self.in_ports = in_ports
			self.in_port_map = {}
			for p in in_ports:
				self.in_port_map[p.name] = p

		if out_ports is None:
			self.out_ports = []
			self.out_port_map = {}
		else:
			self.out_ports = out_ports
			self.out_port_map = {}
			for p in out_ports:
				self.out_port_map[p.name] = p

		if resources is None:
			resources = Data.element()
		self.resources = resources

		if params is None:
			params = []
		self.params = params
Example #2
	def create_case(self, user, case_name, conf_builder, project_name, flow_name, properties=None, start=True):
		case = Case(
					owner_id=user.id,
					name=case_name,
					project_name=project_name,
					flow_name=flow_name,
					conf=conf_builder.get_conf(),
					properties=Data.element(properties))

		session = db.Session()
		session.add(case)
		session.commit()

		engine_case_name = "{}-{}".format(user.nick, case_name)
		#while self.engine.exists_case(engine_case_name):
		#	engine_case_name = "{}-{}".format(user.nick, uuid4().hex[-6:])

		engine_case = self.engine.create_case(engine_case_name, conf_builder, project_name, flow_name, engine_case_name)

		case.created = engine_case.created
		case.engine_name = engine_case_name
		session.commit()

		if start:
			engine_case.start()

		return case
Example #3
    def to_element(self, e=None):
        if e is None:
            e = Data.element()

        e["ns"] = self.namespace
        e["name"] = self.name
        e["cname"] = self.cname

        return e
Example #4
File: nodes.py Project: bbglab/wok
	def to_element(self, e = None):
		if e is None:
			e = Data.element()

		e["ns"] = self.namespace
		e["name"] = self.name
		e["cname"] = self.cname
		
		return e
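A minimal usage sketch (not from the project): assuming node is an instance of a class exposing the to_element method above, the call either builds a fresh Data element or fills in one passed by the caller, which is how later examples nest a node under a key of a larger element via e.element("root").

# Hypothetical `node` instance of a class defining to_element as above.
node_elem = node.to_element()                    # builds a fresh Data.element()
print(node_elem["ns"], node_elem["name"], node_elem["cname"])

summary = Data.element()
node.to_element(summary.element("node"))         # fills a nested element in place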
Example #5
File: nodes.py Project: bbglab/wok
	def resources(self):
		if self.parent is None:
			res = Data.element()
		else:
			res = self.parent.resources

		if self.model.resources is not None:
			res.merge(self.model.resources)

		return res
Example #6
    def resources(self):
        if self.parent is None:
            res = Data.element()
        else:
            res = self.parent.resources

        if self.model.resources is not None:
            res.merge(self.model.resources)

        return res
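A hedged illustration of how this cascades, using only the Data.element and merge calls seen in these examples: each node starts from its parent's resources and merges its own model resources on top, so values merged later take precedence.

# Hypothetical resource elements, not taken from the project.
parent_res = Data.element(dict(cpus=1, mem="1G"))
node_res = Data.element(dict(mem="4G"))

res = Data.element()
res.merge(parent_res)   # inherited defaults
res.merge(node_res)     # node-specific values merged on top, mem becomes "4G"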
Example #7
def initialize(conf=None, format=None, datefmt=None, level=None):
    """
	Initialize the logging system.

	If conf is a dictionary then the parameters considered for configuration are:
	  - format: Logger format
	  - datefmt: Date format
	  - loggers: list of tuples (name, conf) to configure loggers

	If conf is a list then only the loggers are configured.

	If conf is a string then the default logger is configured with that logging level.
	"""

    global _initialized

    if conf is None:
        conf = Data.element()
    elif not isinstance(conf, basestring):
        conf = Data.create(conf)

    if Data.is_list(conf):
        loggers_conf = conf
        conf = Data.element()
    elif Data.is_element(conf):
        loggers_conf = conf.get("loggers", default=Data.list)
    elif isinstance(conf, basestring):
        loggers_conf = Data.list([["", conf]])
        conf = Data.element()

    format = format or conf.get("format", _DEFAULT_FORMAT)
    datefmt = datefmt or conf.get("datefmt", _DEFAULT_DATEFMT)

    logging.basicConfig(format=format, datefmt=datefmt)

    for (log_name, log_conf) in loggers_conf:
        init_logger(log_name, conf=log_conf)

    if level is not None:
        init_logger("", conf=level)

    _initialized = True
Example #8
File: logger.py Project: bbglab/wok
def initialize(conf=None, format=None, datefmt=None, level=None):
	"""
	Initialize the logging system.

	If conf is a dictionary then the parameters considered for configuration are:
	  - format: Logger format
	  - datefmt: Date format
	  - loggers: list of tuples (name, conf) to configure loggers

	If conf is a list then only the loggers are configured.

	If conf is a string then the default logger is configured with that logging level.
	"""

	global _initialized

	if conf is None:
		conf = Data.element()
	elif not isinstance(conf, basestring):
		conf = Data.create(conf)

	if Data.is_list(conf):
		loggers_conf = conf
		conf = Data.element()
	elif Data.is_element(conf):
		loggers_conf = conf.get("loggers", default=Data.list)
	elif isinstance(conf, basestring):
		loggers_conf = Data.list([["", conf]])
		conf = Data.element()

	format = format or conf.get("format", _DEFAULT_FORMAT)
	datefmt = datefmt or conf.get("datefmt", _DEFAULT_DATEFMT)

	logging.basicConfig(format=format, datefmt=datefmt)

	for (log_name, log_conf) in loggers_conf:
		init_logger(log_name, conf=log_conf)

	if level is not None:
		init_logger("", conf=level)

	_initialized = True
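A hedged usage sketch of the three conf forms described in the docstring; the wok.logger import path is the one used in Example #17, and the logger names and formats are placeholders.

from wok import logger

# 1. A plain string sets the level of the default logger.
logger.initialize("debug")

# 2. A dictionary can set the format, the date format and per-logger configurations.
logger.initialize(dict(
    format="%(asctime)s %(name)s %(levelname)-5s : %(message)s",
    loggers=[["wok.engine", "info"]]))

# 3. A list configures only the loggers.
logger.initialize([["", "error"], ["wok.jobs", "debug"]])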
Example #9
    def to_element(self, e=None):
        if e is None:
            e = Data.element()

        e["name"] = self.name
        e["state"] = str(self.state)
        e["conf"] = self.conf

        #FIXME
        self.root_node.update_tasks_count_by_state()
        self.root_node.update_component_count_by_state()
        self.root_node.to_element(e.element("root"))

        return e
Example #10
File: nodes.py Project: bbglab/wok
	def __init__(self, parent, index, id=None, namespace="", state=runstates.READY, substate=None, partition=None):
		Node.__init__(self, parent, namespace=namespace)

		self.id = id

		self.index = index

		self.state = state
		self.substate = substate

		self.partition = partition or Data.element()

		self.job_id = None
		self.job_result = None
Example #11
File: case.py Project: bbglab/wok
	def to_element(self, e = None):
		if e is None:
			e = Data.element()

		e["name"] = self.name
		e["state"] = str(self.state)
		e["conf"] = self.conf

		#FIXME
		self.root_node.update_tasks_count_by_state()
		self.root_node.update_component_count_by_state()
		self.root_node.to_element(e.element("root"))

		return e
Example #12
File: case.py Project: bbglab/wok
	def _initialize_conf(self):
		# project defined config
		self.conf = conf = self.project.get_conf()

		# user defined config
		self.user_conf = self.conf_builder.get_conf()
		if "wok" in self.user_conf:
			self.user_conf.delete("wok.work_path", "wok.projects", "wok.platforms", "wok.logging")
		conf.merge(self.user_conf)

		# runtime config
		conf[rtconf.CASE_NAME] = self.name
		conf[rtconf.FLOW] = Data.element(dict(
			name=self.root_flow.name,
			uri=self.flow_uri))
Example #13
    def _initialize_conf(self):
        # project defined config
        self.conf = conf = self.project.get_conf()

        # user defined config
        self.user_conf = self.conf_builder.get_conf()
        if "wok" in self.user_conf:
            self.user_conf.delete("wok.work_path", "wok.projects",
                                  "wok.platforms", "wok.logging")
        conf.merge(self.user_conf)

        # runtime config
        conf[rtconf.CASE_NAME] = self.name
        conf[rtconf.FLOW] = Data.element(
            dict(name=self.root_flow.name, uri=self.flow_uri))
Example #14
File: engine.py Project: bbglab/wok
	def _create_platforms(self):
		"""
		Creates the platforms according to the configuration.
		:return: list of Platform
		"""

		platform_confs = self._conf.get("platforms")
		if platform_confs is None:
			platform_confs = Data.list()
		elif not Data.is_list(platform_confs):
			self._log.error("Wrong configuration type for 'platforms': {}".format(platform_confs))
			platform_confs = Data.list()

		if len(platform_confs) == 0:
			platform_confs += [Data.element(dict(type="local"))]

		platforms = []

		names = {}
		for pidx, platform_conf in enumerate(platform_confs):
			if isinstance(platform_conf, basestring):
				if not os.path.isabs(platform_conf) and self._conf_base_path is not None:
					platform_conf = os.path.join(self._conf_base_path, platform_conf)
				platform_conf = ConfigLoader(platform_conf).load()

			if not Data.is_element(platform_conf):
				raise errors.ConfigTypeError("wok.platforms[{}]: {}".format(pidx, platform_conf))

			ptype = platform_conf.get("type", "local")

			name = platform_conf.get("name", ptype)
			if name in names:
				# disambiguate repeated platform names: local, local-2, local-3, ...
				suffix = names[name]
				names[name] += 1
				name = "{}-{}".format(name, suffix)
			else:
				names[name] = 2
			platform_conf["name"] = name

			if "work_path" not in platform_conf:
				platform_conf["work_path"] = os.path.join(self._work_path, "platform_{}".format(name))

			self._log.info("Creating '{}' platform ...".format(name))
			self._log.debug("Platform configuration: {}".format(repr(platform_conf)))

			platforms += [create_platform(ptype, platform_conf)]

		return platforms
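A hedged sketch (not taken from the project documentation) of a platforms value that the method above would accept: entries may be inline elements or strings pointing to platform configuration files, and a missing name or work_path is filled in automatically.

platform_confs = Data.list([
    # inline platform element
    Data.element(dict(type="local", name="local-fast")),
    # a relative path, resolved against the configuration base path and
    # loaded with ConfigLoader
    "platforms/cluster.conf",
])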
Example #15
    def _load_project_desc(self, path, base_path=None):
        if not os.path.isabs(path):
            if base_path is not None:
                path = os.path.join(base_path, path)
            else:
                path = os.path.abspath(path)

        if not os.path.exists(path):
            raise Exception("Project path not found: {}".format(path))

        if os.path.isdir(path):
            path = os.path.join(path, "project.conf")

        if not os.path.isfile(path):
            raise Exception("Project configuration not found: {}".format(path))

        project = Data.element()
        project.merge(ConfigLoader(path).load())

        base_path = os.path.dirname(path)

        if "path" not in project:
            project["path"] = base_path

        if not os.path.isabs(project["path"]):
            project["path"] = os.path.normpath(
                os.path.join(base_path, project["path"]))

        if "conf" in project and isinstance(project["conf"], basestring):
            conf_path = os.path.join(base_path, project["conf"])
            project["conf"] = ConfigLoader(conf_path).load()

        if "conf_rules" in project and isinstance(project["conf_rules"],
                                                  basestring):
            base_path = os.path.dirname(path)
            conf_path = os.path.join(base_path, project["conf_rules"])
            project["conf_rules"] = ConfigLoader(conf_path).load()

        if "conf_rules" in project and Data.is_list(project["conf_rules"]):
            for rule in project["conf_rules"]:
                if Data.is_element(rule) and "merge" in rule and isinstance(
                        rule["merge"], basestring):
                    rule["merge"] = ConfigLoader(
                        os.path.join(base_path, rule["merge"])).load()

        return project
Example #16
	def _load_project_desc(self, path, base_path=None):
		if not os.path.isabs(path):
			if base_path is not None:
				path = os.path.join(base_path, path)
			else:
				path = os.path.abspath(path)

		if not os.path.exists(path):
			raise Exception("Project path not found: {}".format(path))

		if os.path.isdir(path):
			path = os.path.join(path, "project.conf")

		if not os.path.isfile(path):
			raise Exception("Project configuration not found: {}".format(path))

		project = Data.element()
		project.merge(ConfigLoader(path).load())

		base_path = os.path.dirname(path)

		if "path" not in project:
			project["path"] = base_path

		if not os.path.isabs(project["path"]):
			project["path"] = os.path.normpath(os.path.join(base_path, project["path"]))

		if "conf" in project and isinstance(project["conf"], basestring):
			conf_path = os.path.join(base_path, project["conf"])
			project["conf"] = ConfigLoader(conf_path).load()

		if "conf_rules" in project and isinstance(project["conf_rules"], basestring):
			base_path = os.path.dirname(path)
			conf_path = os.path.join(base_path, project["conf_rules"])
			project["conf_rules"] = ConfigLoader(conf_path).load()

		if "conf_rules" in project and Data.is_list(project["conf_rules"]):
			for rule in project["conf_rules"]:
				if Data.is_element(rule) and "merge" in rule and isinstance(rule["merge"], basestring):
					rule["merge"] = ConfigLoader(os.path.join(base_path, rule["merge"])).load()

		return project
Example #17
	def _start(self):

		self._log.debug("Creating session ...")

		self._session = saga.Session()

		ctxs_conf = self._conf.get("context")
		if ctxs_conf is not None:
			if Data.is_element(ctxs_conf):
				ctxs_conf = Data.list([ctxs_conf])

			for ctx_conf in ctxs_conf:
				try:
					ctx = saga.Context(ctx_conf["type"])
					for key in ctx_conf:
						if hasattr(ctx, key):
							setattr(ctx, key, ctx_conf[key])
					self._session.add_context(ctx)
				except Exception as ex:
					self._log.error("Wrong context configuration: {}".format(repr(ctx_conf)))
					self._log.exception(ex)

		self._log.debug("Creating job service ...")

		url = self._conf.get("service_url", "fork://localhost", dtype=str)
		self._job_service = saga.job.Service(url, session=self._session)

		self._remote_dir = saga.filesystem.Directory(self._file_url, session=self._session)

		# FIXME Use the logging configuration mechanisms of SAGA
		from wok import logger
		logger.init_logger("SGEJobService", conf=Data.element(dict(level=self._conf.get("saga_log.level", "error"))))

		# TODO count the number of previously queued jobs

		# TODO clean output files ?

		self._running = True
		self._run_thread = threading.Thread(target=self._run_handler, name="{}-run".format(self._name))
		self._join_thread = threading.Thread(target=self._join_handler, name="{}-join".format(self._name))
		self._run_thread.start()
		self._join_thread.start()
Example #18
    def _override_component(ovr, src):
        ovr.name = src.name
        if src.title is not None:
            ovr.title = src.title
        if src.desc is not None:
            ovr.desc = src.desc
        if src.enabled is not None:
            ovr.enabled = src.enabled
        if src.serializer is not None:
            ovr.serializer = src.serializer
        if src.wsize is not None:
            ovr.wsize = src.wsize
        if src.conf is not None:
            if ovr.conf is None:
                ovr.conf = Data.element()
            ovr.conf.merge(src.conf)

        ovr.priority = src.priority
        ovr.depends = src.depends
        ovr.flow_ref = src.flow_ref
Example #19
File: case.py Project: bbglab/wok
	def _override_component(ovr, src):
		ovr.name = src.name
		if src.title is not None:
			ovr.title = src.title
		if src.desc is not None:
			ovr.desc = src.desc
		if src.enabled is not None:
			ovr.enabled = src.enabled
		if src.serializer is not None:
			ovr.serializer = src.serializer
		if src.wsize is not None:
			ovr.wsize = src.wsize
		if src.conf is not None:
			if ovr.conf is None:
				ovr.conf = Data.element()
			ovr.conf.merge(src.conf)

		ovr.priority = src.priority
		ovr.depends = src.depends
		ovr.flow_ref = src.flow_ref
Example #20
    def __init__(self,
                 name,
                 title="",
                 desc="",
                 enabled=True,
                 serializer=None,
                 wsize=None,
                 maxpar=None,
                 conf=None,
                 in_ports=None,
                 out_ports=None,
                 resources=None,
                 params=None):
        _BasePort.__init__(self, name, title, desc, enabled, serializer, wsize)

        self.maxpar = maxpar
        self.conf = conf

        if in_ports is None:
            self.in_ports = []
            self.in_port_map = {}
        else:
            self.in_ports = in_ports
            self.in_port_map = {}
            for p in in_ports:
                self.in_port_map[p.name] = p

        if out_ports is None:
            self.out_ports = []
            self.out_port_map = {}
        else:
            self.out_ports = out_ports
            self.out_port_map = {}
            for p in out_ports:
                self.out_port_map[p.name] = p

        if resources is None:
            resources = Data.element()
        self.resources = resources

        if params is None:
            params = []
        self.params = params
Example #21
File: logger.py Project: bbglab/wok
def init_logger(logger, conf):
	"""
	Initialize a logger from configuration. The configuration can be:
	- A string referring to the log level
	- A dictionary with the following parameters:
	  - level: log level
	  - handlers: list of log handlers or just a single handler. Each handler has the following parameters:
	    - type
	    - ...: each handler type has a set of parameters

	Supported handlers:
	- smtp: Send logs by email. Parameters:
	  - host
	  - port (optional)
	  - user
	  - pass
	  - from
	  - to
	  - subject
	  - level
	  - format: can be a simple string or a list of strings that will be joined with '\n'
	"""
	if isinstance(logger, basestring):
		logger = get_logger(logger)

	if isinstance(conf, basestring):
		conf = Data.element(dict(level=conf))
	else:
		conf = Data.create(conf)

	level = conf.get("level")
	if level is not None:
		logger.setLevel(get_level(level))

	handlers_conf = conf.get("handlers", default=Data.list)
	if Data.is_element(handlers_conf):
		handlers_conf = Data.list([handlers_conf])

	for handler_conf in handlers_conf:
		handler = get_handler(logger, handler_conf)
		logger.addHandler(handler)
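A hedged usage sketch based on the docstring above; the handler parameter names are the ones listed there and all values are placeholders.

# Simplest form: only a level string.
init_logger("wok.engine", "debug")

# Dictionary form with a single smtp handler.
init_logger("wok.engine", conf={
    "level": "info",
    "handlers": {
        "type": "smtp",
        "host": "smtp.example.org",
        "user": "wok",
        "pass": "secret",
        "from": "wok@example.org",
        "to": "admin@example.org",
        "subject": "wok engine log",
        "level": "error",
        "format": ["%(asctime)s %(levelname)s", "%(message)s"],
    },
})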
Example #22
def init_logger(logger, conf):
    """
	Initialize a logger from configuration. The configuration can be:
	- A string referring to the log level
	- A dictionary with the following parameters:
	  - level: log level
	  - handlers: list of log handlers or just a single handler. Each handler has the following parameters:
	    - type
	    - ...: each handler type has a set of parameters

	Supported handlers:
	- smtp: Send logs by email. Parameters:
	  - host
	  - port (optional)
	  - user
	  - pass
	  - from
	  - to
	  - subject
	  - level
	  - format: can be a simple string or a list of strings that will be joined with '\n'
	"""
    if isinstance(logger, basestring):
        logger = get_logger(logger)

    if isinstance(conf, basestring):
        conf = Data.element(dict(level=conf))
    else:
        conf = Data.create(conf)

    level = conf.get("level")
    if level is not None:
        logger.setLevel(get_level(level))

    handlers_conf = conf.get("handlers", default=Data.list)
    if Data.is_element(handlers_conf):
        handlers_conf = Data.list([handlers_conf])

    for handler_conf in handlers_conf:
        handler = get_handler(logger, handler_conf)
        logger.addHandler(handler)
Example #23
    def __init__(self,
                 parent,
                 index,
                 id=None,
                 namespace="",
                 state=runstates.READY,
                 substate=None,
                 partition=None):
        Node.__init__(self, parent, namespace=namespace)

        self.id = id

        self.index = index

        self.state = state
        self.substate = substate

        self.partition = partition or Data.element()

        self.job_id = None
        self.job_result = None
Example #24
File: native.py Project: bbglab/wok
	def prepare(self, case, task, index):
		execution = task.execution
		exec_conf = execution.conf
		if exec_conf is None:
			exec_conf = Data.element()

		if "script_path" not in exec_conf:
			raise MissingValueError("script_path")

		script_path = exec_conf["script_path"]

		lang = exec_conf.get("language", "python")

		case_conf = case.conf.clone().expand_vars()

		# Environment variables
		env = Data.element()
		#for k, v in os.environ.items():
		#	env[k] = v
		env.merge(task.conf.get(rtconf.TASK_ENV))
		env.merge(exec_conf.get("env"))

		# Default module script path
		platform_project_path = task.conf.get(rtconf.PROJECT_PATH, case.project.path)
		flow_path = os.path.abspath(os.path.dirname(task.flow_path))
		flow_rel_path = os.path.relpath(flow_path, case.project.path)
		platform_script_path = os.path.join(platform_project_path, flow_rel_path, script_path)
		env[ENV_PROJECT_PATH] = platform_project_path
		env[ENV_FLOW_PATH] = flow_rel_path
		env[ENV_SCRIPT_PATH] = script_path
		env[ENV_PLATFORM_SCRIPT_PATH] = platform_script_path

		script = []
		
		sources = task.conf.get(rtconf.TASK_SOURCES, default=Data.list)
		if isinstance(sources, basestring):
			sources = Data.list([sources])

		for source in sources:
			script += ['source "{}"'.format(source)]
	
		if lang == "python":
			virtualenv = task.conf.get(rtconf.TASK_PYTHON_VIRTUALENV)
			if virtualenv is not None:
				#script += ["set -x"]
				#script += ["echo Activating virtualenv {} ...".format(virtualenv)]
				script += ['source "{}"'.format(os.path.join(virtualenv, "bin", "activate"))]
				#script += ["set +x"]

			#script += ["echo Running workitem ..."]

			cmd = [task.conf.get(rtconf.TASK_PYTHON_BIN, "python")]
			cmd += ["${}".format(ENV_PLATFORM_SCRIPT_PATH)]

			lib_path = task.conf.get(rtconf.TASK_PYTHON_LIBS)
			if lib_path is not None:
				if Data.is_list(lib_path):
					lib_path = ":".join(lib_path)

				if "PYTHONPATH" in env:
					env["PYTHONPATH"] = lib_path + ":" + env["PYTHONPATH"]
				else:
					env["PYTHONPATH"] = lib_path
		else:
			raise LanguageError(lang)

		cmd += ["-D", "case={}".format(case.name),
				"-D", "task={}".format(task.cname),
				"-D", "index={}".format(index)]

		#for key, value in self._storage_conf(workitem.case.engine.storage.basic_conf):
		#	cmd += ["-D", "storage.{}={}".format(key, value)]

		for key, value in self._plain_conf(Data.create(task.platform.data.context_conf(CTX_EXEC))):
			cmd += ["-D", "data.{}={}".format(key, value)]

		for key, value in self._plain_conf(task.platform.storage.context_conf(CTX_EXEC)):
			cmd += ["-D", "storage.{}={}".format(key, value)]

		script += [" ".join(cmd)]
		
		return "\n".join(script), env.to_native()
Example #25
def init_project(logger, config, paths, storage, project):
	project_id = project["id"]

	results_path = paths.results_path()
	project_path = paths.project_path(project_id)
	project_temp_path = paths.project_temp_path(project_path)

	if config.results.purge_on_start:
		logger.info("  Purging previous results ...")
		if os.path.isdir(project_path):
			logger.info("    {} ...".format(os.path.relpath(project_path, results_path)))
			shutil.rmtree(project_path)
		#if os.path.isdir(project_temp_path):
		#	logger.info("    {} ...".format(os.path.relpath(project_temp_path, results_path)))
		#	shutil.rmtree(project_temp_path)

		for obj_name in storage.list_objects(prefix="results/"):
			logger.info("    {} ...".format(obj_name))
			storage.delete_object("results/{}".format(obj_name))

	ensure_path_exists(project_path)
	ensure_path_exists(project_temp_path)

	projdb_path = os.path.join(project_path, "project.db")

	if "annotations" in project:
		annotations = project["annotations"]
		if not Data.is_element(annotations):
			logger.warn("Overriding project annotations field with an empty dictionary")
			project["annotations"] = annotations = Data.element()
	else:
		project["annotations"] = annotations = Data.element()

	# for backward compatibility
	for key in project.keys():
		if key not in ["id", "assembly", "files", "storage_objects", "annotations", "conf", "oncodriveclust", "oncodrivefm"]:
			value = project[key]
			del project[key]
			annotations[key] = value

	project["conf"] = pconf = project.get("conf") or Data.element()
	if not Data.is_element(pconf):
		logger.warn("Overriding project conf field with an empty dictionary")
		project["conf"] = pconf = Data.element()

	# for backward compatibility
	for key in project.keys():
		if key in ["oncodriveclust", "oncodrivefm"]:
			value = project[key]
			del project[key]
			pconf[key] = value

	project["path"] = project_path
	project["temp_path"] = project_temp_path
	project["db"] = projdb_path

	if "assembly" not in project:
		project["assembly"] = DEFAULT_ASSEMBLY

	missing_objects = []

	for obj_name in project["storage_objects"]:
		if not storage.exists_object(obj_name):
			missing_objects += [obj_name]

	if len(missing_objects) > 0:
		raise InternalError("Project {0} references some missing objects:\n{1}".format(project_id, "\n".join(missing_objects)))

	project["files"] = [str(f) for f in project["files"]] #unicode is not json serializable
	project["storage_objects"] = [str(f) for f in project["storage_objects"]] #unicode is not json serializable

	project = project.to_native()

	# save project.conf
	projres = ProjectResults(project)
	projres.save_def()

	return project
Example #26
File: engine.py Project: bbglab/wok
	def _join(self):
		"Joiner thread"

		set_thread_title()

		self._num_alive_threads += 1

		_log = logger.get_logger("wok.engine.join")

		_log.debug("Engine join thread ready")

		session = None

		num_exc = 0

		while self._running:
			try:
				set_thread_title("waiting")

				job_info = self.__queue_adaptative_get(self._join_queue)
				if job_info is None:
					continue

				workitem_id, job_id = job_info

				with self._lock:
					session = db.Session()

					workitem = session.query(db.WorkItem).filter(db.WorkItem.id == workitem_id).one()

					case = self._cases_by_name[workitem.case.name]
					task = case.component(workitem.task.cname)

					set_thread_title(task.cname)

					#_log.debug("Joining work-item %s ..." % task.cname)

					jr = task.platform.jobs.join(job_id)

					wr = Data.element(dict(
							hostname=jr.hostname,
							created=jr.created.strftime(_DT_FORMAT) if jr.created is not None else None,
							started=jr.started.strftime(_DT_FORMAT) if jr.started is not None else None,
							finished=jr.finished.strftime(_DT_FORMAT) if jr.finished is not None else None,
							exitcode=jr.exitcode.code if jr.exitcode is not None else None))

					r = task.platform.data.load_workitem_result(case.name, task.cname, workitem.index)

					if r is not None:
						if r.exception is not None:
							wr["exception"] = r.exception
						if r.trace is not None:
							wr["trace"] = r.trace

					workitem.substate = None
					workitem.result = wr

					case.num_active_workitems -= 1

					session.commit()

					# check if there are still more work-items
					num_workitems = session.query(func.count(db.WorkItem.id)).filter(
						~db.WorkItem.state.in_(runstates.TERMINAL_STATES)).scalar()

					if self._single_run and num_workitems == 0:
						stop_engine = True
						for case in self._cases:
							stop_engine = stop_engine and (case.state in runstates.TERMINAL_STATES)
						#self._running = not stop_engine
						if stop_engine:
							self._finished_event.set()

					_log.debug("[{}] Joined work-item {}".format(case.name, workitem.cname))

					# check stopping instances
					if case in self._stopping_cases:
						job_ids = self._stopping_cases[case]
						if job_id in job_ids:
							job_ids.remove(job_id)

						if len(job_ids) == 0:
							del self._stopping_cases[case]
							if case.state == runstates.ABORTING:
								workitem.case.state = case.state = runstates.ABORTED

							session.commit()

							if case.removed:
								self.__remove_case(session, case)
								session.commit()
						else:
							_log.debug("Still waiting for {} jobs to stop".format(len(job_ids)))

					if case.state in runstates.TERMINAL_STATES and case.num_active_workitems == 0:
						_log.info("[{}] Case {}. Total time: {}".format(case.name, case.state.title, str(case.elapsed)))

						self._lock.release()
						try:
							self.case_finished.send(case)
						finally:
							self._lock.acquire()

			except BaseException as ex:
				num_exc += 1
				_log.warn("Exception in join thread ({}): {}".format(num_exc, str(ex)))
				from traceback import format_exc
				_log.debug(format_exc())

				try:
					if session is not None:
						session.rollback()
				except Exception as ex:
					_log.warn("Session rollback failed")
					_log.exception(ex)

			finally:
				try:
					if session is not None:
						session.close()
				except Exception as ex:
					_log.warn("Session close failed")
					_log.exception(ex)

		self._num_alive_threads -= 1

		_log.debug("Engine join thread finished")
Example #27
        return self.engine.exists_case(engine_case_name) or exists_in_db()

    def create_case(self,
                    user,
                    case_name,
                    conf_builder,
                    project_name,
                    flow_name,
                    properties=None,
                    start=True):
        case = Case(owner_id=user.id,
                    name=case_name,
                    project_name=project_name,
                    flow_name=flow_name,
                    conf=conf_builder.get_conf(),
                    properties=Data.element(properties))

        session = db.Session()
        session.add(case)
        session.commit()

        engine_case_name = "{}-{}".format(user.nick, case_name)
        #while self.engine.exists_case(engine_case_name):
        #	engine_case_name = "{}-{}".format(user.nick, uuid4().hex[-6:])

        engine_case = self.engine.create_case(engine_case_name, conf_builder,
                                              project_name, flow_name)

        case.created = engine_case.created
        case.engine_name = engine_case_name
        session.commit()
Example #28
	def project_conf(self, project_name, platform_name=None):
		if project_name not in self._projects:
			return Data.element()

		project = self._projects[project_name]
		return project.get_conf(platform_name=platform_name)
Example #29
class NativeCommmandBuilder(CommmandBuilder):
    def _plain_conf(self, value, path=None):
        if path is None:
            path = []

        if not Data.is_element(value):
            yield (".".join(path), value)
        else:
            for key in value.keys():
                for k, v in self._plain_conf(value[key], path + [key]):
                    yield (k, v)

    def prepare(self, case, task, index):
        execution = task.execution
        exec_conf = execution.conf
        if exec_conf is None:
            exec_conf = Data.element()

        if "script_path" not in exec_conf:
            raise MissingValueError("script_path")

        script_path = exec_conf["script_path"]

        lang = exec_conf.get("language", "python")

        case_conf = case.conf.clone().expand_vars()

        # Environment variables
        env = Data.element()
        #for k, v in os.environ.items():
        #	env[k] = v
        env.merge(task.conf.get(rtconf.TASK_ENV))
        env.merge(exec_conf.get("env"))

        # Default module script path
        platform_project_path = task.conf.get(rtconf.PROJECT_PATH,
                                              case.project.path)
        flow_path = os.path.abspath(os.path.dirname(task.flow_path))
        flow_rel_path = os.path.relpath(flow_path, case.project.path)
        platform_script_path = os.path.join(platform_project_path,
                                            flow_rel_path, script_path)
        env[ENV_PROJECT_PATH] = platform_project_path
        env[ENV_FLOW_PATH] = flow_rel_path
        env[ENV_SCRIPT_PATH] = script_path
        env[ENV_PLATFORM_SCRIPT_PATH] = platform_script_path

        script = []

        sources = task.conf.get(rtconf.TASK_SOURCES, default=Data.list)
        if isinstance(sources, basestring):
            sources = Data.list([sources])

        for source in sources:
            script += ['source "{}"'.format(source)]

        if lang == "python":
            virtualenv = task.conf.get(rtconf.TASK_PYTHON_VIRTUALENV)
            if virtualenv is not None:
                #script += ["set -x"]
                #script += ["echo Activating virtualenv {} ...".format(virtualenv)]
                script += [
                    'source "{}"'.format(
                        os.path.join(virtualenv, "bin", "activate"))
                ]
                #script += ["set +x"]

            #script += ["echo Running workitem ..."]

            cmd = [task.conf.get(rtconf.TASK_PYTHON_BIN, "python")]
            cmd += ["${}".format(ENV_PLATFORM_SCRIPT_PATH)]

            lib_path = task.conf.get(rtconf.TASK_PYTHON_LIBS)
            if lib_path is not None:
                if Data.is_list(lib_path):
                    lib_path = ":".join(lib_path)

                if "PYTHONPATH" in env:
                    env["PYTHONPATH"] = lib_path + ":" + env["PYTHONPATH"]
                else:
                    env["PYTHONPATH"] = lib_path
        else:
            raise LanguageError(lang)

        cmd += [
            "-D", "case={}".format(case.name), "-D",
            "task={}".format(task.cname), "-D", "index={}".format(index)
        ]

        #for key, value in self._storage_conf(workitem.case.engine.storage.basic_conf):
        #	cmd += ["-D", "storage.{}={}".format(key, value)]

        for key, value in self._plain_conf(
                Data.create(task.platform.data.context_conf(CTX_EXEC))):
            cmd += ["-D", "data.{}={}".format(key, value)]

        for key, value in self._plain_conf(
                task.platform.storage.context_conf(CTX_EXEC)):
            cmd += ["-D", "storage.{}={}".format(key, value)]

        script += [" ".join(cmd)]

        return "\n".join(script), env.to_native()
Example #30
    def project_conf(self, project_name, platform_name=None):
        if project_name not in self._projects:
            return Data.element()

        project = self._projects[project_name]
        return project.get_conf(platform_name=platform_name)
Example #31
def compute(project):
	log = task.logger

	config = GlobalConfig(task.conf)
	paths = PathsConfig(config)

	projects_out_port = task.ports("projects_out")

	project_id = project["id"]
	log.info("--- [{0}] --------------------------------------------".format(project_id))

	ofm = Data.element(project["oncodrivefm"])

	feature = ofm["feature"]
	slice_name = ofm["slice"]
	estimator = ofm.get("estimator")
	num_cores = ofm.get("num_cores", dtype=str)
	num_samplings = ofm.get("num_samplings", dtype=str)
	samples_threshold = ofm.get("samples_threshold", dtype=str)
	filter_enabled = ofm.get("filter_enabled", dtype=bool)
	filter_path = ofm.get("filter_path", dtype=str)

	log.info("feature = {0}".format(feature))
	log.info("slice = {0}".format(slice_name))
	log.info("estimator = {0}".format(estimator))
	log.info("num_cores = {0}".format(num_cores))
	log.info("num_samplings = {0}".format(num_samplings))
	log.info("samples_threshold = {0}".format(samples_threshold))
	log.info("filter_enabled = {0}".format(filter_enabled))
	log.info("filter_path = {0}".format(os.path.basename(filter_path)))

	cmd = [
		"oncodrivefm-compute",
		"-o", project["temp_path"],
		"-n oncodrivefm-{0}".format(feature),
		"-N", num_samplings,
		"--threshold", samples_threshold,
		"-e {0}".format(estimator),
		"-j", num_cores,
		"--slices '{0}'".format(slice_name)]

	if filter_enabled:
		cmd += ["--filter", filter_path]

	if feature == "pathways":
		cmd += ["-m", paths.data_kegg_path("ensg_kegg.tsv")]

	cmd += [ofm["data"]]

	project["oncodrivefm"] = dict(
		feature=feature,
		slice=slice_name,
		results=os.path.join(project["temp_path"], "oncodrivefm-{0}-{1}.tsv".format(feature, slice_name)))

	cmd = " ".join(cmd)

	log.debug(cmd)

	ret_code = subprocess.call(cmd, shell=True)
	if ret_code != 0:
		raise Exception("OncodriveFM error while computing {0}:\n{1}".format(feature, cmd))

	projects_out_port.send(project)