def format_event(timestamp, annotation_data, settings):
    """Build a normalized annotation event from raw annotation data.

    :param timestamp: Fallback timestamp, used when the annotation carries
        no ``createdAt``/``begin`` of its own
    :param annotation_data: dict with the raw annotation fields (not mutated)
    :param settings: dict containing ``author``, ``room`` and ``dashboard``
        entries
    :return: the enriched annotation event, or ``None`` when any of
        ``begin``/``end``/``createdAt`` ends up invalid (``0`` or ``None``)
    """
    author_data = settings["author"]
    room_data = settings["room"]
    dashboard_data = settings["dashboard"]

    # Work on a copy so the caller's dict is left untouched
    message_event = annotation_data.copy()
    message_event.update(
        __type="__annotations",
        __src=message_event.get("__src", "live_agent"),
        uid=message_event.get("uid", str(uuid.uuid4())),
        createdAt=int(message_event.get("createdAt", timestamp)),
        author=author_data.get("name"),
        room=room_data,
        dashboardId=dashboard_data.get("id"),
        dashboard=dashboard_data.get("name"),
        searchable=True,
    )

    begin = int(message_event.get("begin", timestamp))
    end = message_event.pop("end", None)
    # Guarantee a non-negative interval: a missing or inverted `end` gets
    # a default duration past `begin`
    if (end is None) or end < begin:
        end = begin + DEFAULT_ANNOTATION_DURATION

    message_event.update(begin=begin, end=int(end))

    def has_invalid_value(key):
        return message_event.get(key, -1) in (0, None)

    if any(map(has_invalid_value, ("begin", "end", "createdAt"))):
        # `logging.warn` is a deprecated alias; use `logging.warning`
        logging.warning("Invalid annotation: {}".format(message_event))
        return None

    return message_event
def async_send(queue, live_settings):
    """Consume events from ``queue`` and forward them to the live server.

    Runs forever as a dedicated process; delivery is best-effort, so a
    failed send is logged and the event dropped instead of crashing the
    loop.

    :param queue: queue providing the events to be sent
    :param live_settings: dict with the connection settings; a ``session``
        entry is added before the loop starts
    """
    with start_action(action_type="async_logger"):
        logging.info("Remote logger process started")
        setproctitle("DDA: Remote logger")

        live_settings.update(session=build_session(live_settings))
        while True:
            event = queue.get()
            try:
                send_event(event, live_settings)
            except RequestException:
                # Drop the event and keep consuming the queue.
                # (`logging.warn` is deprecated → `logging.warning`;
                # the trailing `pass` after the log call was dead code.)
                logging.warning("Ignoring previous exception")
def __init__(self, chatbot, **kwargs):
    """Initialize the adapter and publish the list of available assets.

    :param chatbot: the chatbot instance this adapter is attached to
    :param kwargs: must contain a ``settings`` dict, used to query the
        list of assets the user can access
    """
    super().__init__(chatbot, **kwargs)

    available_assets = list_assets(kwargs["settings"])
    if not available_assets:
        # An empty asset list usually indicates a permissions problem.
        # (`logging.warn` is deprecated; the f-prefix was unnecessary
        # since the string has no placeholders.)
        logging.warning("No assets available. Check permissions for this user!")

    self.load_state()
    self.state = {
        "assets": available_assets,
        "asset_names": [item.get("name") for item in available_assets if "name" in item],
    }
    self.share_state()
def maybe_send_chat_message(message, settings, **kwargs):
    """Send a chat message if both a target room and an author are known.

    :param message: text of the message to send
    :param settings: dict with an ``output`` entry (default author/room)
        and a ``live`` entry (connection settings)
    :param kwargs: optional ``room`` override and ``author_name`` override
    :return: ``True`` when the message was sent, ``False`` otherwise
    """
    output_settings = settings["output"]
    author = output_settings.get("author")
    room = kwargs.get("room", output_settings.get("room"))

    shall_send_message = (room is not None) and (author is not None)
    if not shall_send_message:
        # `logging.warn` is deprecated; use `logging.warning`
        logging.warning(
            f"Cannot send message, room ({room}) and/or author ({author}) missing. Message is '{message}'"
        )
        return False

    # [ECS][FIXME]: Author should not be altered here. It'd be better to receive it configured from the client <<<<<
    author["name"] = kwargs.get("author_name") or author.get("name")

    connection_func = build_sender_function(settings["live"])
    logging.debug("Sending message '{}' from {} to {}".format(message, author, room))
    format_and_send(message, room, author, connection_func=connection_func)
    return True
def maybe_send_chat_message(message, timestamp, settings, **kwargs):
    """Send a timestamped chat message if a room and an author are known.

    :param message: text of the message to send
    :param timestamp: timestamp to attach to the outgoing message
    :param settings: dict with an ``output`` entry (default author/room)
        and a ``live`` entry (connection settings)
    :param kwargs: optional ``room`` override and ``author_name`` override
    :return: ``True`` when the message was sent, ``False`` otherwise
    """
    output_settings = settings["output"]
    author = output_settings.get("author")
    room = kwargs.get("room", output_settings.get("room"))

    shall_send_message = (room is not None) and (author is not None)
    if not shall_send_message:
        # `logging.warn` is deprecated; use `logging.warning`
        logging.warning(
            f"Cannot send message, room ({room}) and/or author ({author}) missing"
        )
        return False

    author["name"] = kwargs.get("author_name") or author.get("name")

    connection_func = build_sender_function(settings["live"])
    logging.debug("Sending message '{}' from {} to {}".format(message, author, room))
    format_and_send(message, room, author, timestamp=timestamp, connection_func=connection_func)
    return True
def start(settings, **kwargs):
    """
    Starts the LAS replayer, based on the process settings.

    Replays sensors and chat data.

    :param settings: Parameters for this `las_replayer` instance
    :type settings: dict

    :param state_manager: StateManager injected by `live-agent`
    :type state_manager: live_agent.services.StateManager

    :rtype: NoneType

    The settings for this process have the following format::

        {
            "type": "las_replay",
            "enabled": true,  # Self explanatory
            "index_mnemonic": "TIME",  # Curve used as index for the LAS data
            "path_list": [
                # A list of filename pairs containing the data to be replayed
                [<path for a LAS file>, <path for a CSV file containing the chat logs>],
                ...
            ],
            "output": {
                "event_type": "raw_wellX",  # The name of the event type which should be sent to Intelie Live
                "author": {
                    "id": <user id>,  # User id of the author for the messages
                    "name": "Linguistics monitor"  # User name of the author for the messages
                },
                "room": {
                    "id": <room id>  # Id of the room where the messages should be sent
                },
                "message_event": {
                    # Information for generating markers on charts
                    "event_type": "raw_wellX",  # Usually the raw event type of the asset being monitored
                    "mnemonic": "MSG"  # Mnemonic used for messages normalization, usually named `MSG`
                }
            }
        }

    The LAS file *must* be SPACE delimited.

    The CSV file must contain at least 3 columns:

    - `MESSAGE`: The text of the message
    - `SOURCE`: The name of the message sender
    - A column with the same name as the `index_mnemonic` defined on the
      process settings, used for correlating messages with the events from
      the LAS file.
    """
    event_type = settings["output"]["event_type"]
    cooldown_time = settings.get("cooldown_time", 300)
    setproctitle('DDA: LAS replayer for "{}"'.format(event_type))

    state_manager = kwargs.get("state_manager")
    state = state_manager.load()
    iterations = state.get("iterations", 0)

    while True:
        # Pre-bind both names: if `open_files` raises before the tuple
        # assignment completes, the `del` statements below would otherwise
        # hit an unbound local and raise NameError outside the handler.
        las_data = chat_data = None
        try:
            success, las_data, chat_data, index_mnemonic = open_files(
                settings, iterations, mode=READ_MODES.CONTINUOUS
            )

            if success:
                generate_events(event_type, las_data, chat_data, index_mnemonic, settings, state_manager)
                logging.info("Iteration {} successful".format(iterations))
            else:
                # `logging.warn` is deprecated; use `logging.warning`
                logging.warning("Could not open files")

            state_manager.save({"last_timestamp": 0, "iterations": iterations}, force=True)
            loop.await_next_cycle(
                cooldown_time,
                message="Sleeping for {:.1f} minutes between runs".format(cooldown_time / 60.0),
                log_func=logging.info,
            )

        except KeyboardInterrupt:
            logging.info("Stopping after {} iterations".format(iterations))
            raise

        except Exception as e:
            logging.error(
                "Error processing events during iteration {}, {}<{}>".format(iterations, e, type(e))
            )

        iterations += 1
        # Release references to the (potentially large) parsed data between runs
        del las_data
        del chat_data

    return