def publish_message(self, data, routing_key):
    """Publish a message to exchange/${pulse_user}/pulse_actions.

    :param data: dict of key/value pairs to attach to the message.
    :param routing_key: dot-separated routing key string, e.g. ``'a.b.c'``.
    """
    msg = GenericMessage()
    msg.routing_parts = routing_key.split('.')
    # .items() works on both Python 2 and 3; .iteritems() is Python-2-only
    # and breaks under Python 3.
    for key, value in data.items():
        msg.set_data(key, value)
    self.publisher.publish(msg)
def publish_message(publisher, data, routing_key):
    """Publish *data* on *publisher* under *routing_key*, retrying until it succeeds.

    Retries every 5 seconds.  If 5 failures accumulate within one minute,
    backs off for 5 minutes before resuming.

    :param publisher: object with a ``publish(msg)`` method.
    :param data: dict of key/value pairs to attach to the message.
    :param routing_key: dot-separated routing key string.
    :raises TypeError: if *data* is not a dict.
    """
    # Explicit raise instead of assert: asserts are stripped under ``python -O``.
    if not isinstance(data, dict):
        raise TypeError('data must be a dict, got %r' % type(data))
    msg = GenericMessage()
    msg.routing_parts = routing_key.split('.')
    # .items() works on both Python 2 and 3; .iteritems() is Python-2-only.
    for key, value in data.items():
        msg.set_data(key, value)
    failures = []
    while True:  # keep re-trying in case of failure
        try:
            publisher.publish(msg)
            break
        except Exception:
            traceback.print_exc()
            now = datetime.datetime.now()
            # Only count failures recorded within the last minute.
            failures = [x for x in failures
                        if now - x < datetime.timedelta(seconds=60)]
            failures.append(now)
            if len(failures) >= 5:
                # Too many recent failures: back off for 5 minutes.
                failures = []
                time.sleep(5 * 60)
            else:
                time.sleep(5)
def publish_message(self, data, routing_key):
    """Publish a message to exchange/adusca/experiment.

    :param data: dict of key/value pairs to attach to the message.
    :param routing_key: dot-separated routing key string, e.g. ``'a.b.c'``.
    """
    msg = GenericMessage()
    msg.routing_parts = routing_key.split('.')
    # .items() works on both Python 2 and 3; .iteritems() is Python-2-only
    # and breaks under Python 3.
    for key, value in data.items():
        msg.set_data(key, value)
    self.publisher.publish(msg)
def response_agent_info(data_payload):
    """
    Return back the received MetaTask UID and agent information to Pulse "mgt" topic channel.
    """
    # payload keys carrying the debug information
    DEBUG_QUEUE_TYPE = 'debug_queue_type'
    DEBUG_COMMAND_NAME = 'debug_command_name'
    DEBUG_OVERWRITE_COMMAND_CONFIGS = 'debug_overwrite_command_configs'
    DEBUG_UID = 'debug_UID'
    # routing topic and top-level key of the outgoing message
    PULSE_MGT_TOPIC = 'mgt'
    PULSE_MGT_OBJECT_KEY = 'message'

    # Gather the agent information and map it straight onto the outgoing keys.
    local_hostname = socket.gethostname()
    data = {
        'task_uid': data_payload.get(DEBUG_UID, ''),
        'hostname': local_hostname,
        'ip': socket.gethostbyname(local_hostname),
        'topic': get_topic(),
        'platform': sys.platform,
        'cmd_name': data_payload.get(DEBUG_COMMAND_NAME, ''),
        'cmd_configs': data_payload.get(DEBUG_OVERWRITE_COMMAND_CONFIGS, ''),
        'queue_type': data_payload.get(DEBUG_QUEUE_TYPE, ''),
    }

    # Warn when no queue is bound to the topic; the message may then be lost.
    if not HasalPulsePublisher.check_pulse_queue_exists(username=username,
                                                        password=password,
                                                        topic=topic):
        logging.error(
            'There is not Queue for Topic [{topic}]. Message might be ignored.'
            .format(topic=topic))

    # Publish the agent info on the "mgt" topic, then disconnect.
    pulse_publisher = HasalPublisher(user=username, password=password)
    mgt_message = GenericMessage()
    mgt_message.routing_parts.append(PULSE_MGT_TOPIC)
    mgt_message.set_data(PULSE_MGT_OBJECT_KEY, data)
    pulse_publisher.publish(mgt_message)
    pulse_publisher.disconnect()
def publish_message(publisherClass, logger, data, routing_key, pulse_cfg):
    """Publish *data* under *routing_key*, retrying until the publish succeeds.

    A fresh publisher is instantiated on every attempt so a broken connection
    cannot poison later retries.  Retries every 5 seconds; after 5 failures
    within one minute, backs off for 5 minutes.

    :param publisherClass: publisher class, instantiated with ``connect=False``.
    :param logger: logger used for failure/retry reporting.
    :param data: dict of key/value pairs to attach to the message.
    :param routing_key: dot-separated routing key string.
    :param pulse_cfg: if truthy, assigned to ``publisher.config`` before publishing.
    :raises TypeError: if *data* is not a dict.
    """
    # Explicit raise instead of assert: asserts are stripped under ``python -O``.
    if not isinstance(data, dict):
        raise TypeError('data must be a dict, got %r' % type(data))
    msg = GenericMessage()
    msg.routing_parts = routing_key.split('.')
    # .items() works on both Python 2 and 3; .iteritems() is Python-2-only.
    for key, value in data.items():
        msg.set_data(key, value)
    failures = []
    while True:
        try:
            publisher = publisherClass(connect=False)
            if pulse_cfg:
                publisher.config = pulse_cfg
            publisher.publish(msg)
            break
        except Exception:
            now = datetime.datetime.now()
            logger.exception('Failure when publishing %s' % routing_key)
            # Only count failures recorded within the last minute.
            failures = [x for x in failures
                        if now - x < datetime.timedelta(seconds=60)]
            failures.append(now)
            if len(failures) >= 5:
                logger.warning('%d publish failures within one minute.'
                               % len(failures))
                failures = []
                sleep_time = 5 * 60
            else:
                sleep_time = 5
            logger.warning('Sleeping for %d seconds.' % sleep_time)
            time.sleep(sleep_time)
            logger.warning('Retrying...')
def response_agent_info(data_payload):
    """
    Return back the received MetaTask UID and agent information to Pulse "mgt" topic channel.
    """
    # the debug information key name
    DEBUG_QUEUE_TYPE = 'debug_queue_type'
    DEBUG_COMMAND_NAME = 'debug_command_name'
    DEBUG_OVERWRITE_COMMAND_CONFIGS = 'debug_overwrite_command_configs'
    DEBUG_UID = 'debug_UID'
    # define mgt keys
    PULSE_MGT_TOPIC = 'mgt'
    PULSE_MGT_OBJECT_KEY = 'message'
    PULSE_MGT_OBJECT_TASK_UID = 'task_uid'
    PULSE_MGT_OBJECT_HOSTNAME = 'hostname'
    PULSE_MGT_OBJECT_IP = 'ip'
    PULSE_MGT_OBJECT_TOPIC = 'topic'
    PULSE_MGT_OBJECT_PLATFORM = 'platform'
    PULSE_MGT_OBJECT_CMD_NAME = 'cmd_name'
    PULSE_MGT_OBJECT_CMD_CFG = 'cmd_configs'
    PULSE_MGT_OBJECT_QUEUE_TYPE = 'queue_type'

    # collect the agent information into the payload for the "mgt" channel
    data = {
        PULSE_MGT_OBJECT_TASK_UID: data_payload.get(DEBUG_UID, ''),
        PULSE_MGT_OBJECT_HOSTNAME: socket.gethostname(),
        PULSE_MGT_OBJECT_IP: socket.gethostbyname(socket.gethostname()),
        PULSE_MGT_OBJECT_TOPIC: get_topic(),
        PULSE_MGT_OBJECT_PLATFORM: sys.platform,
        PULSE_MGT_OBJECT_CMD_NAME: data_payload.get(DEBUG_COMMAND_NAME, ''),
        PULSE_MGT_OBJECT_CMD_CFG: data_payload.get(DEBUG_OVERWRITE_COMMAND_CONFIGS, ''),
        PULSE_MGT_OBJECT_QUEUE_TYPE: data_payload.get(DEBUG_QUEUE_TYPE, ''),
    }

    # a missing queue means the broker may drop the message; warn loudly
    queue_exists = HasalPulsePublisher.check_pulse_queue_exists(username=username,
                                                                password=password,
                                                                topic=topic)
    if not queue_exists:
        logging.error('There is not Queue for Topic [{topic}]. Message might be ignored.'.format(topic=topic))

    # build the message, push it out on the "mgt" topic, and clean up
    pulse = HasalPublisher(user=username, password=password)
    message = GenericMessage()
    message.routing_parts.append(PULSE_MGT_TOPIC)
    message.set_data(PULSE_MGT_OBJECT_KEY, data)
    pulse.publish(message)
    pulse.disconnect()
def publish_build(commit, filename):
    """Announce an available B2G qemu build on Pulse.

    :param commit: identifier of the commit the build was made from.
    :param filename: local path of the build artifact; only its basename is
        used to form the published download URL.
    """
    pulse = B2GPulsePublisher()
    message = GenericMessage()
    message.routing_parts = ['b2g', 'qemu', 'build', 'available']
    build_url = 'http://builder.boot2gecko.org/%s' % os.path.basename(filename)
    message.set_data('buildurl', build_url)
    message.set_data('commit', commit)
    pulse.publish(message)
def publish_message(publisherClass, logger, data, routing_key, pulse_cfg):
    """Publish *data* under *routing_key*, retrying until the publish succeeds.

    Each attempt uses a freshly constructed publisher so a dead connection is
    never reused.  Retries every 5 seconds; after 5 failures within one
    minute, backs off for 5 minutes.

    :param publisherClass: publisher class, instantiated with ``connect=False``.
    :param logger: logger used for failure/retry reporting.
    :param data: dict of key/value pairs to attach to the message.
    :param routing_key: dot-separated routing key string.
    :param pulse_cfg: if truthy, assigned to ``publisher.config`` before publishing.
    :raises TypeError: if *data* is not a dict.
    """
    # Explicit raise instead of assert: asserts are stripped under ``python -O``.
    if not isinstance(data, dict):
        raise TypeError('data must be a dict, got %r' % type(data))
    msg = GenericMessage()
    msg.routing_parts = routing_key.split('.')
    # .items() works on both Python 2 and 3; .iteritems() is Python-2-only.
    for key, value in data.items():
        msg.set_data(key, value)
    failures = []
    while True:
        try:
            publisher = publisherClass(connect=False)
            if pulse_cfg:
                publisher.config = pulse_cfg
            publisher.publish(msg)
            break
        except Exception:
            now = datetime.datetime.now()
            logger.exception('Failure when publishing %s' % routing_key)
            # Drop failure timestamps older than one minute.
            failures = [
                x for x in failures
                if now - x < datetime.timedelta(seconds=60)
            ]
            failures.append(now)
            if len(failures) >= 5:
                logger.warning('%d publish failures within one minute.'
                               % len(failures))
                failures = []
                sleep_time = 5 * 60
            else:
                sleep_time = 5
            logger.warning('Sleeping for %d seconds.' % sleep_time)
            time.sleep(sleep_time)
            logger.warning('Retrying...')
def push_meta_task(self, topic, command_name, overwrite_cmd_configs=None, uid=''):
    """
    Push MetaTask into Pulse.
    @param topic: The topic channel.
    @param command_name: the specified command name, which base on cmd_config.json.
    @param overwrite_cmd_configs: overwrite the Command's config.
    @param uid: unique ID string.
    @return:
    """
    # get MetaTask
    meta_task = self.get_meta_task(command_name,
                                   overwrite_cmd_configs=overwrite_cmd_configs)
    if not meta_task:
        self.logger.error('Skip pushing task.')
        # Bug fix: originally execution fell through here and then crashed on
        # ``meta_task.queue_type`` below (meta_task is falsy/None).
        return
    pickle_meta_task = pickle.dumps(meta_task)
    # make publisher
    p = HasalPublisher(user=self.username, password=self.password)
    # prepare message
    mymessage = GenericMessage()
    # setup topic
    mymessage.routing_parts.append(topic)
    mymessage.set_data(PULSE_KEY_TASK, pickle_meta_task)
    # for debugging
    mymessage.set_data(self.DEBUG_QUEUE_TYPE, meta_task.queue_type)
    mymessage.set_data(self.DEBUG_COMMAND_NAME, command_name)
    mymessage.set_data(self.DEBUG_COMMAND_CONFIG, self.command_config)
    mymessage.set_data(self.DEBUG_OVERWRITE_COMMAND_CONFIGS, overwrite_cmd_configs)
    mymessage.set_data(self.DEBUG_UID, uid)
    # send
    p.publish(mymessage)
    # disconnect
    p.disconnect()
def push_meta_task(self, topic, command_name, overwrite_cmd_configs=None, uid=''):
    """
    Push MetaTask into Pulse.
    @param topic: The topic channel.
    @param command_name: the specified command name, which base on cmd_config.json.
    @param overwrite_cmd_configs: overwrite the Command's config.
    @param uid: unique ID string.
    @return:
    """
    # get MetaTask
    meta_task = self.get_meta_task(
        command_name, overwrite_cmd_configs=overwrite_cmd_configs)
    if not meta_task:
        self.logger.error('Skip pushing task.')
        # Bug fix: without this return, the code below dereferenced
        # ``meta_task.queue_type`` on a falsy/None task and crashed.
        return
    pickle_meta_task = pickle.dumps(meta_task)
    # make publisher
    p = HasalPublisher(user=self.username, password=self.password)
    # prepare message
    mymessage = GenericMessage()
    # setup topic
    mymessage.routing_parts.append(topic)
    mymessage.set_data(PULSE_KEY_TASK, pickle_meta_task)
    # for debugging
    mymessage.set_data(self.DEBUG_QUEUE_TYPE, meta_task.queue_type)
    mymessage.set_data(self.DEBUG_COMMAND_NAME, command_name)
    mymessage.set_data(self.DEBUG_COMMAND_CONFIG, self.command_config)
    mymessage.set_data(self.DEBUG_OVERWRITE_COMMAND_CONFIGS,
                       overwrite_cmd_configs)
    mymessage.set_data(self.DEBUG_UID, uid)
    # send
    p.publish(mymessage)
    # disconnect
    p.disconnect()