def on_match(self, peer, sender, bus, topic, headers, message):
    """Log every message received through a match_all subscription."""
    # Payloads relayed by the 2.x compatibility layer arrive in the old
    # wire format and must be unpacked before use.
    is_legacy = (sender == 'pubsub.compat')
    if is_legacy:
        message = compat.unpack_legacy_message(headers, message)
    fmt = ("Peer: %r, Sender: %r:, Bus: %r, Topic: %r, Headers: %r, "
           "Message: %r")
    self._logfn(fmt, peer, sender, bus, topic, headers, message)
def on_match(self, peer, sender, bus, topic, headers, message):
    # Use match_all to receive all messages; collect homeowner bids and
    # run the market clearing once every expected bid has arrived.
    if sender == 'pubsub.compat':
        # 2.0 agents publish through the compat layer; unpack first.
        message = compat.unpack_legacy_message(headers, message)
    if topic == "Bidding":
        _log.info("Topic: %r, from, %r, Demand curve: %r", topic,
                  headers.get('AgentID'), message)
        '''Curve Dictionary of Homeowner Agents {AgentID: Bidding Price, Bidding Quantity} '''
        # NOTE(review): assumes message[1] is the bid price and message[3]
        # the bid quantity -- TODO confirm against the homeowner agent's
        # publish format.
        self.hwA_curves[headers.get('AgentID')] = [message[1], message[3]]
        # Wait until getting all bidding/demand curves.
        if len(self.hwA_curves) == self.number_of_hwA:
            clearing_price, clearing_quantity, revenue = self.compute_clearing_price(
                self.hwA_curves)
            # Broadcast the clearing result to all participants.
            message1 = [clearing_price, clearing_quantity]
            self.vip.pubsub.publish('pubsub', 'clearing price', headers,
                                    message1)
            _log.info(
                'clearing price is: %r, clearing quantity is %r, revenue: %r',
                clearing_price, clearing_quantity, revenue)
        else:
            _log.info('Waiting for bidding from homeownerAgent...')
    elif (topic == "Load Status"):
        _log.info("message = %r", message)
def on_match(self, peer, sender, bus, topic, headers, message):
    '''Debug-log every message seen by this match_all subscription.'''
    # Normalise 2.x compat-layer payloads before logging them.
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    details = (peer, sender, bus, topic, headers, message)
    _log.debug(
        "Peer: %r, Sender: %r:, Bus: %r, Topic: %r, Headers: %r, "
        "Message: %r", *details)
def capture_data(self, peer, sender, bus, topic, headers, message):
    """Capture a forwarded message and queue it for publication.

    The payload (message plus its headers) is pushed onto the agent's
    event queue, keyed by the timestamp string carried in the headers.
    Re-raises ValueError if a legacy (2.x compat) message cannot be
    unpacked.
    """
    # Grab the timestamp string from the message (we use this as the
    # value in our readings at the end of this method)
    _log.debug("In capture data")
    timestamp_string = headers.get(headers_mod.DATE, None)
    data = message
    try:
        # 2.0 agents compatability layer makes sender = pubsub.compat
        # so we can do the proper thing when it is here
        _log.debug("message in capture_data {}".format(message))
        if sender == 'pubsub.compat':
            data = compat.unpack_legacy_message(headers, message)
            _log.debug("data in capture_data {}".format(data))
        # NOTE: the former no-op ``data = data`` isinstance chain was
        # removed; the payload is forwarded unchanged whatever its type.
    except ValueError:
        log_message = "message for {topic} bad message string:" \
                      "{message_string}"
        _log.error(
            log_message.format(topic=topic, message_string=message[0]))
        raise

    # Apply the configured topic renames, caching each rewrite so a
    # given incoming topic is only computed once.
    if topic_replace_list:
        if topic in self._topic_replace_map.keys():
            topic = self._topic_replace_map[topic]
        else:
            self._topic_replace_map[topic] = topic
            temptopics = {}
            for x in topic_replace_list:
                if x['from'] in topic:
                    new_topic = temptopics.get(topic, topic)
                    temptopics[topic] = new_topic.replace(
                        x['from'], x['to'])
            for k, v in temptopics.items():
                self._topic_replace_map[k] = v
            topic = self._topic_replace_map[topic]

    if gather_timing_data:
        add_timing_data_to_header(
            headers,
            self.core.agent_uuid or self.core.identity,
            "collected")

    payload = {'headers': headers, 'message': data}
    self._event_queue.put({
        'source': "forwarded",
        'topic': topic,
        'readings': [(timestamp_string, payload)]
    })
def on_match(self, peer, sender, bus, topic, headers, message):
    """Use match_all to receive all messages and print them out."""
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    # Build the full text first, then hand it to the configured logger.
    template = ("Peer: {0}, Sender: {1}:, Bus: {2}, Topic: {3}, "
                "Headers: {4}, Message: \n{5}")
    self._logfn(template.format(peer, sender, bus, topic, headers,
                                pformat(message)))
def on_match(self, peer, sender, bus, topic, headers, message):
    """Use match_all to receive all messages and print them out."""
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    # Pretty-print the payload; the format args stay lazy for the logger.
    pretty = pformat(message)
    self._logfn("Peer: %r, Sender: %r:, Bus: %r, Topic: %r, Headers: %r, "
                "Message: \n%s", peer, sender, bus, topic, headers, pretty)
def _capture_log_data(self, peer, sender, bus, topic, headers, message): """Capture log data and submit it to be published by a historian.""" # Anon the topic if necessary. topic = self._get_topic(topic) try: # 2.0 agents compatability layer makes sender == pubsub.compat so # we can do the proper thing when it is here if sender == 'pubsub.compat': data = compat.unpack_legacy_message(headers, message) else: data = message except ValueError as e: _log.error("message for {topic} bad message string: " "{message_string}".format(topic=topic, message_string=message[0])) return except IndexError as e: _log.error("message for {topic} missing message string".format( topic=topic)) return source = 'log' _log.debug( "Queuing {topic} from {source} for publish".format(topic=topic, source=source)) _log.debug(data) for point, item in data.iteritems(): # ts_path = location + '/' + point if 'Readings' not in item or 'Units' not in item: _log.error("logging request for {topic} missing Readings " "or Units".format(topic=topic)) continue units = item['Units'] dtype = item.get('data_type', 'float') tz = item.get('tz', None) if dtype == 'double': dtype = 'float' meta = {'units': units, 'type': dtype} readings = item['Readings'] if not isinstance(readings, list): readings = [(get_aware_utc_now(), readings)] elif isinstance(readings[0], str): my_ts, my_tz = process_timestamp(readings[0], topic) readings = [(my_ts, readings[1])] if tz: meta['tz'] = tz elif my_tz: meta['tz'] = my_tz self._event_queue.put({'source': source, 'topic': topic + '/' + point, 'readings': readings, 'meta': meta})
def _capture_log_data(self, peer, sender, bus, topic, headers, message): """Capture log data and submit it to be published by a historian.""" # Anon the topic if necessary. topic = self._get_topic(topic) try: # 2.0 agents compatability layer makes sender == pubsub.compat so # we can do the proper thing when it is here if sender == 'pubsub.compat': data = compat.unpack_legacy_message(headers, message) else: data = message except ValueError as e: _log.error("message for {topic} bad message string: " "{message_string}".format(topic=topic, message_string=message[0])) return except IndexError as e: _log.error("message for {topic} missing message string".format( topic=topic)) return source = 'log' _log.debug("Queuing {topic} from {source} for publish".format( topic=topic, source=source)) for point, item in data.iteritems(): # ts_path = location + '/' + point if 'Readings' not in item or 'Units' not in item: _log.error("logging request for {topic} missing Readings " "or Units".format(topic=topic)) continue units = item['Units'] dtype = item.get('data_type', 'float') tz = item.get('tz', None) if dtype == 'double': dtype = 'float' meta = {'units': units, 'type': dtype} readings = item['Readings'] if not isinstance(readings, list): readings = [(get_aware_utc_now(), readings)] elif isinstance(readings[0], str): my_ts, my_tz = process_timestamp(readings[0], topic) readings = [(my_ts, readings[1])] if tz: meta['tz'] = tz elif my_tz: meta['tz'] = my_tz self._event_queue.put({ 'source': source, 'topic': topic + '/' + point, 'readings': readings, 'meta': meta })
def onNewPrice(self, peer, sender, bus, topic, headers, message):
    """Handle a published price point; react only when it changed."""
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    price = message[0]
    _log.debug("*** New Price Point: {0:.2f} ***".format(price))
    # Skip the downstream work when the price did not actually change.
    if price != self._price_point_current:
        self.processNewPricePoint(price)
def _capture_data(self, peer, sender, bus, topic, headers, message,
                  device):
    """Capture a device/analysis publication and queue each point.

    Each (point, value) in the payload is queued for publication under
    ``<device>/<point>`` with per-point metadata when available.
    """
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    # Prefer the timestamp carried in the headers over "now".
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    _log.debug("### In capture_data timestamp str {} ".format(timestamp))
    try:
        _log.debug(
            "### In capture_data Actual message {} ".format(message))
        # 2.0 agents compatability layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            # message = jsonapi.loads(message[0])
            message = compat.unpack_legacy_message(headers, message)
            _log.debug("### message after compat {}".format(message))
        # NOTE(review): assumes non-dict messages are [values, meta]
        # pairs -- TODO confirm against the publisher's format.
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.exception(e)
        return
    meta = {}
    if not isinstance(message, dict):
        meta = message[1]
    # Analysis topics are tagged separately from device scrapes.
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug(
        "Queuing {topic} from {source} for publish".format(topic=topic,
                                                           source=source))
    for key, value in values.iteritems():
        point_topic = device + '/' + key
        self._event_queue.put({'source': source,
                               'topic': point_topic,
                               'readings': [(timestamp, value)],
                               'meta': meta.get(key, {})})
def capture_data(self, peer, sender, bus, topic, headers, message):
    """Capture a forwarded message and queue it for publication.

    The payload (message plus its headers) is pushed onto the agent's
    event queue keyed by the timestamp string from the headers.
    Re-raises ValueError if a legacy (2.x compat) message cannot be
    unpacked.
    """
    # Grab the timestamp string from the message (we use this as the
    # value in our readings at the end of this method)
    _log.debug("In capture data")
    timestamp_string = headers.get(headers_mod.DATE, None)
    data = message
    try:
        # 2.0 agents compatability layer makes sender = pubsub.compat
        # so we can do the proper thing when it is here
        _log.debug("message in capture_data {}".format(message))
        if sender == 'pubsub.compat':
            data = compat.unpack_legacy_message(headers, message)
            _log.debug("data in capture_data {}".format(data))
        # NOTE: the former no-op ``data = data`` isinstance chain was
        # removed; the payload is forwarded unchanged whatever its type.
    except ValueError:
        log_message = "message for {topic} bad message string:" \
                      "{message_string}"
        _log.error(log_message.format(topic=topic,
                                      message_string=message[0]))
        raise

    # Apply the configured topic renames, caching each rewrite so a
    # given incoming topic is only computed once.
    if topic_replace_list:
        if topic in self._topic_replace_map.keys():
            topic = self._topic_replace_map[topic]
        else:
            self._topic_replace_map[topic] = topic
            temptopics = {}
            for x in topic_replace_list:
                if x['from'] in topic:
                    new_topic = temptopics.get(topic, topic)
                    temptopics[topic] = new_topic.replace(
                        x['from'], x['to'])
            for k, v in temptopics.items():
                self._topic_replace_map[k] = v
            topic = self._topic_replace_map[topic]

    if gather_timing_data:
        add_timing_data_to_header(headers,
                                  self.core.agent_uuid or
                                  self.core.identity,
                                  "collected")

    payload = {'headers': headers, 'message': data}
    self._event_queue.put({'source': "forwarded",
                           'topic': topic,
                           'readings': [(timestamp_string, payload)]})
def _capture_data(self, peer, sender, bus, topic, headers, message,
                  device):
    """Capture a device/analysis publication and queue each point.

    Each (point, value) in the payload is queued for publication under
    ``<device>/<point>`` with per-point metadata when available.
    """
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    # Prefer the timestamp carried in the headers over "now".
    timestamp_string = headers.get(headers_mod.DATE, None)
    timestamp = get_aware_utc_now()
    if timestamp_string is not None:
        timestamp, my_tz = process_timestamp(timestamp_string, topic)
    try:
        _log.debug(
            "### In capture_data Actual message {} ".format(message))
        # 2.0 agents compatability layer makes sender == pubsub.compat so
        # we can do the proper thing when it is here
        if sender == 'pubsub.compat':
            # message = jsonapi.loads(message[0])
            message = compat.unpack_legacy_message(headers, message)
        # NOTE(review): assumes non-dict messages are [values, meta]
        # pairs -- TODO confirm against the publisher's format.
        if isinstance(message, dict):
            values = message
        else:
            values = message[0]
    except ValueError as e:
        _log.error("message for {topic} bad message string: "
                   "{message_string}".format(topic=topic,
                                             message_string=message[0]))
        return
    except IndexError as e:
        _log.error("message for {topic} missing message string".format(
            topic=topic))
        return
    except Exception as e:
        _log.exception(e)
        return
    meta = {}
    if not isinstance(message, dict):
        meta = message[1]
    # Analysis topics are tagged separately from device scrapes.
    if topic.startswith('analysis'):
        source = 'analysis'
    else:
        source = 'scrape'
    _log.debug("Queuing {topic} from {source} for publish".format(
        topic=topic, source=source))
    for key, value in values.iteritems():
        point_topic = device + '/' + key
        self._event_queue.put({
            'source': source,
            'topic': point_topic,
            'readings': [(timestamp, value)],
            'meta': meta.get(key, {})
        })
def onDsEd(self, peer, sender, bus, topic, headers, message):
    """Record a new energy-demand value published by a downstream device."""
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    ed_value = message[0]
    _log.debug('*********** New ed from ds, topic: ' + topic +
               ' & ed: {0:.4f}'.format(ed_value))
    # The device id is the third element of the slash-separated topic.
    device_id = (topic.split('/', 3))[2]
    slot = self._get_ds_device_idx(device_id)
    self._ds_ed[slot] = ed_value
    return
def onNewPrice(self, peer, sender, bus, topic, headers, message):
    """Handle a new smart-hub price point: derive and post our own price.

    NOTE(review): the original wrapped the work in ``if True:`` with the
    real change check (``self._current_sh_pp != sh_pp``) commented out.
    The dead wrapper is removed here without changing behaviour -- every
    received price is still processed.  Restore the change check if
    duplicate prices should be skipped.
    """
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)

    # New hub price point.
    sh_pp = message[0]
    _log.debug("*** New Price Point: {0:.2f} ***".format(sh_pp))

    ss_pp = self._computeNewPrice(sh_pp)
    self._post_price(ss_pp)
def handle_set(self, peer, sender, bus, topic, headers, message):
    """
    Set the value of a point over pub/sub.

    Publish to ``devices/actuators/set/<device path>/<actuation point>``
    with the header:

    .. code-block:: python

        {
            'requesterID': <Agent ID>
        }

    On success the ActuatorAgent replies on the **value** topic for the
    actuator, ``devices/actuators/value/<full device path>/<actuation
    point>``, with the message set to the value of the point.  Errors
    are published on ``devices/actuators/error/<full device
    path>/<actuation point>`` with the same header as the request.
    """
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)

    requester = headers.get('requesterID')
    point = topic.replace(topics.ACTUATOR_SET() + '/', '', 1)
    headers = self._get_headers(requester)

    # An empty payload cannot be a valid set request; report and bail.
    if not message:
        error = {'type': 'ValueError', 'value': 'missing argument'}
        _log.debug('ValueError: ' + str(error))
        self._push_result_topic_pair(ERROR_RESPONSE_PREFIX,
                                     point, headers, error)
        return

    try:
        self.set_point(requester, point, message)
    except RemoteError as ex:
        self._handle_remote_error(ex, point, headers)
    except StandardError as ex:
        self._handle_standard_error(ex, point, headers)
def handle_set(self, peer, sender, bus, topic, headers, message):
    """
    Set the value of a point.

    To set a value publish a message to the following topic:

    ``devices/actuators/set/<device path>/<actuation point>``

    with the fallowing header:

    .. code-block:: python

        {
            'requesterID': <Agent ID>
        }

    The ActuatorAgent will reply on the **value** topic
    for the actuator:

    ``devices/actuators/value/<full device path>/<actuation point>``

    with the message set to the value the point.

    Errors will be published on

    ``devices/actuators/error/<full device path>/<actuation point>``

    with the same header as the request.
    """
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    point = topic.replace(topics.ACTUATOR_SET() + '/', '', 1)
    requester = headers.get('requesterID')
    headers = self._get_headers(requester)
    if message:
        # Delegate to set_point; map failures onto the error topic.
        try:
            self.set_point(requester, point, message)
        except RemoteError as ex:
            self._handle_remote_error(ex, point, headers)
        except StandardError as ex:
            self._handle_standard_error(ex, point, headers)
    else:
        error = {'type': 'ValueError', 'value': 'missing argument'}
        _log.debug('ValueError: ' + str(error))
        self._push_result_topic_pair(ERROR_RESPONSE_PREFIX, point,
                                     headers, error)
def _capture_record_data(self, peer, sender, bus, topic, headers, message):
    """Queue a record-topic message for publication by the historian."""
    _log.debug('Capture record data {}'.format(message))
    # Anon the topic if necessary.
    topic = self._get_topic(topic)
    # Prefer the timestamp carried in the headers; fall back to "now".
    stamp_header = headers.get(headers_mod.DATE, None)
    if stamp_header is None:
        timestamp = get_aware_utc_now()
    else:
        timestamp, my_tz = process_timestamp(stamp_header, topic)
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    record = {'source': 'record',
              'topic': topic,
              'readings': [(timestamp, message)],
              'meta': {}}
    self._event_queue.put(record)
def _capture_record_data(self, peer, sender, bus, topic, headers, message):
    """Queue a record-topic payload on the historian's event queue."""
    _log.debug('Capture record data {}'.format(message))
    # Anon the topic if necessary.
    anon_topic = self._get_topic(topic)
    # Use the header timestamp when present, otherwise the current time.
    ts = get_aware_utc_now()
    header_stamp = headers.get(headers_mod.DATE, None)
    if header_stamp is not None:
        ts, my_tz = process_timestamp(header_stamp, anon_topic)
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    self._event_queue.put(
        {'source': 'record',
         'topic': anon_topic,
         'readings': [(ts, message)],
         'meta': {}})
def parse_IEB_msgs(self, peer, sender, bus, topic, headers, message):
    """
    parses message on IEB published to the SiteManager's specified path,
    and populates endpts (populate_endpts) based on message contents
    """
    _log.debug("SiteManagerStatus: Topic found - " + str(topic))
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    #_log.info("Message length is: "+message.len())
    _log.debug("Msg: " + str(message) + "\n")
    # Dict payloads carry no metadata; list payloads are [data, meta].
    if type(message) is dict:
        #FIXME temporary fix
        data = message
        meta_data = None
    else:
        data = message[0]
        meta_data = message[1]
    #ii = 1
    #for m in message[1:]:
    #    _log.debug("Msg: "+str(ii)+" is "+str(m)+"\n")
    #    ii+=1
    #for k, v in data.items():
    #    _log.info("Message is: "+k+": "+str(v))

    # update the current topic's last read time to indicate data is fresh
    # NOTE(review): if no configured topic matches, ``cur_topic_name`` is
    # never bound and the use below raises NameError, which the bare
    # except swallows -- confirm whether that is the intended behaviour.
    for topic_obj in self.topics:
        cur_topic_str = topic_obj["TopicPath"] + "/all"
        if cur_topic_str == topic:
            topic_obj["last_read_time"] = utils.get_aware_utc_now()
            cur_topic_name = topic_obj["TopicName"]
            _log.debug("SiteManagerStatus: Topic " + topic + " read at " +
                       datetime.strftime(topic_obj["last_read_time"],
                                         "%Y-%m-%dT%H:%M:%S"))
            break
    try:
        self.updating = 1  # indicates that data is updating - do not trust until populate end pts is complete
        self.site.populate_endpts(data, self, meta_data, cur_topic_name,
                                  topic)
        self.dirtyFlag = 0  # clear dirtyFlag on new read
        self.updating = 0
    except:
        #FIXME - this should probably look for specific error to trap, right now this is
        # a catch-all for any errors in parsing incoming msg
        _log.info("Exception: in populate end_pts!!!")
        pass
def onNewEnergyDemand(self, peer, sender, bus, topic, headers, message):
    """Forward a changed energy-demand value to the upstream bridge.

    Zone bridges never forward.  A changed demand is posted to the
    upstream VolttronBridge via RPC, re-registering once if the
    upstream connection had been lost.
    """
    if self._bridge_host == 'ZONE':
        #do nothing
        return

    _log.debug("onNewEnergyDemand()")
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)

    newEnergyDemand = message[0]
    _log.debug(
        "*** New Energy Demand: {0:.4f} ***".format(newEnergyDemand))

    #we want to post to us only if there is change in energy demand
    if self._ed_current == newEnergyDemand:
        return

    self._ed_previous = self._ed_current
    self._ed_current = newEnergyDemand

    _log.debug("posting new energy demand to upstream VolttronBridge")
    url_root = 'http://' + self._up_ip_addr + ':' + str(
        self._up_port) + '/VolttronBridge'

    #check for upstream connection, if not retry once
    if self._usConnected == False:
        self._usConnected = self._registerToUsBridge(url_root,\
                                            self._discovery_address,\
                                            self._deviceId)
        if not self._usConnected:
            # Registration failed; drop this update and wait for the next.
            _log.debug('May be upstream bridge is not running!!!')
            return

    if self.do_rpc(url_root, 'rpc_postEnergyDemand', \
                    {'discovery_address': self._discovery_address, \
                    'deviceId': self._deviceId, \
                    'newEnergyDemand': newEnergyDemand }):
        _log.debug("Success!!!")
    else:
        _log.debug("Failed!!!")
    return
def _clean_compat(self, sender, topic, headers, message): try: # 2.0 agents compatability layer makes sender == pubsub.compat so # we can do the proper thing when it is here if sender == 'pubsub.compat': data = compat.unpack_legacy_message(headers, message) else: data = message return data except ValueError as e: _log.error("message for {topic} bad message string: " "{message_string}".format(topic=topic, message_string=message[0])) raise e except IndexError as e: _log.error("message for {topic} missing message string".format( topic=topic)) raise e
def _clean_compat(self, sender, topic, headers, message): try: # 2.0 agents compatability layer makes sender == pubsub.compat so # we can do the proper thing when it is here if sender == 'pubsub.compat': data = compat.unpack_legacy_message(headers, message) else: data = message return data except ValueError as e: _log.error("message for {topic} bad message string: " "{message_string}".format(topic=topic, message_string=message[0])) raise e except IndexError as e: _log.error("message for {topic} missing message string".format( topic=topic)) raise e
def capture_data(self, peer, sender, bus, topic, headers, message):
    """Capture a forwarded message and queue it for publication.

    The payload (message plus headers) is queued under the renamed
    topic, keyed by the timestamp string from the headers.  Re-raises
    ValueError if a legacy (2.x compat) message cannot be unpacked.
    """
    # Grab the timestamp string from the message (we use this as the
    # value in our readings at the end of this method)
    _log.debug("In capture data")
    timestamp_string = headers.get(headers_mod.DATE, None)
    data = message
    try:
        # 2.0 agents compatability layer makes sender = pubsub.compat
        # so we can do the proper thing when it is here
        _log.debug("message in capture_data {}".format(message))
        if sender == 'pubsub.compat':
            data = compat.unpack_legacy_message(headers, message)
            _log.debug("data in capture_data {}".format(data))
        # NOTE: the former no-op ``data = data`` isinstance chain was
        # removed; the payload is forwarded unchanged whatever its type.
    except ValueError:
        log_message = "message for {topic} bad message string:" \
                      "{message_string}"
        _log.error(
            log_message.format(topic=topic, message_string=message[0]))
        raise

    topic = self.get_renamed_topic(topic)

    if self.gather_timing_data:
        add_timing_data_to_header(
            headers,
            self.core.agent_uuid or self.core.identity,
            "collected")

    payload = {'headers': headers, 'message': data}
    self._event_queue.put({
        'source': "forwarded",
        'topic': topic,
        'readings': [(timestamp_string, payload)]
    })
def read_msgs(self, peer, sender, bus, topic, headers, message):
    """
    for testing purposes - parses message on IEB published to the
    specified path and prints. To enable, set READ_BACK_MSGS to True
    """
    _log.info("Topic found - " + str(topic))
    if sender == 'pubsub.compat':
        message = compat.unpack_legacy_message(headers, message)
    _log.info("Msg: " + str(message) + "\n")
    try:
        #_log.info(str(message[0]["Forecast"][2][0:5]))
        # NOTE(review): assumes message[0] carries 'Forecast' and 'Time'
        # lists -- the transpose aligns the forecast series with its
        # timestamps.  TODO confirm against the publisher's format.
        df = pandas.DataFrame(data=message[0]["Forecast"]).transpose()
        df.index = pandas.Series(message[0]["Time"])
        print(df)
    except:
        # Fallback: the forecast may already be row-oriented.
        try:
            df = pandas.DataFrame(data=message[0]["Forecast"])
            df.index = pandas.Series(message[0]["Time"])
            print(df)
        except:
            # Bare except is acceptable here: debug-only reader that
            # silently drops messages it cannot tabulate.
            pass
def on_match(self, peer, sender, bus, topic, headers, message):
    '''Use match_all to receive all messages and print them out.'''
    # Payloads from the 2.x compatibility layer arrive in the old wire
    # format and are unpacked here; everything else is left untouched.
    legacy = (sender == 'pubsub.compat')
    if legacy:
        message = compat.unpack_legacy_message(headers, message)
def handle_schedule_request(self, peer, sender, bus, topic, headers,
                            message):
    """
    Schedule request pub/sub handler

    An agent can request a task schedule by publishing to the
    ``devices/actuators/schedule/request`` topic with the following
    header:

    .. code-block:: python

        {
            'type': 'NEW_SCHEDULE',
            'requesterID': <Agent ID>, #The name of the requesting agent.
            'taskID': <unique task ID>, #The desired task ID for this
            task. It must be unique among all other scheduled tasks.
            'priority': <task priority>, #The desired task priority,
            must be 'HIGH', 'LOW', or 'LOW_PREEMPT'
        }

    The message must describe the blocks of time using the format
    described in `Device Schedule`_.

    A task may be canceled by publishing to the
    ``devices/actuators/schedule/request`` topic with the following
    header:

    .. code-block:: python

        {
            'type': 'CANCEL_SCHEDULE',
            'requesterID': <Agent ID>, #The name of the requesting agent.
            'taskID': <unique task ID>, #The task ID for the canceled
            Task.
        }

    requesterID
        The name of the requesting agent.
    taskID
        The desired task ID for this task. It must be unique among all
        other scheduled tasks.
    priority
        The desired task priority, must be 'HIGH', 'LOW', or
        'LOW_PREEMPT'

    No message is requires to cancel a schedule.
    """
    if sender == 'pubsub.compat':
        # Unpack messages relayed through the 2.0 compatibility layer.
        message = compat.unpack_legacy_message(headers, message)
    request_type = headers.get('type')
    _log.debug(
        'handle_schedule_request: {topic}, {headers}, {message}'.format(
            topic=topic, headers=str(headers), message=str(message)))
    requester_id = headers.get('requesterID')
    task_id = headers.get('taskID')
    priority = headers.get('priority')
    if request_type == SCHEDULE_ACTION_NEW:
        try:
            # A single-element message wraps the request list one level
            # deep; unwrap it before scheduling.
            if len(message) == 1:
                requests = message[0]
            else:
                requests = message
            self.request_new_schedule(requester_id, task_id, priority,
                                      requests)
        except StandardError as ex:
            return self._handle_unknown_schedule_error(
                ex, headers, message)
    elif request_type == SCHEDULE_ACTION_CANCEL:
        try:
            self.request_cancel_schedule(requester_id, task_id)
        except StandardError as ex:
            return self._handle_unknown_schedule_error(
                ex, headers, message)
    else:
        # Unknown request type: report failure on the result topic.
        _log.debug('handle-schedule_request, invalid request type')
        self.vip.pubsub.publish(
            'pubsub', topics.ACTUATOR_SCHEDULE_RESULT(), headers, {
                'result': SCHEDULE_RESPONSE_FAILURE,
                'data': {},
                'info': 'INVALID_REQUEST_TYPE'
            })
def handle_schedule_request(self, peer, sender, bus, topic, headers, message): """ Schedule request pub/sub handler An agent can request a task schedule by publishing to the ``devices/actuators/schedule/request`` topic with the following header: .. code-block:: python { 'type': 'NEW_SCHEDULE', 'requesterID': <Agent ID>, #The name of the requesting agent. 'taskID': <unique task ID>, #The desired task ID for this task. It must be unique among all other scheduled tasks. 'priority': <task priority>, #The desired task priority, must be 'HIGH', 'LOW', or 'LOW_PREEMPT' } The message must describe the blocks of time using the format described in `Device Schedule`_. A task may be canceled by publishing to the ``devices/actuators/schedule/request`` topic with the following header: .. code-block:: python { 'type': 'CANCEL_SCHEDULE', 'requesterID': <Agent ID>, #The name of the requesting agent. 'taskID': <unique task ID>, #The task ID for the canceled Task. } requesterID The name of the requesting agent. taskID The desired task ID for this task. It must be unique among all other scheduled tasks. priority The desired task priority, must be 'HIGH', 'LOW', or 'LOW_PREEMPT' No message is requires to cancel a schedule. """ if sender == 'pubsub.compat': message = compat.unpack_legacy_message(headers, message) request_type = headers.get('type') _log.debug('handle_schedule_request: {topic}, {headers}, {message}'. format(topic=topic, headers=str(headers), message=str(message))) requester_id = headers.get('requesterID') task_id = headers.get('taskID') priority = headers.get('priority') if request_type == SCHEDULE_ACTION_NEW: try: if len(message) == 1: requests = message[0] else: requests = message self.request_new_schedule(requester_id, task_id, priority, requests) except StandardError as ex: return self._handle_unknown_schedule_error(ex, headers, message) elif request_type == SCHEDULE_ACTION_CANCEL: try: self.request_cancel_schedule(requester_id, task_id) except StandardError as ex: