def _process_results(self, results):
    '''Run driven application with converted data and write the app
    results to a file or database.

    :param results: driven-application output holding ``commands``
        (device set-points), ``log_messages`` and ``table_output``
        (rows keyed by application name).

    Side effects: appends table rows to the module-level ``output_file``
    (when configured), publishes each table value on the analysis topic
    once per entry in ``units``, and schedules device commands when
    ``mode`` is enabled.
    '''
    _log.debug('Processing Results!')
    # .items() works on both Python 2 and 3 (iteritems was Py2-only).
    for key, value in results.commands.items():
        _log.debug("COMMAND: {}->{}".format(key, value))
    for value in results.log_messages:
        _log.debug("LOG: {}".format(value))
    for key, value in results.table_output.items():
        _log.debug("TABLE: {}->{}".format(key, value))
    if output_file is not None and results.table_output:
        for v in results.table_output.values():
            fname = output_file
            for r in v:
                # Opened in append mode per row so partial results
                # survive a crash; the ``with`` block handles closing
                # (the original's explicit f.close() was redundant).
                with open(fname, 'a+') as f:
                    fout = csv.DictWriter(f, r.keys())
                    if not self._header_written:
                        fout.writeheader()
                        self._header_written = True
                    fout.writerow(r)
    # publish to message bus.
    if results.table_output:
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: str(self.received_input_datetime),
        }
        for v in results.table_output.values():
            for r in v:
                for key, value in r.items():
                    # Booleans are published as 0/1 integers.
                    if isinstance(value, bool):
                        value = int(value)
                    # Publish the same value once per configured unit.
                    for item in units:
                        _analysis['unit'] = item
                        analysis_topic = topics.ANALYSIS_VALUE(
                            point=key, **_analysis)
                        self.publish_json(analysis_topic, headers, value)
    if results.commands and mode:
        self.commands = results.commands
        if self.keys is None:
            self.keys = self.commands.keys()
        self.schedule_task()
def _process_results(self, results):
    '''Run driven application with converted data and write the app
    results to a file or database.

    :param results: driven-application output holding ``commands``,
        ``log_messages`` and ``table_output``.

    Side effects: appends table rows to the module-level ``output_file``
    (when configured), publishes each value on the analysis topic built
    from ``config['device']``, and schedules device commands when
    ``mode`` is enabled.
    '''
    _log.debug('Processing Results!')
    for key, value in results.commands.items():
        _log.debug("COMMAND: {}->{}".format(key, value))
    for value in results.log_messages:
        _log.debug("LOG: {}".format(value))
    for key, value in results.table_output.items():
        _log.debug("TABLE: {}->{}".format(key, value))
    # publish to output file if available.
    if output_file is not None and results.table_output:
        for v in results.table_output.values():
            fname = output_file
            for r in v:
                # Append mode per row; the ``with`` block closes the
                # file (the original's explicit close was redundant).
                with open(fname, 'a+') as f:
                    fout = csv.DictWriter(f, r.keys())
                    if not self._header_written:
                        fout.writeheader()
                        self._header_written = True
                    fout.writerow(r)
    # publish to message bus.
    if results.table_output:
        now = utils.format_timestamp(self.received_input_datetime)
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: now,
            headers_mod.TIMESTAMP: now
        }
        for v in results.table_output.values():
            for r in v:
                for key, value in r.items():
                    # Booleans are published as 0/1 integers.
                    if isinstance(value, bool):
                        value = int(value)
                    topic = topics.ANALYSIS_VALUE(point=key,
                                                  **config['device'])
                    self.publish_json(topic, headers, value)
    if results.commands and mode:
        self.commands = results.commands
        if self.keys is None:
            self.keys = self.commands.keys()
        self.schedule_task()
def publish_analysis_results(self, results):
    """
    Publish table_data in analysis results to the message bus for
    capture by the data historian.

    :param results: Results object containing commands for devices,
        log messages and table data.
    :type results: Results object \\volttron.platform.agent.driven
    :returns: Same as results param.
    :rtype: Results object \\volttron.platform.agent.driven
    """
    for app, analysis_table in results.table_output.items():
        # App keys may be encoded as "<name>&<timestamp>"; when no
        # '&' is present split() yields one element and indexing [1]
        # raises IndexError, so fall back to the input arrival time.
        try:
            name_timestamp = app.split('&')
            _name = name_timestamp[0]
            timestamp = name_timestamp[1]
        except IndexError:
            _name = app
            timestamp = str(self.received_input_datetime)
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: timestamp,
        }
        for entry in analysis_table:
            for key, value in entry.items():
                # Publish each point once per commanded device.
                for _device in command_devices:
                    analysis['unit'] = _device
                    analysis_topic = topics.ANALYSIS_VALUE(point=key,
                                                           **analysis)
                    # bools count as ints here; they publish as 'int'.
                    datatype = 'int' if isinstance(value, int) else 'float'
                    kbase = key[key.rfind('/') + 1:]
                    message = [{kbase: value},
                               {kbase: {'tz': 'US/Pacific',
                                        'type': datatype,
                                        'units': 'float',
                                        }
                                }]
                    self.vip.pubsub.publish(
                        'pubsub', analysis_topic, headers, message)
    return results
def publish_analysis_results(self, results):
    """Publish analysis results to the message bus for capture by the
    data historian.

    :param results: Results object containing device commands, log
        messages and table data.
    :returns: the same ``results`` object, unchanged.
    """
    for app, analysis_table in results.table_output.items():
        # App keys may be encoded as "<name>&<timestamp>"; when no
        # '&' is present indexing [1] raises IndexError, so fall back
        # to the input arrival time.
        try:
            name_timestamp = app.split('&')
            _name = name_timestamp[0]
            timestamp = name_timestamp[1]
        except IndexError:
            _name = app
            timestamp = str(self.received_input_datetime)
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: timestamp,
        }
        for entry in analysis_table:
            for key, value in entry.items():
                # Publish each point once per commanded device.
                for _device in command_devices:
                    analysis['unit'] = _device
                    analysis_topic = topics.ANALYSIS_VALUE(point=key,
                                                           **analysis)
                    # bools count as ints here; they publish as 'int'.
                    datatype = 'int' if isinstance(value, int) else 'float'
                    kbase = key[key.rfind('/') + 1:]
                    message = [{
                        kbase: value
                    }, {
                        kbase: {
                            'tz': 'US/Pacific',
                            'type': datatype,
                            'units': 'float',
                        }
                    }]
                    self.vip.pubsub.publish('pubsub', analysis_topic,
                                            headers, message)
    return results
def publish_analysis_results(self, results):
    """
    Publish table_data in analysis results to the message bus for
    capture by the data historian.  Values for each equipment topic are
    batched into a single publish rather than one publish per point.

    :param results: Results object containing commands for devices,
        log messages and table data.
    :type results: Results object \\volttron.platform.agent.driven
    :returns: Same as results param.
    :rtype: Results object \\volttron.platform.agent.driven
    """
    for app, analysis_table in results.table_output.items():
        # App keys may be encoded as "<name>&<timestamp>"; when no
        # '&' is present indexing [1] raises IndexError, so fall back
        # to the input arrival time.
        try:
            name_timestamp = app.split('&')
            _name = name_timestamp[0]
            timestamp = name_timestamp[1]
        except IndexError:
            _name = app
            timestamp = str(self.received_input_datetime)
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: timestamp,
        }
        # The keys in this publish should look like the following
        # with the values being a dictionary of points off of these
        # base topics
        #
        # Schedule-Reset ACCx/data/interior_ahu/vav1600e
        # Schedule-Reset ACCx/data/interior_ahu/vav1534
        to_publish = defaultdict(list)
        for entry in analysis_table:
            for key, value in entry.items():
                for _device in command_devices:
                    analysis['unit'] = _device
                    analysis_topic = topics.ANALYSIS_VALUE(point=key,
                                                           **analysis)
                    # bools count as ints here; they publish as 'int'.
                    datatype = 'int' if isinstance(value, int) else 'float'
                    kbase = key[key.rfind('/') + 1:]
                    # Group points by the topic minus its point segment.
                    topic_without_point = analysis_topic[:analysis_topic
                                                         .rfind('/')]
                    if not to_publish[topic_without_point]:
                        # [values-by-point, metadata-by-point]
                        to_publish[topic_without_point] = [{}, {}]
                    to_publish[topic_without_point][0][kbase] = value
                    to_publish[topic_without_point][1][kbase] = {
                        'tz': 'US/Pacific',
                        'type': datatype,
                        'units': 'float',
                    }
        for equipment, _analysis in to_publish.items():
            self.vip.pubsub.publish('pubsub', equipment, headers,
                                    _analysis)
        # Note: to_publish is rebound each iteration, so no explicit
        # clear() is needed.
    return results
def _process_results(self, results):
    '''Run driven application with converted data and write the app
    results to a file or database.

    :param results: driven-application output holding ``commands``,
        ``log_messages`` and ``table_output``.

    Side effects: appends table rows to the module-level ``output_file``
    (when configured), publishes each table value on the analysis topic
    once per entry in ``units``, and issues device commands via
    ``command_equip`` when ``mode`` is enabled.
    '''
    _log.debug('Processing Results!')
    for key, value in results.commands.items():
        _log.debug("COMMAND: {}->{}".format(key, value))
    for value in results.log_messages:
        _log.debug("LOG: {}".format(value))
    for key, value in results.table_output.items():
        _log.debug("TABLE: {}->{}".format(key, value))
    if output_file is not None:
        _log.debug('Writing Output File!')
        if results.table_output:
            for v in results.table_output.values():
                fname = output_file
                for r in v:
                    # Append mode per row; the ``with`` block closes
                    # the file (explicit close was redundant).
                    with open(fname, 'a+') as f:
                        fout = csv.DictWriter(f, r.keys())
                        if not self._header_written:
                            fout.writeheader()
                            self._header_written = True
                        fout.writerow(r)
            _log.debug('File Written!')

    def get_unit(point):
        '''Get a unit type based upon the regular expression in the
        config file.  If NOT found returns percent as a default unit.
        '''
        # NOTE(review): defined but never called within this method;
        # retained for behavioral parity — confirm whether it can be
        # removed or should be wired into the publish loop below.
        _log.debug('In get unit!')
        for k, v in unittype_map.items():
            if re.match(k, point):
                return v
        return 'percent'

    # publish to message bus.
    _log.debug('Publishing table output to message bus')
    if results.table_output:
        headers = {
            headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
            headers_mod.DATE: str(self.received_input_datetime),
        }
        for v in results.table_output.values():
            for r in v:
                for key, value in r.items():
                    # Booleans are published as 0/1 integers.
                    if isinstance(value, bool):
                        value = int(value)
                    for item in units:
                        _analysis['unit'] = item
                        analysis_topic = topics.ANALYSIS_VALUE(
                            point=key, **_analysis)
                        # bools were converted above, so they (and all
                        # ints) publish with datatype 'int'.
                        datatype = 'int' if isinstance(value, int) else 'float'
                        kbase = key[key.rfind('/') + 1:]
                        message = [{
                            kbase: value
                        }, {
                            kbase: {
                                'tz': 'US/Pacific',
                                'type': datatype,
                                'units': 'float',
                            }
                        }]
                        self.vip.pubsub.publish(peer="pubsub",
                                                topic=analysis_topic,
                                                headers=headers,
                                                message=message)
    _log.debug('Publishing commands to message bus')
    _log.debug(mode)
    if results.commands and mode:
        self.commands = results.commands
        if self.keys is None:
            self.keys = list(self.commands.keys())
        _log.debug("we have commands")
        self.command_equip()
class Agent(PublishMixin, BaseAgent): '''Agent listens to message bus device and runs when data is published. ''' def __init__(self, **kwargs): super(Agent, self).__init__(**kwargs) self._update_event = None self._update_event_time = None self.keys = None self._device_states = {} self._required_subdevice_values = subdevices self._subdevice_values = {} self._kwargs = kwargs self.commands = {} self.current_point = None self.current_key = None if output_file != None: with open(output_file, 'w') as writer: writer.close() self._header_written = False def initialize_subdevices(self): self._subdevice_values = {} for r in self._required_subdevice_values: for s in r: self._subdevice_values[r][s] = None def should_run_now(self): if len(self._required_subdevice_values) < 1: return True def has_subdevice_value(unit, subdevice): return self.subdevice_value[unit][subdevice] != None for r in self._required_subdevice_values: for s in r: if not has_subdevice_value(r, s): return False return True @matching.match_exact(topics.DEVICES_VALUE(point='all', **device)) def on_received_message(self, topic, headers, message, matched): '''Subscribe to device data and convert data to correct type for the driven application. ''' _log.debug("Message received") _log.debug("MESSAGE: " + jsonapi.dumps(message[0])) _log.debug("TOPIC: " + topic) data = jsonapi.loads(message[0]) if not converter.initialized and \ config.get('conversion_map') is not None: converter.setup_conversion_map(config.get('conversion_map'), data.keys()) data = converter.process_row(data) if len(self._required_subdevice_values) < 1: results = app_instance.run(datetime.now(), data) self._process_results(results) else: # apply data to subdevice values. 
if self.should_run_now(): results = app_instance.run(datetime.now(), self._subdevice_values) self._process_results(results) @matching.match_exact(topics.ANALYSIS_VALUE(point='all', **device)) def on_rec_analysis_message(self, topic, headers, message, matched): print('here!') def _process_results(self, results): '''Run driven application with converted data and write the app results to a file or database. ''' _log.debug('Processing Results!') for key, value in results.commands.iteritems(): _log.debug("COMMAND: {}->{}".format(key, value)) for value in results.log_messages: _log.debug("LOG: {}".format(value)) for key, value in results.table_output.iteritems(): _log.debug("TABLE: {}->{}".format(key, value)) if output_file != None: if len(results.table_output.keys()) > 0: for _, v in results.table_output.items(): fname = output_file # +"-"+k+".csv" for r in v: with open(fname, 'a+') as f: keys = r.keys() fout = csv.DictWriter(f, keys) if not self._header_written: fout.writeheader() self._header_written = True # if not header_written: # fout.writerow(keys) fout.writerow(r) f.close() if results.commands and mode: self.commands = results.commands if self.keys is None: self.keys = self.commands.keys() self.schedule_task() def schedule_task(self): '''Schedule access to modify device controls.''' _log.debug('Schedule Device Access') headers = { 'type': 'NEW_SCHEDULE', 'requesterID': agent_id, 'taskID': actuator_id, 'priority': 'LOW' } start = datetime.now() end = start + td(seconds=30) start = str(start) end = str(end) self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers, [["{campus}/{building}/{unit}".format(**device), start, end]]) def command_equip(self): '''Execute commands on configured device.''' self.current_key = self.keys[0] value = self.commands[self.current_key] headers = { 'Content-Type': 'text/plain', 'requesterID': agent_id, } self.publish(topics.ACTUATOR_SET(point=self.current_key, **device), headers, str(value)) 
@matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT()) def schedule_result(self, topic, headers, message, match): '''Actuator response (FAILURE, SUCESS).''' print 'Actuator Response' msg = jsonapi.loads(message[0]) msg = msg['result'] _log.debug('Schedule Device ACCESS') if self.keys: if msg == "SUCCESS": self.command_equip() elif msg == "FAILURE": print 'auto correction failed' _log.debug('Auto-correction of device failed.') @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device)) def on_set_result(self, topic, headers, message, match): '''Setting of point on device was successful.''' print ('Set Success: {point} - {value}' .format(point=self.current_key, value=str(self.commands[self.current_key]))) _log.debug('set_point({}, {})'. format(self.current_key, self.commands[self.current_key])) self.keys.remove(self.current_key) if self.keys: self.command_equip() else: print 'Done with Commands - Release device lock.' headers = { 'type': 'CANCEL_SCHEDULE', 'requesterID': agent_id, 'taskID': actuator_id } self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers, {}) self.keys = None @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device)) def on_set_error(self, topic, headers, message, match): '''Setting of point on device failed, log failure message.''' print 'Set ERROR' msg = jsonapi.loads(message[0]) msg = msg['type'] _log.debug('Actuator Error: ({}, {}, {})'. 
format(msg, self.current_key, self.commands[self.current_key])) self.keys.remove(self.current_key) if self.keys: self.command_equip() else: headers = { 'type': 'CANCEL_SCHEDULE', 'requesterID': agent_id, 'taskID': actuator_id } self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers, {}) self.keys = None def publish_to_smap(self, smap_identifier, value, smap_identifier2, value2, time_value): ''' Push diagnostic results and energy impact to sMAP historian. ''' self._log.debug(''.join(['Push to sMAP - ', smap_identifier, str(dx_msg), ' Energy Impact: ', str(energy_impact)])) if time_value is None: mytime = int(time.time()) else: mytime = time.mktime(time_value.timetuple()) if value2 is not None: content = { smap_identifier: { "Readings": [[mytime, value]], "Units": "TU", "data_type": "double" }, smap_identifier2: { "Readings": [[mytime, value2]], "Units": "kWh/h", "data_type": "double"} } else: content = { smap_identifier: { "Readings": [[mytime, value]], "Units": "TU", "data_type": "double" } } self._agent.publish(self.smap_path, self.headers, jsonapi.dumps(content))