def __init__(self, **kwargs):
    '''Load demand-response operating parameters and initialize state.

    Set points and CPP event parameters come from the module-level
    ``config``; all runtime bookkeeping attributes start empty.
    '''
    super(Agent, self).__init__(**kwargs)
    # Normal (non-event) operation set points.
    self.normal_firststage_fanspeed = config.get('normal_firststage_fanspeed', 75.0)
    self.normal_secondstage_fanspeed = config.get('normal_secondstage_fanspeed', 90.0)
    self.normal_damper_stpt = config.get('normal_damper_stpt', 5.0)
    self.normal_coolingstpt = config.get('normal_coolingstpt', 74.0)
    self.normal_heatingstpt = config.get('normal_heatingstpt', 67.0)
    self.smap_path = config.get('smap_path')
    self.default_cooling_stage_differential = 0.5
    self.current_spacetemp = 0.0
    # CPP (critical peak pricing) event parameters.
    self.building_thermal_constant = config.get('building_thermal_constant', 4.0)
    self.timestep_length = config.get('timestep_length', 900)
    self.csp_cpp = config.get('csp_cpp', 80.0)
    self.csp_pre = config.get('csp_pre', 67.0)
    # Seconds needed to drift from the CPP cooling set point back to the
    # normal cooling set point at the building's thermal rate (deg/hour).
    setpoint_offset = self.csp_cpp - self.normal_coolingstpt
    self.restore_window = int((setpoint_offset / self.building_thermal_constant) * 3600)
    # Runtime state for demand-response event handling.
    self.state = 'STARTUP'
    self.e_start_msg = None
    self.error_handler = None
    self.actuator_handler = None
    self.pre_cool_idle = None
    self.e_start = None
    self.e_end = None
    self.pre_stored_spacetemp = None
    self.device_schedule = {}
    self.all_scheduled_events = {}
    self.currently_running_dr_event_handlers = []
    self.headers = {headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    'requesterID': agent_id}
    utils.setup_logging()
    self._log = logging.getLogger(__name__)
def main(argv=sys.argv):
    '''Main method called to start the agent.'''
    # Configure VOLTTRON's standard logging before anything else runs.
    utils.setup_logging()
    try:
        # Launch the agent through the VIP entry point.
        utils.vip_main(hello_agent)
    except Exception:
        # Last-chance handler: record any otherwise-unhandled error
        # before the process exits.
        _log.exception('unhandled exception')
def main(argv=sys.argv):
    '''Main method called to start the agent.'''
    # Configure VOLTTRON's standard logging before anything else runs.
    utils.setup_logging()
    try:
        # Launch the UI agent via VOLTTRON's default agent runner.
        utils.default_main(UIAgent,
                           description='VOLTTRON platform™ agent for remote user interaction.',
                           argv=argv)
    except Exception:
        # Last-chance handler: record any otherwise-unhandled error
        # before the process exits.
        _log.exception('unhandled exception')
def __init__(self, **kwargs): super(Agent, self).__init__(**kwargs) path = os.path.abspath(settings.source_file) print path self._src_file_handle = open(path) header_line = self._src_file_handle.readline().strip() self._headers = header_line.split(',') self.end_time = None self.start_time = None self.task_id = None utils.setup_logging() self._log = logging.getLogger(__name__) logging.basicConfig(level=logging.debug, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d-%y %H:%M:%S')
def __init__(self, **kwargs):
    '''Initialize data publisher class attributes.'''
    super(Agent, self).__init__(**kwargs)
    self._agent_id = conf.get('publisherid')
    # NOTE(review): ``path`` is not defined in this scope -- presumably a
    # module-level name pointing at the source csv file; confirm.
    self._src_file_handle = open(path)
    # First line of the file is a comma-separated header row.
    header_line = self._src_file_handle.readline().strip()
    self._headers = header_line.split(',')
    self.end_time = None
    self.start_time = None
    self.task_id = None
    utils.setup_logging()
    self._log = logging.getLogger(__name__)
    self.scheduled_event = None
    # BUG FIX: ``level`` must be the constant logging.DEBUG; the original
    # passed the *function* logging.debug, which is not a valid level.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)-8s %(message)s',
        datefmt='%m-%d-%y %H:%M:%S')
    self._log.info('DATA PUBLISHER ID is PUBLISHER')
def __init__(self, **kwargs):
    '''Initialize data publisher class attributes.

    Opens the source csv and, when ``remember_playback`` is enabled and
    ``reset_playback`` is off, fast-forwards to the last recorded line so
    playback resumes where it left off.
    '''
    super(Publisher, self).__init__(**kwargs)
    self._agent_id = conf.get('publisherid')
    self._src_file_handle = open(path, 'rb')
    # Uses DictReader so that the first line in the file is auto
    # ingested and becomes the headers for the dictionary. Use the
    # fieldnames property to get the names of the fields available.
    self._reader = csv.DictReader(self._src_file_handle, delimiter=',')
    self.end_time = None
    self.start_time = None
    self.task_id = None
    utils.setup_logging()
    self._log = logging.getLogger(__name__)
    self.scheduled_event = None
    # BUG FIX: ``level`` must be the constant logging.DEBUG; the original
    # passed the *function* logging.debug, which is not a valid level.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)-8s %(message)s',
        datefmt='%m-%d-%y %H:%M:%S')
    if remember_playback:
        self._log.info('Keeping track of line being played in case of interuption.')
    else:
        self._log.info('Not storing line being played (enable by setting remember_playback=1 in config file')
    self._log.info('Publishing Starting')
    self._line_on = 0
    start_line = self.get_start_line()
    # Only move the start_line if the reset_playback switch is off and
    # the remember_playback switch is on.
    if not reset_playback and remember_playback:
        while self._line_on - 1 < start_line:
            self._reader.next()
            self._line_on += 1
    self._log.info('Playback starting on line: {}'.format(self._line_on))
def __init__(self, **kwargs):
    '''Initialize data publisher class attributes.'''
    super(Publisher, self).__init__(**kwargs)
    self._agent_id = conf.get('publisherid')
    # NOTE(review): ``path`` is not defined in this scope -- presumably a
    # module-level name pointing at the source csv file; confirm.
    self._src_file_handle = open(path, 'rb')
    # Uses DictReader so that the first line in the file is auto
    # ingested and becomes the headers for the dictionary. Use the
    # fieldnames property to get the names of the fields available.
    self._reader = csv.DictReader(self._src_file_handle, delimiter=',')
    self.end_time = None
    self.start_time = None
    self.task_id = None
    utils.setup_logging()
    self._log = logging.getLogger(__name__)
    self.scheduled_event = None
    # BUG FIX: ``level`` must be the constant logging.DEBUG; the original
    # passed the *function* logging.debug, which is not a valid level.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s %(levelname)-8s %(message)s',
        datefmt='%m-%d-%y %H:%M:%S')
    self._log.info('DATA PUBLISHER ID is PUBLISHER')
def passiveafdd(config_path, **kwargs): '''Passive fault detection application for AHU/RTU economizer systems''' config = utils.load_config(config_path) rtu_path = OrderedDict((key, config[key]) for key in ['campus', 'building', 'unit']) rtu_tag = '' for key in rtu_path: rtu_tag += rtu_path[key] + '-' device_topic = topics.DEVICES_VALUE(**rtu_path) + '/all' utils.setup_logging() _log = logging.getLogger(__name__) logging.basicConfig(level=logging.debug, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d-%y %H:%M:%S') class PassiveAFDD(Agent): def __init__(self, **kwargs): '''Input and initialize user configurable parameters.''' super(PassiveAFDD, self).__init__(**kwargs) # Agent Configuration parameters self.agent_id = config.get('agentid', 'passiveafdd') self.matemp = [] self.oatemp = [] self.ratemp = [] self.cool_call = [] self.compressor_status = [] self.heating_status = [] self.oa_damper = [] self.fan_status = [] self.timestamp = [] self.data_status = {} self.first_data_scrape = True self.cool_call_measured = True # supported economizer types. 
self.economizer_type = config.get('economizer type', 'differential_ddb').lower() self.economizer_types = ['differential_ddb', 'highlimit'] # Temperature sensor diagnostic thresholds self.mat_low = float(config.get('mat_low', 50.0)) self.mat_high = float(config.get('mat_high', 90.0)) self.oat_low = float(config.get('oat_low', 30.0)) self.oat_high = float(config.get('oat_high', 120.0)) self.rat_low = float(config.get('rat_low', 50.0)) self.rat_high = float(config.get('rat_high', 90.0)) self.temp_sensor_threshold = float(config.get('temperature sensor threshold', 5.0)) self.uncertainty_band = config.get('uncertainty deadband', 2.5) # Economizer diagnostic thresholds and parameters self.high_limit = float(config.get('high_limit', 60.0)) self.min_oa_damper = float(config.get('minimum oad command', 15.0)) self.minimum_oa = float(config.get('minimum oa', 10.0)) self.oae2_damper_threshold = float(config.get('oae2_damper_threshold', 30.0)) self.temperature_diff_requirement = float(config.get('temperature difference requirement', 5.0)) self.oae2_oaf_threshold = float(config.get('oae2_oaf_threshold', 25.0)) self.oae4_oaf_threshold = float(config.get('oae4_oaf_threshold', 25.0)) self.oae5_oaf_threshold = float(config.get('oae5_oaf_threshold', 0)) self.damper_deadband = config.get('oad uncertainty band', 10.0) data_pts = config['points'] self.oatemp_name = data_pts['oat_point_name'] self.ratemp_name = data_pts['rat_point_name'] self.oa_damper_name = data_pts['damper_point_name'] self.fan_status_name = data_pts['fan_status_point_name'] self.matemp_name = data_pts.get('mat_point_name', 'not measured') self.timestamp_name = data_pts.get('timestamp_name', 'Date') self.cool_call_name = data_pts['cool_call_point_name'] self.cool_cmd_name = data_pts['cool_cmd_point_name'] if self.cool_call_name.lower() == 'not measured': if self.cool_cmd_name.lower() == 'not measured': _log.debug('One cooling status point must be ' 'available for diagnostics.') sys.exit() self.cool_call_name = 
self.cool_cmd_name self.cool_call_measured = False self.heat_cmd_name = data_pts['heat_cmd_point_name'] self.data_pts = data_pts.values() # RTU rated parameters (e.g., capacity) self.eer = float(config.get('EER', 10)) tonnage = float(config.get('tonnage')) if tonnage: self.cfm = 300*tonnage self.csv_input = config.get('csv_input', False) # Misc. data configuration parameters mat_missing = config.get('mat not measured'.lower(), [False, '']) if mat_missing and self.cool_cmd_name.lower() == 'not measured': _log.debug('If the mixed-air temperature is not measured then ' 'the units compressor command must be available.') sys.exit() if mat_missing and self.heat_cmd_name.lower() == 'not measured': _log.debug('If the mixed-air temperature is not measured then ' 'the units heat command must to run diagnostic') if mat_missing[0]: try: self.matemp_name = mat_missing[1] except: _log.debug('If the mixed-air temperature is not ' 'specified the discharge-air temperature ' 'must be available or the diagnostic ' 'cannot proceed.') sys.exit() self.matemp_missing = mat_missing[0] if self.heat_cmd_name.lower() in NO_TREND: self.heat_cmd_name = False if self.cool_cmd_name.lower() in NO_TREND: self.heat_cmd_name = False # Device occupancy schedule sunday = config.get('Sunday') monday = config.get('Monday') tuesday = config.get('Tuesday') wednesday = config.get('Wednesday') thursday = config.get('Thursday') friday = config.get('Friday') saturday = config.get('Saturday') self.schedule_dict = dict({0: sunday, 1: monday, 2: tuesday, 3: wednesday, 4: thursday, 5: friday, 6: saturday}) # Create status list to that determines RTU mode of operation. 
self.status_lst = config.get('status list') self.data_status = self.data_status.fromkeys(self.status_lst, None) input_file_name = '' if self.csv_input: self.input_file = config['input file'] input_file_name = basename(splitext(self.input_file)[0]) results_file_name = rtu_tag + '-' + input_file_name output_directory = config.get('results directory', __file__) if not isdir(output_directory): try: makedirs(output_directory) except: _log.debug('Cannot create results directory, ' 'check user permissions.') sys.exit() i = 0 now = datetime.date.today() file_path = join(output_directory, results_file_name + '({ts}).csv'.format(ts=now)) while isfile(file_path): i += 1 file_path = join(output_directory, results_file_name + '({})-{}.csv'.format(now, i)) self.result_file_path = file_path @Core.receiver('onstart') def startup(self, sender, **kwargs): '''Startup method.''' if self.csv_input: device_data = self.run_from_csv() self.process_file_data(device_data) return self.vip.pubsub.subscribe(peer='pubsub', prefix=device_topic, callback=self.new_data) def run_from_csv(self): '''Enter location for the data file if using text csv. Entry can be through file entry window using TKinter or through configuration file as input_file. ''' if not self.input_file: _log.error('No csv file not found ...') raise Exception print self.input_file if(not isfile(self.input_file)): raise Exception _, filextension = splitext(self.input_file) if filextension != '.csv' and filextension != '': _log.error('Input file must be a csv.') raise Exception bldg_data = self.read_oae_pandas() return bldg_data def read_oae_pandas(self): '''Parse metered data for RTU or AHU and provide to diagnostic algorithms. Uses panda library to efficiently parse the csv data and returns a panda time-series. 
''' import pandas data = pandas.read_csv(self.input_file, error_bad_lines=False, sep=',') data = data.dropna() return data def process_file_data(self, device_data): '''Format parsed data from csv file.''' data = {} for _, row in device_data.iterrows(): data[self.fan_status_name] = row[self.fan_status_name] data[self.oatemp_name] = row[self.oatemp_name] data[self.ratemp_name] = row[self.ratemp_name] data[self.matemp_name] = row[self.matemp_name] data[self.oa_damper_name] = row[self.oa_damper_name] data[self.cool_call_name] = row[self.cool_call_name] data[self.timestamp_name] = dateutil.parser.parse(row[self.timestamp_name]) if self.cool_cmd_name: data[self.cool_cmd_name] = row[self.cool_cmd_name] if self.heat_cmd_name: data[self.heat_cmd_name] = row[self.heat_cmd_name] if self.first_data_scrape: for key in self.data_status: self.data_status[key] = data[key] self.first_data_scrape = False self.check_device_status(data) self.update_device_status(data) def new_data(self, peer, sender, bus, topic, headers, message): '''Receive real-time device data.''' _log.info('Data Received') data = message[0] _log.info(data) if self.first_data_scrape: for key in self.data_status: self.data_status[key] = data[key] self.first_data_scrape = False current_time_stamp = dateutil.parser.parse(headers['Date']) data.update({self.timestamp_name: current_time_stamp}) self.check_device_status(data) self.update_device_status(data) def check_device_status(self, device_data): '''Check if the device status has changed from last measurement.''' for key, value in self.data_status.items(): if device_data[key] != value or (self.timestamp and device_data[self.timestamp_name].hour != self.timestamp[-1].hour): self.run_diagnostics() break self.data_collector(device_data) def update_device_status(self, device_data): '''Update the device status (cooling, heating, ventilating.''' for key, _ in self.data_status.items(): self.data_status[key] = device_data[key] def data_collector(self, device_data): '''Store 
data by state and timestamp.''' self.oatemp.append(device_data[self.oatemp_name]) self.ratemp.append(device_data[self.ratemp_name]) self.matemp.append(device_data[self.matemp_name]) self.oa_damper.append(device_data[self.oa_damper_name]) self.cool_call.append(device_data[self.cool_call_name]) self.fan_status.append(device_data[self.fan_status_name]) self.timestamp.append(device_data[self.timestamp_name]) if self.heat_cmd_name: self.heating_status.append(device_data[self.heat_cmd_name]) if self.cool_cmd_name: self.compressor_status.append(device_data[self.cool_cmd_name]) def run_diagnostics(self): '''Use aggregated data to run diagnostics.''' oatemp = sum(self.oatemp)/len(self.oatemp) matemp = sum(self.matemp)/len(self.matemp) ratemp = sum(self.ratemp)/len(self.ratemp) oa_damper = sum(self.oa_damper)/len(self.oa_damper) cooling = max(self.cool_call) heating = max(self.heating_status) if self.heating_status else False compressor = max(self.compressor_status) if self.compressor_status else False fan_status = max(self.fan_status) beginning = self.timestamp[0] end = self.timestamp[-1] try: if fan_status: oaf = [(m - r)/(o - r) for o, r, m in zip(self.oatemp, self.ratemp, self.matemp)] oaf = sum(oaf)/len(oaf)*100.0 else: oaf = 'OFF' except: oaf = None self.reinit() _log.info('Performing Diagnostic') oae_1 = self.sensor_diagnostic(cooling, heating, matemp, ratemp, oatemp) if fan_status else 29 oae_2 = self.economizer_diagnostic1(oatemp, ratemp, matemp, cooling, compressor, oa_damper, oaf) if fan_status else 39 oae_3 = self.economizer_diagnostic2(oatemp, ratemp, cooling, oa_damper) if fan_status else 49 oae_4 = self.excess_oa_intake(oatemp, ratemp, matemp, cooling, compressor, heating, oa_damper, oaf) if fan_status else 59 oae_5 = self.insufficient_ventilation(oatemp, ratemp, matemp, cooling, compressor, heating, oa_damper, oaf) if fan_status else 69 oae_6 = self.schedule_diagnostic(cooling, fan_status, end) energy_impact = self.calculate_energy_impact(oae_2, oae_3, oae_4, 
oatemp, ratemp, matemp) if fan_status else 'OFF' if oaf != 'OFF': if oaf < 0: oaf = 0 if oaf > -5 else 'inconclusive' if oaf > 100: oaf = 100 if oaf < 115 else 'inconclusive' results = [beginning, end, oae_1, oae_2, oae_3, oae_4, oae_5, oae_6, energy_impact, oaf] _log.debug('results: {}'.format(results)) self.result_writer(results) def reinit(self): self.oatemp = [] self.ratemp = [] self.matemp = [] self.oa_damper = [] self.cool_call = [] self.heating_status = [] self.compressor_status = [] self.timestamp = [] self.fan_status = [] self.first_data_scrape = True self.data_status = self.data_status.fromkeys(self.status_lst, None) def sensor_diagnostic(self, cooling, heating, matemp, ratemp, oatemp): '''RTU temperature sensor diagnostic.''' # RAT sensor outside of expected operating range. if ratemp < self.rat_low or ratemp > self.rat_high: return 24 # OAT sensor outside of expected operating range. if oatemp < self.oat_low or oatemp > self.oat_high: return 25 # Conditions not favorable for diagnostic. if self.matemp_missing and (cooling or heating): return 22 # MAT sensor outside of expected operating range. if matemp < self.mat_low or matemp > self.mat_high: return 23 # Temperature sensor problem detected. if (matemp - ratemp > self.temp_sensor_threshold and matemp - oatemp > self.temp_sensor_threshold): return 21 # Temperature sensor problem detected. if (ratemp - matemp > self.temp_sensor_threshold and oatemp - matemp > self.temp_sensor_threshold): return 21 return 20 def economizer_diagnostic1(self, oatemp, ratemp, matemp, cooling, compressor, oa_damper, oaf): # unit is not cooling. if not cooling: return 31 # economizer_type is not properly configured. if self.economizer_type not in self.economizer_types: return 32 if self.economizer_type == 'differential_ddb': # Outdoor conditions are not conducive to diagnostic. if ratemp - oatemp < self.uncertainty_band: return 33 if self.economizer_type == 'highlimit': # Outdoor conditions are not conducive to diagnostic. 
if self.high_limit - oatemp < self.uncertainty_band: return 33 # Outdoor damper is not open fully to utilize economizing. if 100.0 - oa_damper > self.oae2_damper_threshold: return 34 # OAT and RAT are too close for conclusive diagnostic. if math.fabs(oatemp - ratemp) < self.temperature_diff_requirement: return 35 # MAT sensor is not measured and mechanical cooling is ON. # OA damper is open for economizing (NF). if self.matemp_missing and compressor: return 35 # OAF calculation resulted in an unexpected value. if oaf is None or oaf < - 0.1 or oaf > 125: return 36 # OAF is too low. if 100.0 - oaf > self.oae2_oaf_threshold: return 32 return 30 def economizer_diagnostic2(self, oatemp, ratemp, cooling, oa_damper): '''Unit is cooling.''' if cooling or not self.cool_call_measured: if self.economizer_type not in self.economizer_types: return 41 if self.economizer_type == 'differential_ddb': if oatemp - ratemp < self.uncertainty_band: return 42 if self.economizer_type == 'highlimit': if oatemp - self.hightlimit < self.uncertainty_band: return 42 if oa_damper > self.min_oa_damper*1.25: return 43 return 40 def excess_oa_intake(self, oatemp, ratemp, matemp, cooling, compressor, heating, oa_damper, oaf): if cooling or not self.cool_call_measured: # econmozier_type is not properly configured. if self.economizer_type not in self.economizer_types: return 51 if self.economizer_type == 'differential_ddb': # Outdoor conditions are not conducive to diagnostic. if oatemp - ratemp < self.uncertainty_band: return 52 if self.economizer_type == 'highlimit': # Outdoor conditions are not conducive to diagnostic. if oatemp - self.high_limit < self.uncertainty_band: return 52 # Outdoor damper is not open fully to utilize economizing. if oa_damper > self.min_oa_damper*1.25: return 53 # OAT and RAT are too close for conclusive diagnostic. if math.fabs(oatemp - ratemp) < self.temperature_diff_requirement: return 54 # MAT sensor is not measured and mechanical cooling/heating is ON. 
if self.matemp_missing and (compressor or heating): return 54 # OAF calculation resulted in an unexpected value. if oaf is None or oaf < -0.1 or oaf > 125: return 55 # Unit is brining in excess OA. if oaf > self.minimum_oa*1.25: return 56 # No problems detected. return 50 def insufficient_ventilation(self, oatemp, ratemp, matemp, cooling, compressor, heating, oa_damper, oaf): # Damper is significantly below the minimum damper set point (F). if self.min_oa_damper - oa_damper > self.damper_deadband: return 61 # Conditions are not favorable for OAF calculation (No Fault). if math.fabs(oatemp - ratemp) < self.temperature_diff_requirement: return 62 # Unexpected result for OAF calculation (No Fault). if oaf is None or oaf < -0.1 or oaf > 125: return 68 # MAT sensor is not measured and mechanical cooling/heating is ON. if self.matemp_missing and (compressor or heating): return 62 # Unit is bringing in insufficient OA (Fault) if self.minimum_oa - oaf > self.oae5_oaf_threshold: return 61 return 60 def schedule_diagnostic(self, cooling, fan_status, end_time): '''Simple Schedule diagnostic.''' if cooling or fan_status: day = end_time.weekday() sched = self.schedule_dict[day] start = int(sched[0]) end = int(sched[1]) if end_time.hour < start or end_time.hour > end: return 71 return 70 def calculate_energy_impact(self, oae_2, oae_3, oae_4, oatemp, ratemp, matemp): '''Estimate energy impact.''' energy_impact = None if oae_2 == 32 or oae_2 == 33 and matemp > oatemp: energy_impact = (1.08*self.cfm*(matemp - oatemp)/(1000*self.eer)) if oae_3 == 41 or oae_4 == 51 or oae_4 == 53 and oatemp > matemp: ei = 1.08*self.cfm/(1000*self.eer) ei = ei*(matemp - (oatemp*self.minimum_oa + ratemp*(1 - self.minimum_oa))) energy_impact = ei if ei > energy_impact else energy_impact if energy_impact is None or energy_impact < 0: energy_impact = 'inconclusive' return energy_impact def result_writer(self, contents): '''Data is aggregated into hourly or smaller intervals based on compressor status, 
heating status, and supply fan status for analysis. result_writer receives the diagnostic results and associated energy impact and writes the values to csv. ''' try: if not isfile(self.result_file_path): ofile = open(self.result_file_path, 'a+') outs = csv.writer(ofile, dialect='excel') writer = csv.DictWriter(ofile, fieldnames=['Beginning', 'End', 'OAE1', 'OAE2', 'OAE3', 'OAE4', 'OAE5', 'OAE6', 'Energy_Impact', 'OAF'], delimiter=',') writer.writeheader() else: ofile = open(self.result_file_path, 'a+') outs = csv.writer(ofile, dialect='excel') outs.writerow(contents) ofile.close() except IOError: print('Output error please close results file and rerun.') return return PassiveAFDD(**kwargs)
def set_points_name(self, config_path):
    '''Load point names, thresholds and AFDD parameters from *config_path*.

    Populates all controller point-name attributes and the per-diagnostic
    (AFDD0-AFDD6) threshold attributes, then configures logging.
    '''
    config = utils.load_config(config_path)
    self.agent_id = config.get('agentid')
    self.headers = {
        'Content-Type': 'text/plain',
        'requesterID': self.agent_id
    }
    self.rtu_path = dict((key, config[key])
                         for key in ['campus', 'building', 'unit'])
    self.smap_path = config.get('smap_path')
    ##From Configuration file
    #Controller Points
    self.volttron_flag = config.get('volttron_flag')
    self.oat_name = config.get('oat_point_name')
    self.rat_name = config.get('rat_point_name')
    self.mat_name = config.get('mat_point_name')
    self.dat_name = config.get('dat_point_name')
    self.fan_status_name = config.get('fan_status_point_name')
    self.coolcall1_name = config.get('cool_call1_point_name')
    self.coolcall2_name = config.get('cool_call2_point_name')
    self.coolcmd1_name = config.get('cool_cmd1_point_name')
    self.coolcmd2_name = config.get('cool_cmd2_point_name')
    self.heat_cmd1_name = config.get('heat_command1_point_name')
    self.heat_cmd2_name = config.get('heat_command2_point_name')
    self.damper_name = config.get('damper_point_name')
    self.damper_command_name = config.get('damper_command_name')
    self.oat_bias_name = config.get('oat_bias')
    self.fan_speed = config.get('fan_command_name')
    self.mat_missing = config.get('mixed_air_sensor_missing')
    #Global parameters and thresholds
    self.oat_min = config.get('min_oa_temperature')
    self.oat_max = config.get('max_oa_temperature')
    self.rat_min = config.get('min_ra_temperature')
    self.rat_max = config.get('max_ra_temperature')
    self.mat_min = config.get('min_ma_temperature')
    self.mat_max = config.get('max_ma_temperature')
    self.seconds_to_steady_state = config.get('seconds_to_steady_state')
    self.minutes_to_average = config.get('minutes_to_average')
    self.cfm = config.get('cfm')
    self.EER = config.get('EER')
    self.economizertype = config.get('economizertype')
    self.high_limit = config.get('high_limit')
    # NOTE(review): this config key contains an embedded space -- it must
    # match the key in the configuration file exactly; confirm.
    self.afdd0_threshold = config.get('afdd0_mat_dat_consistency _threshold')
    #AFDD1 threshold
    self.afdd1_econ_threshold = config.get('afdd1_econ_temp_differential')
    self.afdd1_damper_threshold = config.get('afdd1_damper_modulation_threshold')
    #AFDD2 thresholds
    self.afdd2_temp_sensor_threshold = config.get('afdd2_tempsensorfault_threshold')
    self.afdd2_oat_mat_threshold = config.get('afdd2_oat_mat_consistency_threshold')
    self.afdd2_rat_mat_threshold = config.get('afdd2_rat_mat_consistency_threshold')
    #AFDD3 thresholds
    self.afdd3_oaf_threshold = config.get('afdd3_oaf_threshold')
    self.afdd3_econ_differential = config.get('afdd3_econ_temp_differential')
    self.afdd3_temp_differential = config.get('afdd3_oat_rat_temperature_difference_threshold')
    self.afdd3_open_damper_threshold = config.get('afdd3_open_damper_threshold')
    #AFDD4 thresholds
    self.afdd4_econ_differential = config.get('afdd4_econ_temp_differential')
    self.afdd4_damper_threshold = config.get('afdd4_damper_threshold')
    self.minimum_damper = config.get('minimum_damper_command')
    #AFDD5 thresholds
    self.afdd5_econ_differential = config.get('afdd5_econ_temp_differential')
    self.afdd5_temp_differential = config.get('afdd5_oat_rat_temperature_difference_threshold')
    self.afdd5_damper_threshold = config.get('afdd5_damper_threshold')
    self.afdd5_oaf_threshold = config.get('afdd5_oaf_threshold')
    self.minimum_oa = config.get('afdd5_minimum_oa')
    #AFDD6 thresholds
    self.afdd6_damper_threshold = config.get('afdd6_damper_threshold')
    self.afdd6_min_oa = config.get('afdd6_min_oa')
    self.afdd6_econ_differential = config.get('afdd6_econ_temp_differential')
    self.afdd6_temp_differential = config.get('afdd6_oat_rat_temperature_difference_threshold')
    self.afdd6_oaf_threshold = config.get('afdd6_oaf_threshold')
    utils.setup_logging()
    self._log = logging.getLogger(__name__)
    # BUG FIX: ``level`` must be the constant logging.DEBUG; the original
    # passed the *function* logging.debug, which is not a valid level.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.

    Factory that validates the configuration, instantiates the configured
    application class and returns a message-bus agent that feeds it device
    data and (in ACTIVE mode) writes its commands back via the actuator.
    '''
    config = utils.load_config(config_path)
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building'])
    subdevices = []
    conv_map = config.get('conversion_map')
    map_names = {}
    for key, value in conv_map.items():
        map_names[key.lower() if isinstance(key, str) else key] = value
    # this implies a sub-device listing
    multiple_dev = isinstance(config['device']['unit'], dict)
    # NOTE(review): ``units`` is only bound when unit is a dict; a plain
    # unit entry would raise NameError below -- confirm config contract.
    if multiple_dev:
        # Assumption that there will be only one entry in the dictionary.
        units = config['device']['unit'].keys()
        for item in units:
            subdevices.extend(config['device']['unit'][item]['subdevices'])
    # modify the device dict so that unit is now pointing to unit_name
    agent_id = config.get('agentid')
    device.update({'unit': units})
    _analysis = deepcopy(device)
    _analysis_name = config.get('device').get('analysis_name',
                                              'analysis_name')
    _analysis.update({'analysis_name': _analysis_name})
    # BUG FIX: the original tested ``device`` twice (copy-paste); the
    # first message refers to agent_id, so test agent_id.
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    actuator_id = (
        agent_id + '_' + "{campus}/{building}/{unit}".format(**device)
    )
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: ``level`` must be the constant logging.DEBUG; the original
    # passed the *function* logging.debug, which is not a valid level.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file = config.get('output_file')
    base_dev = "devices/{campus}/{building}/".format(**device)
    # Regex topic matching any configured unit's .../all publication.
    devices_topic = (
        base_dev + '({})(/.*)?/all$'
        .format('|'.join(re.escape(p) for p in units)))
    klass = _get_class(application)
    # This instance is used to call the application's run method when
    # data comes in on the message bus.  It is constructed here so that
    # each time run is called the application can keep its state.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None
            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self._master_subdevices = subdevices
            self._needed_subdevices = []
            self._master_devices = units
            self._subdevice_values = {}
            self._needed_devices = []
            self._device_values = {}
            self._initialize_devices()
            self.received_input_datetime = None
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            if output_file is not None:
                # Truncate any previous results file.
                with open(output_file, 'w') as writer:
                    writer.close()
            self._header_written = False

        def _initialize_devices(self):
            '''Reset the lists of devices still awaited and their values.'''
            self._needed_subdevices = deepcopy(self._master_subdevices)
            self._needed_devices = deepcopy(self._master_devices)
            self._subdevice_values = {}
            self._device_values = {}

        def _should_run_now(self):
            '''Run only once every master device and subdevice reported.'''
            # Assumes the unit/all values will have values.
            if not len(self._device_values.keys()) > 0:
                return False
            return not (len(self._needed_subdevices) > 0 or
                        len(self._needed_devices) > 0)

        @matching.match_regex(devices_topic)
        def on_rec_analysis_message(self, topic, headers, message, matched):
            '''Collect device/subdevice data; run the application when all
            expected publications have arrived.'''
            obj = jsonapi.loads(message[0])
            dev_list = topic.split('/')
            device_or_subdevice = dev_list[-2]
            device_id = [dev for dev in self._master_devices
                         if dev == device_or_subdevice]
            subdevice_id = [dev for dev in self._master_subdevices
                            if dev == device_or_subdevice]
            if not device_id and not subdevice_id:
                return
            if isinstance(device_or_subdevice, unicode):
                device_or_subdevice = (
                    device_or_subdevice.decode('utf-8').encode('ascii')
                )

            def agg_subdevice(obj):
                '''Suffix keys with the device name and mark it received.'''
                sub_obj = {}
                for key, value in obj.items():
                    sub_key = ''.join([key, '_', device_or_subdevice])
                    sub_obj[sub_key] = value
                # Topic depth distinguishes unit/all from unit/subdevice/all.
                if len(dev_list) > 5:
                    self._subdevice_values.update(sub_obj)
                    self._needed_subdevices.remove(device_or_subdevice)
                else:
                    self._device_values.update(sub_obj)
                    self._needed_devices.remove(device_or_subdevice)
                return
            # The below if statement is used to distinguish between unit/all
            # and unit/sub-device/all
            if (device_or_subdevice not in self._needed_devices and
                    device_or_subdevice not in self._needed_subdevices):
                _log.error("Warning device values already present, "
                           "reinitializing")
                self._initialize_devices()
            agg_subdevice(obj)
            if self._should_run_now():
                field_names = {}
                self._device_values.update(self._subdevice_values)
                for k, v in self._device_values.items():
                    field_names[k.lower() if isinstance(k, str) else k] = v
                if not converter.initialized and conv_map is not None:
                    converter.setup_conversion_map(
                        map_names,
                        field_names
                    )
                obj = converter.process_row(field_names)
                results = app_instance.run(datetime.now(), obj)
                self.received_input_datetime = datetime.utcnow()
                self._process_results(results)
                self._initialize_devices()
            else:
                needed = deepcopy(self._needed_devices)
                needed.extend(self._needed_subdevices)
                _log.info("Still need {} before running."
                          .format(needed))

        def _process_results(self, results):
            '''Run driven application with converted data and write the
            app results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            if output_file is not None:
                if len(results.table_output.keys()) > 0:
                    for _, v in results.table_output.items():
                        fname = output_file
                        for r in v:
                            # ``with`` closes the file; the original also
                            # called f.close() redundantly inside the block.
                            with open(fname, 'a+') as f:
                                keys = r.keys()
                                fout = csv.DictWriter(f, keys)
                                if not self._header_written:
                                    fout.writeheader()
                                    self._header_written = True
                                fout.writerow(r)
            # publish to message bus.
            if len(results.table_output.keys()) > 0:
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: str(self.received_input_datetime),
                }
                for _, v in results.table_output.items():
                    for r in v:
                        for key, value in r.iteritems():
                            if isinstance(value, bool):
                                value = int(value)
                            for item in units:
                                _analysis['unit'] = item
                                analysis_topic = topics.ANALYSIS_VALUE(
                                    point=key, **_analysis)
                                self.publish_json(analysis_topic,
                                                  headers, value)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.now()
            end = start + td(seconds=30)
            start = str(start)
            end = str(end)
            self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                              [["{campus}/{building}/{unit}".format(**device),
                                start, end]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key,
                                             **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])
            msg = msg['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success: {point} - {value}'
                       .format(point=self.current_key,
                               value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.
                       format(self.current_key,
                              self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])
            msg = msg['type']
            _log.debug('Actuator Error: ({}, {}, {})'.
                       format(msg, self.current_key,
                              self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.

    Builds and returns an Agent instance that listens for device data,
    feeds it to the configured application, writes results to an output
    file, pushes diagnostics to sMAP, and (in ACTIVE mode) commands the
    device through the actuator agent.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    # ACTIVE mode allows the application to write commands back to devices.
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building', 'unit'])
    # Collect required subdevices per unit, if any are configured.
    subdevices = {}
    for unit in device['unit']:
        if 'subdevices' in device['unit'][unit]:
            subdevices[unit] = device['unit'][unit]['subdevices']
    agent_id = config.get('agentid')
    smap_path = config.get('smap_path')
    # BUG FIX: the original tested `device` twice, so a missing agentid was
    # never reported.
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.debug is a function; the level constant is
    # logging.DEBUG.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # BUG FIX: actuator_id is now built after validation so a missing
    # agentid raises ValueError instead of TypeError on concatenation.
    actuator_id = agent_id + '_' + "{campus}/{building}/{unit}".format(**device)
    # BUG FIX: dict.update(None) raises TypeError when 'arguments' is absent.
    config.update(config.get('arguments', {}))
    converter = ConversionMapper()
    output_file = config.get('output_file')
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that the
    # application can keep its state between calls to run.
    app_instance = klass(**config)
    _log.debug("TOPIC VALUE: {}".format(topics.ANALYSIS_VALUE))
    _log.debug("TOPIC VALUE: {}".format(topics.DEVICES_VALUE))

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None                  # points still awaiting commands
            self._device_states = {}
            self._required_subdevice_values = subdevices
            self._subdevice_values = {}
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            if output_file is not None:
                # Truncate any stale output from a previous run.
                open(output_file, 'w').close()
            self._header_written = False

        def initialize_subdevices(self):
            '''Reset every required sub-device value to "not yet seen".'''
            self._subdevice_values = {}
            for unit in self._required_subdevice_values:
                # BUG FIX: the inner dict must exist before indexing, and the
                # loop must walk the configured subdevice list, not the
                # characters of the unit name.
                self._subdevice_values[unit] = {}
                for sub in self._required_subdevice_values[unit]:
                    self._subdevice_values[unit][sub] = None

        def should_run_now(self):
            '''Return True when every required sub-device value has arrived
            (or no sub-devices are required at all).
            '''
            if len(self._required_subdevice_values) < 1:
                return True

            def has_subdevice_value(unit, subdevice):
                # BUG FIX: attribute was misspelled `self.subdevice_value`.
                return self._subdevice_values[unit][subdevice] is not None

            for unit in self._required_subdevice_values:
                for sub in self._required_subdevice_values[unit]:
                    if not has_subdevice_value(unit, sub):
                        return False
            return True

        @matching.match_exact(topics.DEVICES_VALUE(point='all', **device))
        def on_received_message(self, topic, headers, message, matched):
            '''Subscribe to device data and convert data to correct type for
            the driven application.
            '''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])
            # Lazily build the conversion map from the first payload's keys.
            if not converter.initialized and \
                    config.get('conversion_map') is not None:
                converter.setup_conversion_map(config.get('conversion_map'),
                                               data.keys())
            data = converter.process_row(data)
            if len(self._required_subdevice_values) < 1:
                results = app_instance.run(datetime.now(), data)
                self._process_results(results)
            else:
                # apply data to subdevice values.
                if self.should_run_now():
                    results = app_instance.run(datetime.now(),
                                               self._subdevice_values)
                    self._process_results(results)

        @matching.match_exact(topics.ANALYSIS_VALUE(point='all', **device))
        def on_rec_analysis_message(self, topic, headers, message, matched):
            # Placeholder: analysis messages are currently only logged.
            _log.debug('here!')

        def _process_results(self, results):
            '''Run driven application with converted data and write the app
            results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            if output_file is not None and len(results.table_output.keys()) > 0:
                for _, rows in results.table_output.items():
                    for row in rows:
                        # BUG FIX: removed the redundant f.close() inside the
                        # `with` block; the context manager closes the file.
                        with open(output_file, 'a+') as f:
                            fout = csv.DictWriter(f, row.keys())
                            if not self._header_written:
                                fout.writeheader()
                                self._header_written = True
                            fout.writerow(row)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.now()
            end = start + td(seconds=30)
            self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                              [["{campus}/{building}/{unit}".format(**device),
                                str(start), str(end)]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key, **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            # Consistency fix: use the module logger instead of bare print.
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success: {point} - {value}'
                       .format(point=self.current_key,
                               value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.format(
                self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])['type']
            _log.debug('Actuator Error: ({}, {}, {})'.format(
                msg, self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        def publish_to_smap(self, smap_identifier, value, smap_identifier2,
                            value2, time_value):
            '''Push diagnostic results and energy impact to sMAP historian.

            :param smap_identifier: sMAP path suffix for the diagnostic value.
            :param value: diagnostic value to publish.
            :param smap_identifier2: sMAP path suffix for the energy impact,
                used only when value2 is not None.
            :param value2: energy impact value, or None to omit it.
            :param time_value: datetime of the reading; None means "now".
            '''
            # BUG FIX: the original logged undefined names (dx_msg,
            # energy_impact) through an undefined self._log; log the actual
            # arguments with the module logger instead.
            _log.debug(''.join(['Push to sMAP - ', smap_identifier,
                                str(value), ' Energy Impact: ', str(value2)]))
            if time_value is None:
                mytime = int(time.time())
            else:
                mytime = time.mktime(time_value.timetuple())
            content = {
                smap_identifier: {
                    "Readings": [[mytime, value]],
                    "Units": "TU",
                    "data_type": "double"
                }
            }
            if value2 is not None:
                content[smap_identifier2] = {
                    "Readings": [[mytime, value2]],
                    "Units": "kWh/h",
                    "data_type": "double"
                }
            # BUG FIX: self._agent, self.smap_path and self.headers were
            # undefined on this class; publish directly using the configured
            # smap_path from the enclosing closure.
            headers = {headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                       'requesterID': agent_id}
            self.publish(smap_path, headers, jsonapi.dumps(content))

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.

    Builds and returns an Agent instance that listens for device data,
    feeds it to the configured application, writes results to an output
    file and the message bus, and (in ACTIVE mode) commands the device
    through the actuator agent.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    # ACTIVE mode allows the application to write commands back to devices.
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building', 'unit'])
    agent_id = config.get('agentid')
    # BUG FIX: the original tested `device` twice, so a missing agentid was
    # never reported.
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.debug is a function; the level constant is
    # logging.DEBUG.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # BUG FIX: actuator_id is now built after validation so a missing
    # agentid raises ValueError instead of TypeError on concatenation.
    actuator_id = agent_id + '_' + "{campus}/{building}/{unit}".format(**device)
    # BUG FIX: dict.update(None) raises TypeError when 'arguments' is absent.
    config.update(config.get('arguments', {}))
    output_file = config.get('output_file')
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that the
    # application can keep its state between calls to run.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None                  # points still awaiting commands
            self._device_states = {}
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            self.received_input_datetime = None
            if output_file is not None:
                # Truncate any stale output from a previous run.
                open(output_file, 'w').close()
            self._header_written = False

        @matching.match_exact(topics.DEVICES_VALUE(point='all', **device))
        def on_received_message(self, topic, headers, message, matched):
            '''Subscribe to device data and convert data to correct type for
            the driven application.
            '''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])
            # TODO: grab the time from the header if it's there, use now if
            # not.
            self.received_input_datetime = datetime.utcnow()
            results = app_instance.run(self.received_input_datetime, data)
            self._process_results(results)

        def _process_results(self, results):
            '''Run driven application with converted data and write the app
            results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            # publish to output file if available.
            if output_file is not None and len(results.table_output.keys()) > 0:
                for _, rows in results.table_output.items():
                    for row in rows:
                        # BUG FIX: removed the redundant f.close() inside the
                        # `with` block; the context manager closes the file.
                        with open(output_file, 'a+') as f:
                            fout = csv.DictWriter(f, row.keys())
                            if not self._header_written:
                                fout.writeheader()
                                self._header_written = True
                            fout.writerow(row)
            # publish to message bus.
            if len(results.table_output.keys()) > 0:
                now = utils.format_timestamp(self.received_input_datetime)
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: now,
                    headers_mod.TIMESTAMP: now
                }
                for _, rows in results.table_output.items():
                    for row in rows:
                        for key, value in row.iteritems():
                            if isinstance(value, bool):
                                # Publish booleans as 0/1.
                                value = int(value)
                            topic = topics.ANALYSIS_VALUE(
                                point=key, **config['device'])
                            self.publish_json(topic, headers, value)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.now()
            end = start + td(seconds=300)
            _log.debug("{campus}/{building}/{unit}".format(**device))
            self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                              [["{campus}/{building}/{unit}".format(**device),
                                str(start), str(end)]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key, **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            # Consistency fix: use the module logger instead of bare print.
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success: {point} - {value}'
                       .format(point=self.current_key,
                               value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.format(
                self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])['type']
            _log.debug('Actuator Error: ({}, {}, {})'.format(
                msg, self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

    Agent.__name__ = agent_id
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven logging harness: run an OpenEIS application on published
    device data and log its commands, messages and table output.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    validation_error = ""
    device_topic = config.get('device')
    if not device_topic:
        validation_error += "Invalid device specified in config\n"
    elif not device_topic[-4:] == '/all':
        # Subscribe to the all-points topic for the device.
        device_topic += '/all'
    application = config.get('application')
    if not application:
        validation_error += "Invalid application specified in config\n"
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.debug is a function; the level constant is
    # logging.DEBUG.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # NOTE: removed unused locals from the original ('agentid' and
    # 'conversion_map' were read from config but never referenced).
    config.update(kwargs)
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that the
    # application can keep its state between calls to run.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self._device_states = {}
            self._kwargs = kwargs
            _log.debug("device_topic is set to: " + device_topic)

        @matching.match_exact(device_topic)
        def on_received_message(self, topic, headers, message, matched):
            '''Run the application against each published device reading.'''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])
            results = app_instance.run(datetime.now(), data)
            self._process_results(results)

        def _process_results(self, results):
            '''Log the commands, messages and table output produced by the
            application.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.

    Builds and returns an Agent instance that listens for device data,
    feeds it to the configured application, writes results to an output
    file and the message bus, and (in ACTIVE mode) commands the device
    through the actuator agent.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    # ACTIVE mode allows the application to write commands back to devices.
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building', 'unit'])
    agent_id = config.get('agentid')
    # BUG FIX: the original tested `device` twice, so a missing agentid was
    # never reported.
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.debug is a function; the level constant is
    # logging.DEBUG.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # BUG FIX: actuator_id is now built after validation so a missing
    # agentid raises ValueError instead of TypeError on concatenation.
    actuator_id = agent_id + '_' + "{campus}/{building}/{unit}".format(**device)
    # BUG FIX: dict.update(None) raises TypeError when 'arguments' is absent.
    config.update(config.get('arguments', {}))
    output_file = config.get('output_file')
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that the
    # application can keep its state between calls to run.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None                  # points still awaiting commands
            self._device_states = {}
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            self.received_input_datetime = None
            if output_file is not None:
                # Truncate any stale output from a previous run.
                open(output_file, 'w').close()
            self._header_written = False

        @matching.match_exact(topics.DEVICES_VALUE(point='all', **device))
        def on_received_message(self, topic, headers, message, matched):
            '''Subscribe to device data and convert data to correct type for
            the driven application.
            '''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])
            # TODO: grab the time from the header if it's there, use now if
            # not.
            self.received_input_datetime = datetime.utcnow()
            results = app_instance.run(self.received_input_datetime, data)
            self._process_results(results)

        def _process_results(self, results):
            '''Run driven application with converted data and write the app
            results to a file or database.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            # publish to output file if available.
            if output_file is not None and len(results.table_output.keys()) > 0:
                for _, rows in results.table_output.items():
                    for row in rows:
                        # BUG FIX: removed the redundant f.close() inside the
                        # `with` block; the context manager closes the file.
                        with open(output_file, 'a+') as f:
                            fout = csv.DictWriter(f, row.keys())
                            if not self._header_written:
                                fout.writeheader()
                                self._header_written = True
                            fout.writerow(row)
            # publish to message bus.
            if len(results.table_output.keys()) > 0:
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: str(self.received_input_datetime),
                }
                for _, rows in results.table_output.items():
                    for row in rows:
                        for key, value in row.iteritems():
                            if isinstance(value, bool):
                                # Publish booleans as 0/1.
                                value = int(value)
                            topic = topics.ANALYSIS_VALUE(
                                point=key, **config['device'])
                            self.publish_json(topic, headers, value)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.now()
            end = start + td(seconds=300)
            _log.debug("{campus}/{building}/{unit}".format(**device))
            self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                              [["{campus}/{building}/{unit}".format(**device),
                                str(start), str(end)]])

        def command_equip(self):
            '''Execute commands on configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key, **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCCESS).'''
            # Consistency fix: use the module logger instead of bare print.
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success: {point} - {value}'
                       .format(point=self.current_key,
                               value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.format(
                self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])['type']
            _log.debug('Actuator Error: ({}, {}, {})'.format(
                msg, self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

    Agent.__name__ = agent_id
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven logging harness: run an OpenEIS application on published
    device data and log its commands, messages and table output.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    validation_error = ""
    device_topic = config.get('device')
    if not device_topic:
        validation_error += "Invalid device specified in config\n"
    elif not device_topic[-4:] == '/all':
        # Subscribe to the all-points topic for the device.
        device_topic += '/all'
    application = config.get('application')
    if not application:
        validation_error += "Invalid application specified in config\n"
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.debug is a function; the level constant is
    # logging.DEBUG.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # NOTE: removed unused locals from the original ('agentid' and
    # 'conversion_map' were read from config but never referenced).
    config.update(kwargs)
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that the
    # application can keep its state between calls to run.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self._device_states = {}
            self._kwargs = kwargs
            _log.debug("device_topic is set to: " + device_topic)

        @matching.match_exact(device_topic)
        def on_received_message(self, topic, headers, message, matched):
            '''Run the application against each published device reading.'''
            _log.debug("Message received")
            _log.debug("MESSAGE: " + jsonapi.dumps(message[0]))
            _log.debug("TOPIC: " + topic)
            data = jsonapi.loads(message[0])
            results = app_instance.run(datetime.now(), data)
            self._process_results(results)

        def _process_results(self, results):
            '''Log the commands, messages and table output produced by the
            application.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by # BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY # under Contract DE-AC05-76RL01830 # }}} import logging import sys import bson from bson import ObjectId import pymongo from volttron.platform.agent import utils from volttron.platform.agent.base_aggregate_historian import AggregateHistorian from volttron.platform.dbutils import mongoutils utils.setup_logging(logging.DEBUG) _log = logging.getLogger(__name__) __version__ = '1.0' class MongodbAggregateHistorian(AggregateHistorian): """ Agent to aggregate data in historian based on a specific time period. This aggregegate historian aggregates data collected by mongo historian. """ def __init__(self, config_path, **kwargs): """ Validate configuration, create connection to historian, create aggregate tables if necessary and set up a periodic call to aggregate data :param config_path: configuration file path
def passiveafdd(config_path, **kwargs):
    '''Passive fault detection application for AHU/RTU economizer systems.

    Builds and returns an agent that either reads historical data from a
    CSV file or subscribes to live/simulated device data on the message
    bus, aggregates it, and runs a battery of economizer diagnostics
    (OAE1-OAE6), writing the fault codes and energy impact out via
    ``result_writer``.
    '''
    config_data = utils.load_config(config_path)
    rtu_path = dict((key, config_data[key])
                    for key in ['campus', 'building', 'unit'])
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.DEBUG (numeric constant), not the logging.debug
    # function, must be passed as the level.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')

    class Agent(PublishMixin, BaseAgent):

        def __init__(self, **kwargs):
            '''Input and initialize user configurable parameters.'''
            super(Agent, self).__init__(**kwargs)
            self.agent_id = config_data.get('agentid')
            self.aggregate_data = int(config_data["aggregate_data"])
            self.matemp_missing = int(config_data["matemp_missing"])
            # Sanity limits for the temperature sensors (deg F).
            self.mat_low = float(config_data["mat_low"])
            self.mat_high = float(config_data["mat_high"])
            self.oat_low = float(config_data["oat_low"])
            self.oat_high = float(config_data["oat_high"])
            self.rat_low = float(config_data["rat_low"])
            self.rat_high = float(config_data["rat_high"])
            self.high_limit = float(config_data["high_limit"])
            self.oae2_damper_threshold = float(
                config_data["oae2_damper_threshold"])
            self.oae2_oaf_threshold = float(config_data["oae2_oaf_threshold"])
            self.economizer_type = int(config_data["economizer_type"])
            self.damper_minimum = float(config_data["damper_minimum"])
            self.minimum_oa = float(config_data["minimum_oa"])
            self.oae4_oaf_threshold = float(config_data["oae4_oaf_threshold"])
            self.oae5_oaf_threshold = float(config_data["oae5_oaf_threshold"])
            self.eer = float(config_data["EER"])
            tonnage = float(config_data["tonnage"])
            # Rule of thumb: ~300 CFM of supply air per ton of cooling.
            self.cfm = 300 * tonnage
            self.csv_input = int(config_data["csv_input"])
            self.timestamp_name = config_data.get('timestamp_name')
            self.input_file = config_data.get('input_file', 'CONFIG_ERROR')
            self.oat_name = config_data.get('oat_point_name')
            self.rat_name = config_data.get('rat_point_name')
            self.mat_name = config_data.get('mat_point_name')
            self.sampling_rate = config_data.get('sampling_rate')
            self.fan_status_name = config_data.get('fan_status_point_name')
            self.cool_cmd_name = config_data.get('cool_cmd_name')
            self.heat_cmd_name = config_data.get('heat_cmd_name')
            self.damper_name = config_data.get('damper_point_name')
            self.mat_missing = config_data.get('mixed_air_sensor_missing')
            self.temp_deadband = config_data.get('temp_deadband')
            # BUG FIX: the original read ``config.get(...)`` but no
            # ``config`` name exists in this scope (NameError); the
            # configuration dictionary here is ``config_data``.
            self.damper_deadband = config_data.get('damper_deadband')
            sunday = config_data.get('Sunday')
            monday = config_data.get('Monday')
            tuesday = config_data.get('Tuesday')
            wednesday = config_data.get('Wednesday')
            thursday = config_data.get('Thursday')
            friday = config_data.get('Friday')
            saturday = config_data.get('Saturday')
            # Weekday index (datetime.weekday-style key) -> [start, end]
            # occupancy hours used by the schedule diagnostic.
            self.schedule_dict = dict({
                0: sunday, 1: monday, 2: tuesday, 3: wednesday,
                4: thursday, 5: friday, 6: saturday
            })
            # Raw (per-sample) accumulators filled by the datahandler.
            self.oaf_raw = []
            self.timestamp_raw = []
            self.matemp_raw = []
            self.oatemp_raw = []
            self.ratemp_raw = []
            self.compressor_raw = []
            self.heating_raw = []
            self.damper_raw = []
            self.fan_status_raw = []
            # Aggregated (per-interval) series consumed by the diagnostics.
            self.oaf = []
            self.timestamp = []
            self.matemp = []
            self.oatemp = []
            self.ratemp = []
            self.compressor = []
            self.heating = []
            self.damper = []
            self.fan_status = []
            self.run_aggregate = None
            # Column names, in the order expected by read_oae_pandas.
            self.names = [
                config_data.get('oat_point_name'),
                config_data.get('mat_point_name'),
                config_data.get('dat_point_name'),
                config_data.get('rat_point_name'),
                config_data.get('damper_point_name'),
                config_data.get('cool_cmd_name'),
                config_data.get('fan_status_point_name'),
                config_data.get('heat_cmd_name')
            ]
            self.file = config_data.get('input_file')

        def setup(self):
            '''Enter location for the data file if using text csv.

            Entry can be through file entry window using TKinter or
            through configuration file as input_file.
            '''
            try:
                super(Agent, self).setup()
                _log.info('Running')
                if self.csv_input:
                    self.file_path = open_file()
                    if self.file_path == '':
                        # BUG FIX: original message was the double negative
                        # 'No csv file not found ...'.
                        _log.info('No csv file found ...')
                        return
                    if (self.file_path == 'File Selected is not a csv' or
                            not self.file_path.endswith('.csv')):
                        _log.info('File must be in CSV format.')
                        return
                    if self.input_file == "CONFIG_ERROR":
                        _log.info(
                            'Check configuration file and add input_file '
                            'parameter as file path to data file')
                        return
                    if self.file_path is None:
                        self.file_path = self.file
                    self.bldg_data = read_oae_pandas(self.file_path,
                                                     self.names)
                    # Live (non-csv) data is processed via the scheduled
                    # call set up in datahandler instead.
                    self.process_data()
            except Exception:
                _log.exception('Error on data input, could not data file...')

        def process_data(self):
            '''Aggregate the data based on compressor status, heating
            status, and supply-fan status where one hour is the largest
            aggregated interval, then run the diagnostics.
            '''
            _log.info('Processing data')
            timestamp = []
            if self.csv_input:
                timestamp_ = self.bldg_data[self.timestamp_name].tolist()
                matemp = self.bldg_data[self.mat_name].tolist()
                oatemp = self.bldg_data[self.oat_name].tolist()
                ratemp = self.bldg_data[self.rat_name].tolist()
                compressor = self.bldg_data[self.cool_cmd_name].tolist()
                heating = self.bldg_data[self.heat_cmd_name].tolist()
                damper = self.bldg_data[self.damper_name].tolist()
                fan_status = self.bldg_data[self.fan_status_name].tolist()
            else:
                timestamp_ = self.timestamp_raw
                matemp = self.matemp_raw
                oatemp = self.oatemp_raw
                ratemp = self.ratemp_raw
                compressor = self.compressor_raw
                heating = self.heating_raw
                damper = self.damper_raw
                fan_status = self.fan_status_raw
            # NOTE(review): in the live path timestamp_raw holds datetime
            # objects, which dateutil cannot parse — confirm the live path
            # always feeds strings here.
            for item in timestamp_:
                timestamp.append(dateutil.parser.parse(item, fuzzy=True))
            if self.aggregate_data:
                temp_damper = []
                temp_mat = []
                temp_oat = []
                temp_rat = []
                for points in xrange(0, len(timestamp) - 1):
                    temp_damper.append(damper[points])
                    temp_oat.append(oatemp[points])
                    temp_mat.append(matemp[points])
                    temp_rat.append(ratemp[points])
                    if timestamp[points].hour != timestamp[points + 1].hour:
                        # Hour boundary: flush the accumulated interval.
                        self.timestamp.append(
                            (timestamp[points] +
                             datetime.timedelta(hours=1)).replace(minute=0))
                        # Zeros are treated as missing readings.
                        temp_oat[:] = (value for value in temp_oat
                                       if value != 0)
                        temp_rat[:] = (value for value in temp_rat
                                       if value != 0)
                        temp_mat[:] = (value for value in temp_mat
                                       if value != 0)
                        self.damper.append(numpy.mean(temp_damper))
                        self.oatemp.append(numpy.mean(temp_oat))
                        self.matemp.append(numpy.mean(temp_mat))
                        self.ratemp.append(numpy.mean(temp_rat))
                        self.compressor.append(compressor[points])
                        self.fan_status.append(fan_status[points])
                        self.heating.append(heating[points])
                        temp_damper = []
                        temp_mat = []
                        temp_oat = []
                        temp_rat = []
                    elif (compressor[points + 1] != compressor[points] or
                          heating[points + 1] != heating[points] or
                          ((timestamp[points + 1] - timestamp[points] >
                            datetime.timedelta(
                                minutes=self.sampling_rate)))):
                        # Equipment state change or data gap: flush early.
                        self.timestamp.append(timestamp[points])
                        temp_oat[:] = (value for value in temp_oat
                                       if value != 0)
                        temp_rat[:] = (value for value in temp_rat
                                       if value != 0)
                        temp_mat[:] = (value for value in temp_mat
                                       if value != 0)
                        self.damper.append(numpy.mean(temp_damper))
                        self.oatemp.append(numpy.mean(temp_oat))
                        self.matemp.append(numpy.mean(temp_mat))
                        self.ratemp.append(numpy.mean(temp_rat))
                        self.compressor.append(compressor[points])
                        self.fan_status.append(fan_status[points])
                        self.heating.append(heating[points])
                        temp_damper = []
                        temp_mat = []
                        temp_oat = []
                        temp_rat = []
                    if (points == len(timestamp) - 2 and not temp_oat):
                        # Final sample just after a flush: emit it alone.
                        temp_damper.append(damper[points + 1])
                        temp_oat.append(oatemp[points + 1])
                        temp_mat.append(matemp[points + 1])
                        temp_rat.append(ratemp[points + 1])
                        self.timestamp.append(timestamp[points + 1])
                        temp_oat[:] = (value for value in temp_oat
                                       if value != 0)
                        temp_rat[:] = (value for value in temp_rat
                                       if value != 0)
                        temp_mat[:] = (value for value in temp_mat
                                       if value != 0)
                        self.damper.append(numpy.mean(temp_damper))
                        self.oatemp.append(numpy.mean(temp_oat))
                        self.matemp.append(numpy.mean(temp_mat))
                        self.ratemp.append(numpy.mean(temp_rat))
                        self.compressor.append(compressor[points + 1])
                        self.fan_status.append(fan_status[points + 1])
                        self.heating.append(heating[points + 1])
                        temp_damper = []
                        temp_mat = []
                        temp_oat = []
                        temp_rat = []
            else:
                # BUG FIX: the original never populated self.timestamp in
                # the non-aggregate branch, leaving newdata == 0 so no
                # diagnostics ever ran.
                self.timestamp = timestamp
                self.matemp = matemp
                self.oatemp = oatemp
                self.ratemp = ratemp
                self.compressor = compressor
                self.heating = heating
                self.damper = damper
                self.fan_status = fan_status
            self.oaf_raw = []
            self.timestamp_raw = []
            self.matemp_raw = []
            self.oatemp_raw = []
            self.ratemp_raw = []
            self.compressor_raw = []
            self.heating_raw = []
            self.damper_raw = []
            self.fan_status_raw = []
            self.newdata = len(self.timestamp)

            def check_nan(data):
                '''Replace any NaN values in data with the -99 sentinel.'''
                length = len(data)
                for x in xrange(0, length):
                    if math.isnan(data[x]):
                        data[x] = -99
                return data

            self.matemp = check_nan(self.matemp)
            self.oatemp = check_nan(self.oatemp)
            self.ratemp = check_nan(self.ratemp)
            self.compressor = check_nan(self.compressor)
            self.heating = check_nan(self.heating)
            self.damper = check_nan(self.damper)
            self.fan_status = check_nan(self.fan_status)
            self.oaf = self.calculate_oaf()
            # self.output_aggregate()  # uncomment to dump aggregated data
            _log.info('Performing Diagnostic')
            oae_1 = self.sensor_diagnostic()
            oae_2 = self.economizer_diagnostic1()
            oae_3 = self.economizer_diagnostic2()
            oae_4 = self.excess_oa_intake()
            oae_5 = self.insufficient_ventilation()
            oae_6 = self.schedule_diagnostic()
            energy_impact = self.calculate_energy_impact(oae_2, oae_3, oae_4)
            contents = [
                self.timestamp, oae_1, oae_2, oae_3, oae_4, oae_5, oae_6,
                energy_impact, self.oaf
            ]
            result_writer(contents)

        def output_aggregate(self):
            '''output_aggregate writes the results of the data aggregation
            to file for inspection.
            '''
            file_path = inspect.getfile(inspect.currentframe())
            out_dir = os.path.dirname(os.path.realpath(file_path))
            now = datetime.date.today()
            file_path = os.path.join(
                out_dir, "Aggregate_Data({ts}).csv".format(ts=now))
            ofile = open(file_path, 'wb')
            x = [
                self.timestamp, self.oatemp, self.matemp, self.ratemp,
                self.damper, self.compressor, self.heating, self.fan_status
            ]
            outs = csv.writer(ofile, dialect='excel')
            # DictWriter is used only to emit the header row; the data
            # rows themselves go through the plain writer above.
            writer = csv.DictWriter(ofile,
                                    fieldnames=[
                                        "Timestamp", "OutsideAirTemp",
                                        "MixedAirTemp", "ReturnAirTemp",
                                        "Damper", "CompressorStatus",
                                        "Heating", "FanStatus"
                                    ],
                                    delimiter=',')
            writer.writeheader()
            for row in itertools.izip_longest(*x):
                outs.writerow(row)
            ofile.close()

        def calculate_oaf(self):
            '''Create OAF (outdoor-air fraction) vector for the data set.

            OAF is only meaningful when all three temperatures are valid,
            the OAT/RAT spread exceeds 4F, and the fan is on; otherwise
            the -99 sentinel is stored.
            '''
            for points in xrange(0, self.newdata):
                if (self.matemp[points] != -99 and
                        self.oatemp[points] != -99 and
                        self.ratemp[points] != -99 and
                        math.fabs(self.oatemp[points] -
                                  self.ratemp[points]) > 4.0 and
                        int(self.fan_status[points]) == 1):
                    self.oaf.append(
                        (self.matemp[points] - self.ratemp[points]) /
                        (self.oatemp[points] - self.ratemp[points]))
                else:
                    self.oaf.append(int(-99))
            return self.oaf

        def sensor_diagnostic(self):
            '''OAE1: temperature sensor fault codes (20-29) per interval.'''
            oae1_result = []
            for points in xrange(0, self.newdata):
                if self.fan_status[points] != -99:
                    if int(self.fan_status[points]):
                        if (self.matemp[points] != -99 and
                                self.ratemp[points] != -99 and
                                self.oatemp[points] != -99):
                            # NOTE(review): 'and' binds tighter than 'or'
                            # here; confirm the intended grouping of the
                            # missing-MAT condition.
                            if ((int(self.matemp_missing) and
                                    int(self.compressor[points]) or
                                    int(self.heating[points]))):
                                oae1_result.append(22)
                            elif (self.matemp[points] < self.mat_low or
                                  self.matemp[points] > self.mat_high):
                                # Temperature sensor problem detected (fault).
                                oae1_result.append(23)
                            elif (self.ratemp[points] < self.rat_low or
                                  self.ratemp[points] > self.rat_high):
                                # Temperature sensor problem detected (fault).
                                oae1_result.append(24)
                            elif (self.oatemp[points] < self.oat_low or
                                  self.oatemp[points] > self.oat_high):
                                # Temperature sensor problem detected (fault).
                                oae1_result.append(25)
                            elif ((self.matemp[points] > self.ratemp[points]
                                   and self.matemp[points] >
                                   self.oatemp[points]) or
                                  (self.matemp[points] < self.ratemp[points]
                                   and self.matemp[points] <
                                   self.oatemp[points])):
                                # MAT outside the OAT/RAT envelope (fault).
                                oae1_result.append(21)
                            else:
                                # No faults detected.
                                oae1_result.append(20)
                        else:
                            # Missing required data (no fault).
                            oae1_result.append(27)
                    else:
                        # Unit is off (no fault).
                        oae1_result.append(29)
                else:
                    # Missing required data (no fault).
                    oae1_result.append(27)
            return oae1_result

        def economizer_diagnostic1(self):
            '''OAE2: not-economizing-when-it-should codes (30-39).'''
            oae2_result = []
            for points in xrange(0, self.newdata):
                if self.fan_status[points] != -99:
                    if self.fan_status[points]:
                        if (self.ratemp[points] != -99 and
                                self.oatemp[points] != -99 and
                                self.compressor[points] != -99 and
                                self.damper[points] != -99):
                            if ((self.ratemp[points] - self.oatemp[points] >
                                    self.temp_deadband and
                                    self.economizer_type == 0.0) or
                                    (self.high_limit - self.oatemp[points] >
                                     self.temp_deadband and
                                     self.economizer_type == 1.0)):
                                if ((100.0 - self.damper[points]) <
                                        self.oae2_damper_threshold):
                                    if (math.fabs(self.oatemp[points] -
                                                  self.ratemp[points]) > 5.0
                                            and not self.matemp_missing):
                                        if (1.0 - self.oaf[points] <
                                                self.oae2_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            # No fault detected.
                                            oae2_result.append(30)
                                        elif (1.0 - self.oaf[points] >
                                                self.oae2_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            # OAF is too low (fault).
                                            oae2_result.append(32)
                                        else:
                                            # Unexpected OAF value (no fault).
                                            oae2_result.append(38)
                                    elif not ((self.heating[points] and
                                               self.compressor[points]) and
                                              math.fabs(
                                                  self.oatemp[points] -
                                                  self.ratemp[points]) > 5.0
                                              and self.matemp_missing):
                                        if (1.0 - self.oaf[points] <
                                                self.oae2_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            oae2_result.append(30)
                                        elif (1.0 - self.oaf[points] >
                                                self.oae2_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            # OAF too low while unit is
                                            # economizing (fault).
                                            oae2_result.append(32)
                                        else:
                                            oae2_result.append(38)
                                    else:
                                        oae2_result.append(36)
                                else:
                                    # Damper not open when conditions are
                                    # favorable for economizing (fault).
                                    oae2_result.append(33)
                            else:
                                oae2_result.append(31)
                        else:
                            # Missing data (no fault).
                            oae2_result.append(37)
                    else:
                        # Supply fan is off (no fault).
                        oae2_result.append(39)
                else:
                    oae2_result.append(37)
            return oae2_result

        def economizer_diagnostic2(self):
            '''OAE3: economizing-when-it-should-not codes (40-49).'''
            oae3_result = []
            for points in xrange(0, self.newdata):
                if self.fan_status[points] != -99:
                    if self.fan_status[points]:
                        if (self.compressor[points] != -99 and
                                self.ratemp[points] != -99 and
                                self.oatemp[points] != -99 and
                                self.damper[points] != -99):
                            if ((self.oatemp[points] - self.ratemp[points] >
                                    self.temp_deadband and
                                    self.economizer_type == 0.0) or
                                    (self.oatemp[points] - self.high_limit >
                                     self.temp_deadband and
                                     self.economizer_type == 1.0)):
                                if (self.compressor[points]):
                                    if (self.damper[points] <=
                                            self.damper_minimum):
                                        # No fault detected.
                                        oae3_result.append(40)
                                    else:
                                        # Damper should be at minimum
                                        # for ventilation (fault).
                                        oae3_result.append(41)
                                else:
                                    # Conditions are favorable for
                                    # economizing.
                                    oae3_result.append(43)
                            else:
                                # NOTE(review): branch reconstructed from a
                                # whitespace-mangled source — confirm the
                                # intended codes for this path.
                                if (self.damper[points] <=
                                        self.damper_minimum):
                                    oae3_result.append(41)
                                else:
                                    oae3_result.append(47)
                        else:
                            # BUG FIX: the original appended nothing when
                            # required data was missing, letting the result
                            # list fall out of step with the timestamps;
                            # 47 is the missing-data code used elsewhere.
                            oae3_result.append(47)
                    else:
                        # Supply fan is off (no fault).
                        oae3_result.append(49)
                else:
                    # Missing data (no fault).
                    oae3_result.append(47)
            return oae3_result

        def excess_oa_intake(self):
            '''OAE4: excess outdoor-air intake codes (50-59).'''
            oae4_result = []
            for points in xrange(0, self.newdata):
                if self.fan_status[points] != -99:
                    if self.fan_status[points]:
                        if (self.compressor[points] != -99 and
                                self.oatemp[points] != -99 and
                                self.ratemp[points] != -99 and
                                self.damper[points] != -99):
                            if ((self.oatemp[points] - self.ratemp[points] >
                                    self.temp_deadband and
                                    self.economizer_type == 0.0) or
                                    (self.oatemp[points] - self.high_limit >
                                     self.temp_deadband and
                                     self.economizer_type == 1.0)):
                                if (self.damper[points] <=
                                        self.damper_minimum):
                                    if (not self.matemp_missing and
                                            math.fabs(
                                                self.oatemp[points] -
                                                self.ratemp[points]) > 5.0):
                                        if ((self.oaf[points] -
                                             self.minimum_oa) <
                                                self.oae4_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            # No fault detected.
                                            oae4_result.append(50)
                                        elif ((self.oaf[points] -
                                               self.minimum_oa) >
                                              self.oae4_oaf_threshold and
                                              self.oaf[points] > 0 and
                                              self.oaf[points] < 1.25):
                                            # Excess OA intake (fault).
                                            oae4_result.append(51)
                                        else:
                                            # Unexpected OAF (no fault).
                                            oae4_result.append(58)
                                    elif (not int(self.heating[points]) and
                                          not int(self.compressor[points])
                                          and math.fabs(
                                              self.oatemp[points] -
                                              self.ratemp[points]) > 5.0
                                          and self.matemp_missing):
                                        if (self.oaf[points] -
                                                self.minimum_oa <
                                                self.oae4_oaf_threshold and
                                                self.oaf[points] > 0 and
                                                self.oaf[points] < 1.25):
                                            # No fault detected.
                                            oae4_result.append(50)
                                        elif ((self.oaf[points] -
                                               self.minimum_oa) >
                                              self.oae4_oaf_threshold and
                                              self.oaf[points] > 0 and
                                              self.oaf[points] < 1.25):
                                            # Unit brings in excess OA
                                            # (fault).
                                            oae4_result.append(51)
                                        else:
                                            # Unexpected OAF (no fault).
                                            oae4_result.append(58)
                                    else:
                                        # Conditions not favorable for OAF
                                        # calculation (no fault).
                                        oae4_result.append(52)
                                else:
                                    # Damper is not at minimum (fault).
                                    oae4_result.append(53)
                            else:
                                # Unit may be economizing (no fault).
                                oae4_result.append(56)
                        else:
                            # Missing data (no fault).
                            oae4_result.append(57)
                    else:
                        # Supply fan is off (no fault).
                        oae4_result.append(59)
                else:
                    # Missing data (no fault).
                    oae4_result.append(57)
            return oae4_result

        def insufficient_ventilation(self):
            '''OAE5: insufficient outdoor-air intake codes (60-69).'''
            oae5_result = []
            for points in xrange(0, self.newdata):
                if self.fan_status[points] != -99:
                    if int(self.fan_status[points]):
                        if (self.compressor[points] != -99 and
                                self.oatemp[points] != -99 and
                                self.ratemp[points] != -99 and
                                self.damper[points] != -99):
                            if (self.damper_minimum - self.damper[points] <=
                                    self.damper_deadband):
                                # BUG FIX: the original computed
                                # fabs(oat - rat > 5.0), i.e. fabs of a
                                # boolean; parenthesization corrected to
                                # match the other diagnostics.
                                if (math.fabs(self.oatemp[points] -
                                              self.ratemp[points]) > 5.0 and
                                        not self.matemp_missing):
                                    if ((self.minimum_oa -
                                         self.oaf[points]) >
                                            self.oae5_oaf_threshold and
                                            self.oaf[points] > 0 and
                                            self.oaf[points] < 1.25):
                                        # Insufficient OA intake (fault).
                                        oae5_result.append(61)
                                    elif ((self.minimum_oa -
                                           self.oaf[points]) <
                                          self.oae5_oaf_threshold and
                                          self.oaf[points] > 0 and
                                          self.oaf[points] < 1.25):
                                        # No problem detected.
                                        oae5_result.append(60)
                                    else:
                                        # Unexpected OAF (no fault).
                                        oae5_result.append(68)
                                elif (math.fabs(self.oatemp[points] -
                                                self.ratemp[points]) > 5.0
                                      and self.matemp_missing and
                                      not int(self.compressor[points]) and
                                      int(self.heating[points])):
                                    if ((self.minimum_oa -
                                         self.oaf[points]) >
                                            self.oae5_oaf_threshold and
                                            self.oaf[points] > 0 and
                                            self.oaf[points] < 1.25):
                                        # Insufficient OA (fault).
                                        oae5_result.append(61)
                                    elif ((self.minimum_oa -
                                           self.oaf[points]) <
                                          self.oae5_oaf_threshold and
                                          self.oaf[points] > 0 and
                                          self.oaf[points] < 1.25):
                                        # No fault.
                                        oae5_result.append(60)
                                    else:
                                        # Unexpected OAF (no fault).
                                        oae5_result.append(68)
                                else:
                                    # Conditions not favorable for OAF
                                    # calculation (no fault).
                                    oae5_result.append(62)
                            else:
                                # Damper significantly below the minimum
                                # damper set point (fault).
                                oae5_result.append(63)
                        else:
                            # Missing required data (no fault).
                            oae5_result.append(67)
                    else:
                        # Unit is off (no fault).
                        oae5_result.append(69)
                else:
                    # Missing data (no fault).
                    oae5_result.append(67)
            return oae5_result

        def schedule_diagnostic(self):
            '''OAE6: equipment running outside scheduled hours (70-79).'''
            oae6_result = []
            for points in xrange(0, self.newdata):
                if (self.fan_status[points] != -99 and
                        self.compressor[points] != -99):
                    if (int(self.fan_status[points]) or
                            int(self.compressor[points])):
                        day = self.timestamp[points].weekday()
                        sched = self.schedule_dict[day]
                        start = int(sched[0])
                        end = int(sched[1])
                        if (self.timestamp[points].hour < start or
                                self.timestamp[points].hour > end):
                            oae6_result.append(71)
                        else:
                            oae6_result.append(70)
                    else:
                        oae6_result.append(70)
                else:
                    oae6_result.append(77)
            return oae6_result

        def calculate_energy_impact(self, oae_2, oae_3, oae_4):
            '''Estimate the energy impact (kWh) of detected faults.

            Only computed when the mixed-air sensor is present; returns
            an empty list otherwise.
            '''
            energy_impact = []
            if not self.matemp_missing:
                for points in xrange(0, self.newdata):
                    if oae_2[points] == 32 or oae_2[points] == 33:
                        energy_impact.append(
                            1.08 * self.cfm *
                            (self.matemp[points] - self.oatemp[points]) /
                            (1000 * self.eer))
                    elif (oae_3[points] == 41 or oae_4[points] == 51 or
                          oae_4[points] == 53 and
                          self.oatemp[points] > self.matemp[points]):
                        ei = 1.08 * self.cfm / (1000 * self.eer)
                        ei = ei * (self.matemp[points] -
                                   (self.oatemp[points] * self.minimum_oa +
                                    self.ratemp[points] *
                                    (1 - self.minimum_oa)))
                        energy_impact.append(ei)
                    # NOTE(review): this elif duplicates the condition above
                    # and is therefore unreachable; the second formula was
                    # probably meant for the opposite temperature ordering.
                    elif (oae_3[points] == 41 or oae_4[points] == 51 or
                          oae_4[points] == 53 and
                          self.oatemp[points] > self.matemp[points]):
                        ei = (1.08 *
                              (self.oatemp[points] * self.minimum_oa +
                               self.ratemp[points] * (1 - self.minimum_oa)) -
                              self.cfm * (self.matemp[points]) /
                              (1000 * self.eer))
                        energy_impact.append(ei)
                    else:
                        energy_impact.append(0)
                    # Negative impacts are clamped to zero.
                    if energy_impact[points] < 0:
                        energy_impact[points] = 0
            return energy_impact

        @matching.match_exact(topics.DEVICES_VALUE(point='all', **rtu_path))
        def datahandler(self, topic, header, message, match):
            '''Subscribes to data and assembles raw data arrays.

            data_handler subscribes to a device or simulated device on the
            message bus and assembles the array (lists) of data to be
            aggregated for analysis.
            '''
            data = jsonapi.loads(message[0])
            _log.info('Getting Data from message bus')
            publisher_id = header.get('AgentID', 0)
            if ((self.run_aggregate is False or
                    self.run_aggregate is None) and
                    publisher_id != 'publisher'):
                _log.info('Real-time device data.')
                self.run_aggregate = True
                # Process the accumulated day of data at next midnight.
                event_time = (datetime.datetime.now().replace(
                    hour=0, minute=0, second=0) +
                    datetime.timedelta(days=1))
                event = sched.Event(self.process_data)
                self.schedule(event_time, event)
                self.oaf_raw = []
                self.timestamp_raw = []
                self.matemp_raw = []
                self.oatemp_raw = []
                self.ratemp_raw = []
                self.compressor_raw = []
                self.heating_raw = []
                self.damper_raw = []
                self.fan_status_raw = []
            elif publisher_id == 'publisher':
                _log.info('Simulated device data.')
                if self.run_aggregate is None:
                    self.prev_time = dateutil.parser.parse(
                        data[self.timestamp_name])
                    self.run_aggregate = True
                time = dateutil.parser.parse(data[self.timestamp_name],
                                             fuzzy=True)
                time_delta = time - self.prev_time
                # BUG FIX: original read ``time_delt`` (NameError typo).
                time_check = time + time_delta
                self.timestamp_raw.append(time)
                self.fan_status_raw.append(data[self.fan_status_name])
                # BUG FIX: original referenced self.coolcmd1_name and
                # self.heat_cmd1_name, attributes that are never defined;
                # the configured names are cool_cmd_name/heat_cmd_name.
                self.compressor_raw.append(data[self.cool_cmd_name])
                self.heating_raw.append(data[self.heat_cmd_name])
                self.damper_raw.append(data[self.damper_name])
                self.oatemp_raw.append(data[self.oat_name])
                self.ratemp_raw.append(data[self.rat_name])
                self.matemp_raw.append(data[self.mat_name])
                if time.day < time_check.day:
                    # Day rollover: process and clear the raw buffers.
                    self.timestamp_raw.append(time_check)
                    self.process_data()
                    self.oaf_raw = []
                    self.timestamp_raw = []
                    # BUG FIX: matemp_raw was missing from this reset list,
                    # so mixed-air samples from previous days leaked into
                    # the next aggregation.
                    self.matemp_raw = []
                    self.oatemp_raw = []
                    self.ratemp_raw = []
                    self.compressor_raw = []
                    self.heating_raw = []
                    self.damper_raw = []
                    self.fan_status_raw = []
                self.prev_time = time
            if publisher_id != 'publisher':
                self.timestamp_raw.append(datetime.datetime.now())
                self.fan_status_raw.append(data[self.fan_status_name])
                # BUG FIX: same undefined-attribute fix as above.
                self.compressor_raw.append(data[self.cool_cmd_name])
                self.heating_raw.append(data[self.heat_cmd_name])
                self.damper_raw.append(data[self.damper_name])
                self.oatemp_raw.append(data[self.oat_name])
                self.ratemp_raw.append(data[self.rat_name])
                self.matemp_raw.append(data[self.mat_name])

    Agent.__name__ = 'passiveafdd'
    return Agent(**kwargs)
def DrivenAgent(config_path, **kwargs):
    '''Driven harness for deployment of OpenEIS applications in VOLTTRON.

    Builds an agent that collects one reading from every configured
    device/sub-device, converts the assembled row, runs the configured
    application, publishes the results on the analysis topics, and (in
    ACTIVE mode) pushes the application's commands to the actuator.

    :param config_path: path to the agent configuration file.
    :raises ValueError: if required configuration entries are missing.
    '''
    config = utils.load_config(config_path)
    arguments = config.get('arguments', None)
    assert arguments
    from_file = arguments.get('From File', False)
    mode = True if config.get('mode', 'PASSIVE') == 'ACTIVE' else False
    validation_error = ''
    device = dict((key, config['device'][key])
                  for key in ['campus', 'building'])
    subdevices = []
    conv_map = config.get('conversion_map')
    map_names = {}
    for key, value in conv_map.items():
        map_names[key.lower() if isinstance(key, str) else key] = value
    # This implies a sub-device listing.
    multiple_dev = isinstance(config['device']['unit'], dict)
    if multiple_dev:
        # Assumption that there will be only one entry in the dictionary.
        units = config['device']['unit'].keys()
        for item in units:
            subdevices.extend(config['device']['unit'][item]['subdevices'])
    # NOTE(review): ``units`` is only bound when ``unit`` is a dict; a flat
    # ``unit`` entry would raise NameError below — confirm all deployments
    # use the dict form.
    # Modify the device dict so that unit is now pointing to unit_name.
    agent_id = config.get('agentid')
    device.update({'unit': units})
    _analysis = deepcopy(device)
    _analysis_name = config.get('device').get('analysis_name',
                                              'analysis_name')
    _analysis.update({'analysis_name': _analysis_name})
    # BUG FIX: the first check tested ``device`` twice; the agent-id
    # message must be driven by ``agent_id``.
    if not agent_id:
        validation_error += 'Invalid agent_id specified in config\n'
    if not device:
        validation_error += 'Invalid device path specified in config\n'
    actuator_id = (agent_id + '_' +
                   "{campus}/{building}/{unit}".format(**device))
    application = config.get('application')
    if not application:
        validation_error += 'Invalid application specified in config\n'
    utils.setup_logging()
    _log = logging.getLogger(__name__)
    # BUG FIX: logging.DEBUG (numeric constant), not the logging.debug
    # function.
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%m-%d-%y %H:%M:%S')
    if validation_error:
        _log.error(validation_error)
        raise ValueError(validation_error)
    # Collapse the arguments on top of the config file.
    config.update(config.get('arguments'))
    converter = ConversionMapper()
    output_file = config.get('output_file')
    base_dev = "devices/{campus}/{building}/".format(**device)
    devices_topic = (
        base_dev + '({})(/.*)?/all$'.format(
            '|'.join(re.escape(p) for p in units)))
    unittype_map = config.get('unittype_map', None)
    assert unittype_map
    klass = _get_class(application)
    # This instance is used to call the application's run method when data
    # comes in on the message bus.  It is constructed here so that each
    # time run is called the application can keep its state.
    app_instance = klass(**config)

    class Agent(PublishMixin, BaseAgent):
        '''Agent listens to message bus device and runs when data is
        published.
        '''

        def __init__(self, **kwargs):
            super(Agent, self).__init__(**kwargs)
            self._update_event = None
            self._update_event_time = None
            self.keys = None
            # master is where we copy from to get a poppable list of
            # subdevices that should be present before we run the analysis.
            self._master_subdevices = subdevices
            self._needed_subdevices = []
            self._master_devices = units
            self._subdevice_values = {}
            self._needed_devices = []
            self._device_values = {}
            self._initialize_devices()
            self.received_input_datetime = None
            self._kwargs = kwargs
            self.commands = {}
            self.current_point = None
            self.current_key = None
            if output_file is not None:
                # Truncate any stale output file from a previous run.
                with open(output_file, 'w') as writer:
                    writer.close()
            self._header_written = False

        def _initialize_devices(self):
            '''Reset bookkeeping: every device/sub-device is needed again.'''
            self._needed_subdevices = deepcopy(self._master_subdevices)
            self._needed_devices = deepcopy(self._master_devices)
            self._subdevice_values = {}
            self._device_values = {}

        def _should_run_now(self):
            '''True once every device and sub-device has reported in.'''
            # Assumes the unit/all values will have values.
            if not len(self._device_values.keys()) > 0:
                return False
            return not (len(self._needed_subdevices) > 0 or
                        len(self._needed_devices) > 0)

        @matching.match_regex(devices_topic)
        def on_rec_analysis_message(self, topic, headers, message, matched):
            '''Collect one device/sub-device reading; run the application
            once all configured sources have reported.
            '''
            obj = jsonapi.loads(message[0])
            if isinstance(obj, list):
                obj = obj[0]
            dev_list = topic.split('/')
            device_or_subdevice = dev_list[-2]
            device_id = [
                dev for dev in self._master_devices
                if dev == device_or_subdevice
            ]
            subdevice_id = [
                dev for dev in self._master_subdevices
                if dev == device_or_subdevice
            ]
            if not device_id and not subdevice_id:
                return
            if isinstance(device_or_subdevice, unicode):
                device_or_subdevice = (
                    device_or_subdevice.decode('utf-8').encode('ascii'))

            def agg_subdevice(obj):
                '''Suffix each point name with its source and file it under
                devices or sub-devices based on topic depth.
                '''
                sub_obj = {}
                for key, value in obj.items():
                    sub_key = ''.join([key, '_', device_or_subdevice])
                    sub_obj[sub_key] = value
                # Topic depth > 5 means unit/sub-device/all, not unit/all.
                if len(dev_list) > 5:
                    self._subdevice_values.update(sub_obj)
                    self._needed_subdevices.remove(device_or_subdevice)
                else:
                    self._device_values.update(sub_obj)
                    self._needed_devices.remove(device_or_subdevice)
                return

            # The below if statement is used to distinguish between
            # unit/all and unit/sub-device/all.
            if (device_or_subdevice not in self._needed_devices and
                    device_or_subdevice not in self._needed_subdevices):
                _log.error("Warning device values already present, "
                           "reinitializing")
                self._initialize_devices()
            agg_subdevice(obj)
            if self._should_run_now():
                field_names = {}
                self._device_values.update(self._subdevice_values)
                for k, v in self._device_values.items():
                    field_names[k.lower() if isinstance(k, str) else k] = v
                if not converter.initialized and conv_map is not None:
                    converter.setup_conversion_map(map_names, field_names)
                if from_file:
                    _timestamp = parse(headers.get('Date'), fuzzy=True)
                    self.received_input_datetime = _timestamp
                else:
                    _timestamp = datetime.datetime.now()
                    self.received_input_datetime = (
                        datetime.datetime.utcnow())
                obj = converter.process_row(field_names)
                results = app_instance.run(_timestamp, obj)
                self._process_results(results)
                self._initialize_devices()
            else:
                needed = deepcopy(self._needed_devices)
                needed.extend(self._needed_subdevices)
                _log.info("Still need {} before running.".format(needed))

        def _process_results(self, results):
            '''Run driven application with converted data and write the
            app results to a file or database, publish them on the
            analysis topics, and (ACTIVE mode) schedule actuation.
            '''
            _log.debug('Processing Results!')
            for key, value in results.commands.iteritems():
                _log.debug("COMMAND: {}->{}".format(key, value))
            for value in results.log_messages:
                _log.debug("LOG: {}".format(value))
            for key, value in results.table_output.iteritems():
                _log.debug("TABLE: {}->{}".format(key, value))
            if output_file is not None:
                if len(results.table_output.keys()) > 0:
                    for _, v in results.table_output.items():
                        fname = output_file
                        for r in v:
                            # BUG FIX: removed the redundant f.close()
                            # inside the with block; the context manager
                            # already closes the file.
                            with open(fname, 'a+') as f:
                                keys = r.keys()
                                fout = csv.DictWriter(f, keys)
                                if not self._header_written:
                                    fout.writeheader()
                                    self._header_written = True
                                fout.writerow(r)

            def get_unit(point):
                '''Get a unit type based upon the regular expression in
                the config file; if NOT found returns percent as a
                default unit.
                '''
                for k, v in unittype_map.items():
                    if re.match(k, point):
                        return v
                return 'percent'

            # Publish to message bus.
            if len(results.table_output.keys()) > 0:
                headers = {
                    headers_mod.CONTENT_TYPE: headers_mod.CONTENT_TYPE.JSON,
                    headers_mod.DATE: str(self.received_input_datetime),
                }
                for _, v in results.table_output.items():
                    for r in v:
                        for key, value in r.iteritems():
                            if isinstance(value, bool):
                                value = int(value)
                            for item in units:
                                _analysis['unit'] = item
                                analysis_topic = topics.ANALYSIS_VALUE(
                                    point=key, **_analysis)
                                datatype = 'float'
                                if isinstance(value, int):
                                    datatype = 'int'
                                kbase = key[key.rfind('/') + 1:]
                                message = [{
                                    kbase: value
                                }, {
                                    kbase: {
                                        'tz': 'US/Pacific',
                                        'type': datatype,
                                        'units': get_unit(kbase)
                                    }
                                }]
                                self.publish_json(analysis_topic, headers,
                                                  message)
            if results.commands and mode:
                self.commands = results.commands
                if self.keys is None:
                    self.keys = self.commands.keys()
                self.schedule_task()

        def schedule_task(self):
            '''Schedule access to modify device controls.'''
            _log.debug('Schedule Device Access')
            headers = {
                'type': 'NEW_SCHEDULE',
                'requesterID': agent_id,
                'taskID': actuator_id,
                'priority': 'LOW'
            }
            start = datetime.datetime.now()
            end = start + td(seconds=30)
            start = str(start)
            end = str(end)
            self.publish_json(
                topics.ACTUATOR_SCHEDULE_REQUEST(), headers,
                [["{campus}/{building}/{unit}".format(**device),
                  start, end]])

        def command_equip(self):
            '''Execute the next pending command on the configured device.'''
            self.current_key = self.keys[0]
            value = self.commands[self.current_key]
            headers = {
                'Content-Type': 'text/plain',
                'requesterID': agent_id,
            }
            self.publish(topics.ACTUATOR_SET(point=self.current_key,
                                             **device),
                         headers, str(value))

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT())
        def schedule_result(self, topic, headers, message, match):
            '''Actuator response (FAILURE, SUCESS).'''
            _log.debug('Actuator Response')
            msg = jsonapi.loads(message[0])
            msg = msg['result']
            _log.debug('Schedule Device ACCESS')
            if self.keys:
                if msg == "SUCCESS":
                    self.command_equip()
                elif msg == "FAILURE":
                    _log.debug('Auto-correction of device failed.')

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **device))
        def on_set_result(self, topic, headers, message, match):
            '''Setting of point on device was successful.'''
            _log.debug('Set Success: {point} - {value}'.format(
                point=self.current_key,
                value=str(self.commands[self.current_key])))
            _log.debug('set_point({}, {})'.format(
                self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                _log.debug('Done with Commands - Release device lock.')
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

        @matching.match_headers({headers_mod.REQUESTER_ID: agent_id})
        @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **device))
        def on_set_error(self, topic, headers, message, match):
            '''Setting of point on device failed, log failure message.'''
            _log.debug('Set ERROR')
            msg = jsonapi.loads(message[0])
            msg = msg['type']
            _log.debug('Actuator Error: ({}, {}, {})'.format(
                msg, self.current_key, self.commands[self.current_key]))
            self.keys.remove(self.current_key)
            if self.keys:
                self.command_equip()
            else:
                headers = {
                    'type': 'CANCEL_SCHEDULE',
                    'requesterID': agent_id,
                    'taskID': actuator_id
                }
                self.publish_json(topics.ACTUATOR_SCHEDULE_REQUEST(),
                                  headers, {})
                self.keys = None

    Agent.__name__ = 'DrivenLoggerAgent'
    return Agent(**kwargs)
from datetime import datetime import os import sys import json import gevent import logging from gevent.core import callback from gevent import Timeout from volttron.platform.messaging import headers as headers_mod from volttron.platform.vip.agent import Agent, PubSub, Core from volttron.platform.agent import utils # Log warnings and errors to make the node red log less chatty utils.setup_logging(level=logging.WARNING) _log = logging.getLogger(__name__) # These are the options that can be set from the settings module. from settings import agent_kwargs ''' takes two arguments. First is topic to publish under. Second is message. ''' if __name__ == '__main__': try: # If stdout is a pipe, re-open it line buffered if utils.isapipe(sys.stdout): # Hold a reference to the previous file object so it doesn't # get garbage collected and close the underlying descriptor. stdout = sys.stdout sys.stdout = os.fdopen(stdout.fileno(), 'w', 1)
# NOTE(review): The line above is the mangled prologue of a node-red bridge script
# (imports, WARNING-level logging setup, and the start of its __main__ guard); the
# mid-line '#' comments now swallow the code that followed them on the same line.
#message = constructMessage(topic, message, content_type, fromUI); print os.path.basename(__file__) + "inside sock create connecttoZMQ" now = datetime.utcnow().isoformat(' ') + 'Z' headers = { headers_mod.CONTENT_TYPE: content_type, headers_mod.DATE: now, } print "created headers" #message = json.loads(message) print topic #self.publish("test","test","test msg") self.publish(topic, headers, message) print "published" ''' from datetime import datetime from volttron.lite.agent import BaseAgent, PublishMixin from volttron.lite.messaging import headers as headers_mod from volttron.lite.agent import utils import logging utils.setup_logging() _log = logging.getLogger(__name__) class ZMQ_PUB(PublishMixin, BaseAgent): def __init__(self, configpath,**kwargs): print "yeay..in" print "entering super"
# NOTE(review): The line above mixes the tail of one publish routine with the start of
# a separate volttron.lite ZMQ_PUB agent module; ZMQ_PUB.__init__ is truncated here.
# Python 2 `print` statements throughout — this fragment is not Python-3 compatible.
# NOTE(review): Mangled opening of a MongodbAggregateHistorian module (license tail,
# imports, DEBUG logging setup, class header). Its __init__ docstring is left
# unterminated at the end of this physical line — the closing quotes were lost in the
# whitespace-mangling, so everything after it is inside a string until repaired.
# under Contract DE-AC05-76RL01830 # }}} from __future__ import absolute_import import logging import sys import bson from bson import ObjectId import pymongo from volttron.platform.agent import utils from volttron.platform.agent.base_aggregate_historian import AggregateHistorian from volttron.platform.dbutils import mongoutils utils.setup_logging(logging.DEBUG) _log = logging.getLogger(__name__) __version__ = '1.0' class MongodbAggregateHistorian(AggregateHistorian): """ Agent to aggregate data in historian based on a specific time period. This aggregate historian aggregates data collected by mongo historian. """ def __init__(self, config_path, **kwargs): """ Validate configuration, create connection to historian, create aggregate tables if necessary and set up a periodic call to aggregate data
import json import datetime import time import logging import os import re from volttron.platform.agent import BaseAgent, PublishMixin from volttron.platform.agent import utils, matching from volttron.platform.messaging import headers as headers_mod from urlparse import urlparse import settings import netifaces as ni import ast import subprocess utils.setup_logging() # setup logger for debugging _log = logging.getLogger(__name__) # Step1: Agent Initialization def DeviceDiscoveryAgent(config_path, **kwargs): config = utils.load_config( config_path ) # load the config_path from devicediscoveryagent.launch.json def get_config(name): try: kwargs.pop(name) # from the **kwargs when call this function except KeyError as er: return config.get(name, '')
# NOTE(review): The line above and the line below are two near-duplicate copies of the
# same DeviceDiscoveryAgent module opening. In the copy above, get_config discards the
# value returned by kwargs.pop(name) and returns None on the success path — the copy
# below assigns `value = kwargs.pop(name)`, which looks like the later/corrected
# revision; the duplication itself appears to be a concatenation artifact.
# `urlparse` import is Python 2 only (urllib.parse in Python 3) — TODO confirm target.
import json import datetime import time import logging import os import re from volttron.platform.agent import BaseAgent, PublishMixin from volttron.platform.agent import utils, matching from volttron.platform.messaging import headers as headers_mod from urlparse import urlparse import settings import netifaces as ni import ast import subprocess utils.setup_logging() # setup logger for debugging _log = logging.getLogger(__name__) # Step1: Agent Initialization def DeviceDiscoveryAgent(config_path, **kwargs): config = utils.load_config(config_path) # load the config_path from devicediscoveryagent.launch.json def get_config(name): try: value = kwargs.pop(name) # from the **kwargs when call this function except KeyError as er: print "keyError", er return config.get(name, '') # 1. @params agent agent_id = get_config('agent_id') device_scan_time = get_config('device_scan_time')
# NOTE(review): Mangled fragment of an air-side HVAC auto-retuning diagnostic module:
# sentinel/label constants, dx_list, __version__, logging setup, a small data_builder
# helper, and the truncated start of class Application (its docstring is cut off at the
# end of this physical line).
# BUG flagged: `logging.basicConfig(level=logging.info, ...)` passes the *function*
# logging.info where the level constant logging.INFO is intended — the call falls back
# to an unexpected level. Fix when the line structure is restored.
FAN_OFF = -99.3 DUCT_STC_RCX = "Duct Static Pressure Set Point Control Loop Dx" DUCT_STC_RCX1 = "Low Duct Static Pressure Dx" DUCT_STC_RCX2 = "High Duct Static Pressure Dx" DX = "/diagnostic message" SA_TEMP_RCX = "Supply-air Temperature Set Point Control Loop Dx" SA_TEMP_RCX1 = "Low Supply-air Temperature Dx" SA_TEMP_RCX2 = "High Supply-air Temperature Dx" dx_list = [ DUCT_STC_RCX, DUCT_STC_RCX1, DUCT_STC_RCX2, SA_TEMP_RCX, SA_TEMP_RCX1, SA_TEMP_RCX2 ] __version__ = "1.0.7" setup_logging() _log = logging.getLogger(__name__) logging.basicConfig(level=logging.info, format="%(asctime)s %(levelname)-8s %(message)s") def data_builder(value_tuple, point_name): value_list = [] for item in value_tuple: value_list.append(item[1]) return value_list class Application(AbstractDrivenAgent): """ Air-side HVAC Auto-Retuning Diagnostics
def AFDDAgent(config_path, **kwargs): config = utils.load_config(config_path) agent_id = config['agentid'] termination_window = config.get('termination_window', 600) min_run_window = config.get('min_run_window', 3600 + termination_window) rtu_path = dict((key, config[key]) for key in ['campus', 'building', 'unit']) day_run_interval = config.get('day_run_interval') start_hour = config.get('start_hour') start_minute = config.get('start_minute') volttron_flag = config.get('volttron_flag') debug_flag = True zip_code = config.get('zip_code') utils.setup_logging() _log = logging.getLogger(__name__) logging.basicConfig(level=logging.debug, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d-%y %H:%M:%S') class Agent(PublishMixin, BaseAgent): def __init__(self, **kwargs): super(Agent, self).__init__(**kwargs) self.lock_timer = None self.lock_acquired = False self.tasklet = None self.data_queue = green.WaitQueue(self.timer) self.value_queue = green.WaitQueue(self.timer) self.weather_data_queue = green.WaitQueue(self.timer) self.last_run_time = None self.is_running = False self.remaining_time = None self.task_id= agent_id self.retry_schedule = None self.start = None self.end = None def setup(self): super(Agent, self).setup() self.scheduled_task() def startrun(self, algo=None): _log.debug('start diagnostic') if algo is None: algo = afdd.AFDD(self,config_path).run_all self.tasklet = greenlet.greenlet(algo) self.is_running = True self.last_run_time = datetime.datetime.now() self.tasklet.switch() def scheduled_task(self): ''' Schedule re-occuring diagnostics ''' _log.debug('Schedule Dx') headers = { 'type': 'NEW_SCHEDULE', 'requesterID': agent_id, 'taskID': agent_id, 'priority': 'LOW_PREEMPT' } min_run_hour = math.floor(min_run_window/3600) min_run_minute = int((min_run_window/3600 - min_run_hour)*60) self.start = datetime.datetime.now().replace(hour=start_hour, minute=start_minute) self.end = self.start + datetime.timedelta(hours=2,minutes=30) run_start = self.end 
- datetime.datetime.now() required_diagnostic_time = datetime.timedelta(hours = min_run_hour, minutes=min_run_minute) if run_start < required_diagnostic_time: self.start = self.start + datetime.timedelta(days=1) self.end = self.start + datetime.timedelta(hours=2,minutes=30) sched_time = datetime.datetime.now() + datetime.timedelta(days=day_run_interval + 1) sched_time = sched_time.replace(hour=0,minute=1) else: sched_time = datetime.datetime.now() + datetime.timedelta(days=day_run_interval) self.start = str(self.start) self.end = str(self.end) self.task_timer = self.periodic_timer(60, self.publish_json, topics.ACTUATOR_SCHEDULE_REQUEST(), headers,[["{campus}/{building}/{unit}".format(**rtu_path),self.start,self.end]]) event = sched.Event(self.scheduled_task) self.next = self.schedule(sched_time, event) @matching.match_headers({headers_mod.REQUESTER_ID: agent_id,'type': 'CANCEL_SCHEDULE'}) @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT()) def preempt(self): if self.is_running: self.cancel_greenlet() @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_exact(topics.ACTUATOR_SCHEDULE_ANNOUNCE(**rtu_path)) def on_schedule(self, topic, headers, message, match): msg = jsonapi.loads(message[0]) now = datetime.datetime.now() self.remaining_time = headers.get('window', 0) if self.task_id == headers.get('taskID', ''): if self.remaining_time < termination_window: if self.is_running: self.cancel_greenlet() elif (self.remaining_time > min_run_window and (self.last_run_time is None or (now - self.last_run_time) > datetime.timedelta(hours=23, minutes=50))): self.startrun() @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_exact(topics.ACTUATOR_SCHEDULE_RESULT()) def schedule_result(self, topic, headers, message, match): msg = jsonapi.loads(message[0]) _log.debug('Actuator response received') self.task_timer.cancel() @matching.match_exact(topics.DEVICES_VALUE(point='all', **rtu_path)) def on_new_data(self, topic, headers, 
message, match): data = jsonapi.loads(message[0]) #Check override status if int(data["VoltronPBStatus"]) == 1: if self.is_running: _log.debug("User override is initiated...") headers = { 'Content-Type': 'text/plain', 'requesterID': agent_id, } self.publish(topics.ACTUATOR_SET(point="VoltronFlag", **rtu_path), headers, str(0.0)) self.cancel_greenlet() else: self.data_queue.notify_all(data) @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_glob(topics.ACTUATOR_VALUE(point='*', **rtu_path)) def on_set_result(self, topic, headers, message, match): self.value_queue.notify_all((match.group(1), True)) @matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_glob(topics.ACTUATOR_ERROR(point='*', **rtu_path)) def on_set_error(self, topic, headers, message, match): self.value_queue.notify_all((match.group(1), False)) def cancel_greenlet(self): #kill all tasks currently in the queue self.data_queue.kill_all() self.value_queue.kill_all() #kill current tasklet self.tasklet.throw() self.is_running = False def sleep(self, timeout): _log.debug('wait for steady state({})'.format(timeout)) green.sleep(timeout, self.timer) def get_new_data(self, timeout=None): _log.debug('get_new_data({})'.format(timeout)) return self.data_queue.wait(timeout) def command_equip(self, point_name, value, timeout=None): _log.debug('set_point({}, {}, {})'.format(point_name, value, timeout)) headers = { 'Content-Type': 'text/plain', 'requesterID': agent_id, } self.publish(topics.ACTUATOR_SET(point=point_name, **rtu_path), headers, str(value)) try: return self.value_queue.wait(timeout) except green.Timeout: return True def weather_request(self,timeout=None): _log.debug('weather request for {}'.format(zip_code)) headers = { 'Content-Type': 'text/plain', 'requesterID': agent_id } msg = {'zipcode': str(zip_code)} self.publish_json('weather/request',headers, msg) try: return self.weather_data_queue.wait(timeout) except green.Timeout: return 'INCONCLUSIVE' 
# NOTE(review): Mangled AFDDAgent factory. Known defects visible in this fragment:
# (1) `logging.basicConfig(level=logging.debug, ...)` passes the logging.debug
#     *function* instead of the logging.DEBUG level constant;
# (2) the expression `run_start = self.end - datetime.datetime.now()` is split across
#     two physical lines with no continuation, as is the `on_new_data` signature;
# (3) the next line starts `matching.match_headers(...)` with no leading '@' — the
#     decorator marker for weather_response was presumably lost in the mangling.
matching.match_headers({headers_mod.REQUESTER_ID: agent_id}) @matching.match_exact('weather/response/temperature/temp_f') def weather_response(self, topic, headers, message, match): data = float(jsonapi.loads(message[0])) print data self.weather_data_queue.notify_all(data) Agent.__name__ = 'AFDDAgent' return Agent(**kwargs)
# NOTE(review): Mangled opening of MarketServiceAgent: imports, quieting of the noisy
# 'transitions.core' logger to WARNING, state constants, and the truncated start of the
# class-level `transitions` table (the list/dict literals remain open at the end of
# this physical line). utils.setup_logging() is called after getLogger — harmless,
# since getLogger only fetches the (shared) logger object.
from volttron.platform.agent import utils from volttron.platform.messaging.topics import MARKET_RESERVE, MARKET_BID from volttron.platform.vip.agent import Agent, Core, RPC from volttron.platform.agent.base_market_agent.poly_line_factory import PolyLineFactory from volttron.platform.agent.base_market_agent.buy_sell import SELLER from volttron.platform.agent.base_market_agent.buy_sell import BUYER from volttron.platform.agent.base_market_agent.poly_line import PolyLine from volttron.platform.agent.base_market_agent.point import Point from .market_list import MarketList from .market_participant import MarketParticipant _tlog = logging.getLogger('transitions.core') _tlog.setLevel(logging.WARNING) _log = logging.getLogger(__name__) utils.setup_logging() __version__ = "0.01" INITIAL_WAIT = 'service_initial_wait' COLLECT_RESERVATIONS = 'service_collect_reservations' COLLECT_OFFERS = 'service_collect_offers' NO_MARKETS = 'service_has_no_markets' class MarketServiceAgent(Agent): states = [INITIAL_WAIT, COLLECT_RESERVATIONS, COLLECT_OFFERS, NO_MARKETS] transitions = [ { 'trigger': 'start_reservations', 'source': INITIAL_WAIT, 'dest': COLLECT_RESERVATIONS
# NOTE(review): Mangled opening of a driven-agent module (imports, authorship metadata,
# DATE_FORMAT, logging setup, and the truncated start of the driven_agent factory).
# BUG flagged: `logging.basicConfig(level=logging.info, ...)` passes the logging.info
# *function* where the logging.INFO level constant is intended.
from volttron.platform.vip.agent import Agent, Core from volttron.platform.jsonrpc import RemoteError from volttron.platform.agent.driven import ConversionMapper from volttron.platform.messaging import (headers as headers_mod, topics) import dateutil.tz __version__ = "1.0.8" __author1__ = "Craig Allwardt <*****@*****.**>" __author2__ = "Robert Lutes <*****@*****.**>" __author3__ = "Poorva Sharma <*****@*****.**>" __copyright__ = "Copyright (c) 2017, Battelle Memorial Institute" __license__ = "FreeBSD" DATE_FORMAT = "%m-%d-%y %H:%M" setup_logging() _log = logging.getLogger(__name__) logging.basicConfig(level=logging.info, format="%(asctime)s %(levelname)-8s %(message)s", datefmt=DATE_FORMAT) def driven_agent(config_path, **kwargs): """ Reads agent configuration and converts it to run driven agent. :param config_path: :param kwargs: :return: """ config = utils.load_config(config_path) arguments = config.get("arguments") actuation_mode = True if config.get("actuation_mode", "PASSIVE") == "ACTIVE" else False
# NOTE(review): Mangled fragment of the passiveafdd (passive AHU/RTU economizer fault
# detection) factory. Defects visible despite the mangling:
# (1) `logging.basicConfig(level=logging.debug, ...)` passes the logging.debug
#     *function* instead of the logging.DEBUG level constant;
# (2) several statements are split across physical lines with no continuation
#     (`tonnage = / float(...)`, `dateutil.parser. / parse(...)`, genexp bodies);
# (3) Python-2-only idioms throughout (xrange, itertools.izip_longest, print
#     statements, csv file opened 'wb') — not Python-3 compatible as written.
def passiveafdd(config_path, **kwargs): '''Passive fault detection application for AHU/RTU economizer systems''' config = utils.load_config(config_path) rtu_path = dict((key, config[key]) for key in ['campus', 'building', 'unit']) utils.setup_logging() _log = logging.getLogger(__name__) logging.basicConfig(level=logging.debug, format='%(asctime)s %(levelname)-8s %(message)s', datefmt='%m-%d-%y %H:%M:%S') class Agent(PublishMixin, BaseAgent): def __init__(self, **kwargs): '''Input and initialize user configurable parameters.''' super(Agent, self).__init__(**kwargs) # Agent Configuration parameters self.agent_id = config.get('agentid') self.aggregate_data = int(config.get("aggregate_data", 1)) self.matemp_missing = int(config.get("matemp_missing")) # Temperature sensor diagnostic thresholds self.mat_low = float(config.get("mat_low", 50.0)) self.mat_high = float(config.get("mat_high", 90.0)) self.oat_low = float(config.get("oat_low", 30.0)) self.oat_high = float(config.get("oat_high", 120.0)) self.rat_low = float(config.get("rat_low", 50.0)) self.rat_high = float(config.get("rat_high", 90.0)) self.temp_sensor_threshold = ( float(config.get("temp_sensor_threshold", 5.0))) self.temp_deadband = config.get('temp_deadband', 2.5) # Economizer diagnostic thresholds and parameters self.high_limit = float(config.get("high_limit", 60.0)) self.economizer_type = int(config.get("economizer_type", 0)) self.damper_minimum = float(config.get("damper_minimum", 15.0)) self.minimum_oa = float(config.get("minimum_oa", 0.1)) self.oae2_damper_threshold = ( float(config.get("oae2_damper_threshold", 30.0))) self.oae2_oaf_threshold = \ float(config.get("oae2_oaf_threshold", 0.25)) self.oae4_oaf_threshold = \ float(config.get("oae4_oaf_threshold", 0.25)) self.oae5_oaf_threshold = \ float(config.get("oae5_oaf_threshold", 0)) self.damper_deadband = config.get('damper_deadband', 10.0) # RTU rated parameters (e.g., capacity) self.eer = float(config.get("EER", 10)) tonnage = 
float(config.get("tonnage")) if tonnage: self.cfm = 300*tonnage self.csv_input = int(config["csv_input"]) # Point names for input file (CSV) or BACnet config self.timestamp_name = config.get('timestamp_name') self.input_file = config.get('input_file', 'CONFIG_ERROR') self.oat_name = config.get('oat_point_name') self.rat_name = config.get('rat_point_name') self.mat_name = config.get('mat_point_name') self.fan_status_name = config.get('fan_status_point_name') self.cool_cmd_name = config.get('cool_cmd_name') self.heat_cmd_name = config.get('heat_cmd_name') self.damper_name = config.get('damper_point_name') # Misc. data configuration parameters self.sampling_rate = config.get('sampling_rate') self.mat_missing = config.get('mixed_air_sensor_missing') # Device occupancy schedule sunday = config.get('Sunday') monday = config.get('Monday') tuesday = config.get('Tuesday') wednesday = config.get('Wednesday') thursday = config.get('Thursday') friday = config.get('Friday') saturday = config.get('Saturday') self.schedule_dict = dict({0: sunday, 1: monday, 2: tuesday, 3: wednesday, 4: thursday, 5: friday, 6: saturday}) # Initialize raw data arrays used during data aggregation self.oaf_raw = [] self.timestamp_raw = [] self.matemp_raw = [] self.oatemp_raw = [] self.ratemp_raw = [] self.cooling_raw = [] self.heating_raw = [] self.damper_raw = [] self.fan_status_raw = [] # Initialize final data arrays used during diagnostics self.oaf = [] self.timestamp = [] self.matemp = [] self.oatemp = [] self.ratemp = [] self.cooling = [] self.heating = [] self.damper = [] self.fan_status = [] self.run_aggregate = None self.names = [config.get('oat_point_name'), config.get('mat_point_name'), config.get('dat_point_name'), config.get('rat_point_name'), config.get('damper_point_name'), config.get('cool_cmd_name'), config.get('fan_status_point_name'), config.get('heat_cmd_name')] self.file = config.get('input_file') def setup(self): '''Enter location for the data file if using text csv. 
Entry can be through file entry window using TKinter or through configuration file as input_file. ''' try: super(Agent, self).setup() _log.info('Running') if self.csv_input: self.file_path = open_file() if self.file_path == '': _log.info('No csv file not found ...') return if (self.file_path == 'File Selected is not a csv' or not self.file_path.endswith('.csv')): _log.info('File must be in CSV format.') return if self.file_path is None and self.input_file == "CONFIG_ERROR": _log.info( 'Check configuration file and add input_file ' 'parameter as file path to data file') return if self.file_path is None: self.file_path = self.file self.bldg_data = read_oae_pandas(self.file_path, self.names) self.process_data() except: _log.exception('Error:' + str(sys.exc_info()[0])) def process_data(self): '''Aggregate the data based on cooling status, heating status, and supply-fan status where one hour is the largest aggregated interval. ''' _log.info('Processing data') timestamp = [] if self.csv_input: timestamp_ = self.bldg_data[self.timestamp_name].tolist() matemp = self.bldg_data[self.mat_name].tolist() oatemp = self.bldg_data[self.oat_name].tolist() ratemp = self.bldg_data[self.rat_name].tolist() cooling = self.bldg_data[self.cool_cmd_name].tolist() heating = self.bldg_data[self.heat_cmd_name].tolist() damper = self.bldg_data[self.damper_name].tolist() fan_status = self.bldg_data[self.fan_status_name].tolist() else: timestamp_ = self.timestamp_raw matemp = self.matemp_raw oatemp = self.oatemp_raw ratemp = self.ratemp_raw cooling = self.cooling_raw heating = self.heating_raw damper = self.damper_raw fan_status = self.fan_status_raw for item in timestamp_: timestamp.append(dateutil.parser. 
parse(item, fuzzy=True)) if self.aggregate_data: temp_damper = [] temp_mat = [] temp_oat = [] temp_rat = [] for points in xrange(0, len(timestamp)-1): temp_damper.append(damper[points]) temp_oat.append(oatemp[points]) temp_mat.append(matemp[points]) temp_rat.append(ratemp[points]) if timestamp[points].hour != timestamp[points+1].hour: self.timestamp.append((timestamp[points] + datetime.timedelta(hours=1)). replace(minute=0)) temp_oat[:] = (value for value in temp_oat if value != 0) temp_rat[:] = (value for value in temp_rat if value != 0) temp_mat[:] = (value for value in temp_mat if value != 0) self.damper.append(numpy.mean(temp_damper)) self.oatemp.append(numpy.mean(temp_oat)) self.matemp.append(numpy.mean(temp_mat)) self.ratemp.append(numpy.mean(temp_rat)) self.cooling.append(cooling[points]) self.fan_status.append(fan_status[points]) self.heating.append(heating[points]) temp_damper = [] temp_mat = [] temp_oat = [] temp_rat = [] elif (cooling[points+1] != cooling[points] or heating[points+1] != heating[points] or fan_status[points+1] != fan_status[points] or ((timestamp[points+1] - timestamp[points] > datetime.timedelta(minutes=self.sampling_rate)))): self.timestamp.append(timestamp[points]) temp_oat[:] = (value for value in temp_oat if value != 0) temp_rat[:] = (value for value in temp_rat if value != 0) temp_mat[:] = (value for value in temp_mat if value != 0) self.damper.append(numpy.mean(temp_damper)) self.oatemp.append(numpy.mean(temp_oat)) self.matemp.append(numpy.mean(temp_mat)) self.ratemp.append(numpy.mean(temp_rat)) self.cooling.append(cooling[points]) self.fan_status.append(fan_status[points]) self.heating.append(heating[points]) temp_damper = [] temp_mat = [] temp_oat = [] temp_rat = [] if (points == len(timestamp) - 2 and not temp_oat): temp_damper.append(damper[points+1]) temp_oat.append(oatemp[points+1]) temp_mat.append(matemp[points+1]) temp_rat.append(ratemp[points+1]) self.timestamp.append(timestamp[points+1]) temp_oat[:] = (value for value in 
temp_oat if value != 0) temp_rat[:] = (value for value in temp_rat if value != 0) temp_mat[:] = (value for value in temp_mat if value != 0) self.damper.append(numpy.mean(temp_damper)) self.oatemp.append(numpy.mean(temp_oat)) self.matemp.append(numpy.mean(temp_mat)) self.ratemp.append(numpy.mean(temp_rat)) self.cooling.append(cooling[points+1]) self.fan_status.append(fan_status[points+1]) self.heating.append(heating[points+1]) temp_damper = [] temp_mat = [] temp_oat = [] temp_rat = [] else: self.timestamp = timestamp self.matemp = matemp self.oatemp = oatemp self.ratemp = ratemp self.cooling = cooling self.heating = heating self.damper = damper self.fan_status = fan_status self.oaf_raw = [] self.timestamp_raw = [] self.matemp_raw = [] self.oatemp_raw = [] self.ratemp_raw = [] self.cooling_raw = [] self.heating_raw = [] self.damper_raw = [] self.fan_status_raw = [] self.newdata = len(self.timestamp) def check_nan(data): '''check for any nan values in data.''' length = len(data) for x in xrange(0, length): if math.isnan(data[x]): data[x] = -99 return data self.newdata = len(self.timestamp) self.matemp = check_nan(self.matemp) self.oatemp = check_nan(self.oatemp) self.ratemp = check_nan(self.ratemp) self.cooling = check_nan(self.cooling) self.heating = check_nan(self.heating) self.damper = check_nan(self.damper) self.fan_status = check_nan(self.fan_status) self.oaf = self.calculate_oaf() self.output_aggregate() _log.info('Performing Diagnostic') oae_1 = self.sensor_diagnostic() oae_2 = self.economizer_diagnostic1() oae_3 = self.economizer_diagnostic2() oae_4 = self.excess_oa_intake() oae_5 = self.insufficient_ventilation() oae_6 = self.schedule_diagnostic() energy_impact = self.calculate_energy_impact(oae_2, oae_3, oae_4) contents = [self.timestamp, oae_1, oae_2, oae_3, oae_4, oae_5, oae_6, energy_impact, self.oaf] result_writer(contents) _log.info('Processing Done!') def output_aggregate(self): '''output_aggregate writes the results of the data aggregation to file for 
inspection. ''' file_path = inspect.getfile(inspect.currentframe()) out_dir = os.path.dirname(os.path.realpath(file_path)) now = datetime.date.today() file_path = os.path.join(out_dir, "Aggregate_Data({ts}).csv". format(ts=now)) ofile = open(file_path, 'wb') x = [self.timestamp, self.oatemp, self.matemp, self.ratemp, self.damper, self.cooling, self.heating, self.fan_status] outs = csv.writer(ofile, dialect='excel') writer = csv.DictWriter(ofile, fieldnames=["Timestamp", "OutsideAirTemp", "MixedAirTemp", "ReturnAirTemp", "Damper", "CoolingStatus", "Heating", "FanStatus"], delimiter=',') writer.writeheader() for row in itertools.izip_longest(*x): outs.writerow(row) ofile.close() def calculate_oaf(self): '''Create OAF vector for data set.''' for points in xrange(0, self.newdata): if (self.matemp[points] != -99 and self.oatemp[points] != -99 and self.ratemp[points] != -99 and math.fabs(self.oatemp[points] - self.ratemp[points]) > 4.0 and int(self.fan_status[points]) == 1): self.oaf.append(( self.matemp[points] - self.ratemp[points]) / (self.oatemp[points] - self.ratemp[points])) else: self.oaf.append(int(-99)) return self.oaf def sensor_diagnostic(self): oae1_result = [] for points in xrange(0, self.newdata): if self.fan_status[points] != -99: if int(self.fan_status[points]): if (self.matemp[points] != -99 and self.ratemp[points] != -99 and self.oatemp[points] != -99): if ((int(self.matemp_missing)) and (int(self.cooling[points]) or int(self.heating[points]))): oae1_result.append(22) elif (self.matemp[points] < self.mat_low or self.matemp[points] > self.mat_high): # Temperature sensor problem detected (fault). oae1_result.append(23) elif (self.ratemp[points] < self.rat_low or self.ratemp[points] > self.rat_high): # Temperature sensor problem detected (fault). oae1_result.append(24) elif (self.oatemp[points] < self.oat_low or self.oatemp[points] > self.oat_high): # Temperature sensor problem detected (fault). 
# NOTE(review): OAE1 result codes: 2x series — 20 no fault, 21-25 sensor faults,
# 27 missing data, 29 unit off. The append(25) below pairs with the OAT bounds check
# at the end of the previous physical line.
oae1_result.append(25) elif ((self.matemp[points] - self.ratemp[points] > self.temp_sensor_threshold and self.matemp[points] - self.oatemp[points] > self.temp_sensor_threshold) or (self.matemp[points] - self.ratemp[points] > self.temp_sensor_threshold and self.oatemp[points] - self.matemp[points] > self.temp_sensor_threshold)): # Temperature sensor problem detected (fault). oae1_result.append(21) else: # No faults detected. oae1_result.append(20) else: # Missing required data for diagnostic (No fault). oae1_result.append(27) else: # Unit is off (No Fault). oae1_result.append(29) else: # Missing required data for diagnostic (No fault). oae1_result.append(27) return oae1_result def economizer_diagnostic1(self): oae2_result = [] for points in xrange(0, self.newdata): if self.fan_status[points] != -99: if self.fan_status[points]: if (self.oaf[points] != -99 and self.cooling[points] != -99 and self.damper[points] != -99): if((self.ratemp[points] - self.oatemp[points] > self.temp_deadband and self.cooling[points] and self.economizer_type == 0.0) or (self.high_limit - self.oatemp[points] > self.temp_deadband and self.cooling[points] and self.economizer_type == 1.0)): if ((100.0 - self.damper[points]) < self.oae2_damper_threshold): if (math.fabs( self.oatemp[points] - self.ratemp[points]) > 5.0 and not self.matemp_missing): if (1.0 - self.oaf[points] < self.oae2_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # No fault detected. oae2_result.append(30) elif (1.0 - self.oaf[points] > self.oae2_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # OAF is too low (Fault). oae2_result.append(32) else: # OAF resulted in unexpected # value (No fault). 
oae2_result.append(38) elif not (( self.heating[points] and self.cooling[points]) and math.fabs( self.oatemp[points] - self.ratemp[points]) > 5.0 and self.matemp_missing): if ( 1.0 - self.oaf[points] < self.oae2_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): oae2_result.append(30) elif (1.0 - self.oaf[points] > self.oae2_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # OAF is too low when unit is # economizing (Fault). oae2_result.append(32) else: oae2_result.append(38) else: oae2_result.append(36) else: # Damper is not open when conditions # are favorable for economizing (Fault). oae2_result.append(33) else: oae2_result.append(31) else: # Missing data (No fault). oae2_result.append(37) else: # Supply fan is off (No fault). oae2_result.append(39) else: oae2_result.append(37) return oae2_result def economizer_diagnostic2(self): oae3_result = [] for points in xrange(0, self.newdata): if self.fan_status[points] != -99: if self.fan_status[points]: if (self.cooling[points] != -99 and self.ratemp[points] != -99 and self.oatemp[points] != -99 and self.damper[points] != -99): if((self.oatemp[points] - self.ratemp[points] > self.temp_deadband and self.economizer_type == 0.0) or (self.oatemp[points] - self.high_limit > self.temp_deadband and self.economizer_type == 1.0)): if self.damper[points] <= self.damper_minimum: # No fault detected. oae3_result.append(40) else: # Damper should be at minimum # for ventilation(Fault). oae3_result.append(41) else: # Conditions are favorable for economizing oae3_result.append(43) else: # Missing Data (No fault). oae3_result.append(47) else: # Supply fan is off (No fault). oae3_result.append(49) else: # Missing data (No fault). 
oae3_result.append(47) return oae3_result def excess_oa_intake(self): oae4_result = [] for points in xrange(0, self.newdata): if self.fan_status[points] != -99: if self.fan_status[points]: if (self.cooling[points] != -99 and self.oaf[points] != -99 and self.damper[points] != -99): if((self.oatemp[points] - self.ratemp[points] > self.temp_deadband and self.economizer_type == 0.0) or (self.oatemp[points] - self.high_limit > self.temp_deadband and self.economizer_type == 1.0)): if self.damper[points] <= self.damper_minimum: if (not self.matemp_missing and math.fabs(self.oatemp[points] - self.ratemp[points]) > 5.0): if ((self.oaf[points] - self.minimum_oa) < self.oae4_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # No Fault detected. oae4_result.append(50) elif ((self.oaf[points] - self.minimum_oa) > self.oae4_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # Excess OA intake (Fault). oae4_result.append(51) else: # OAF calculation resulted in an # unexpected value (No fault). oae4_result.append(58) elif (not int(self.heating[points]) and not int(self.cooling[points]) and math.fabs(self.oatemp[points] - self.ratemp[points]) > 5.0 and self.matemp_missing): if ( self.oaf[points] - self.minimum_oa < self.oae4_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # No fault detected. oae4_result.append(50) elif ((self.oaf[points] - self.minimum_oa) > self.oae4_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # The unit is bringing in excess # OA (Fault). oae4_result.append(51) else: # OAF calculation resulted in # unexpected value (No Fault). oae4_result.append(58) else: # Conditions are not favorable for OAF # calculation (No Fault). oae4_result.append(52) else: # Damper is not at minimum (Fault). oae4_result.append(53) else: # Unit may be economizing (No fault). oae4_result.append(56) else: # Missing data (No fault). oae4_result.append(57) else: # Supply fan is off (No Fault). 
oae4_result.append(59) else: # Missing data (No fault). oae4_result.append(57) return oae4_result def insufficient_ventilation(self): oae5_result = [] for points in xrange(0, self.newdata): if self.fan_status[points] != -99: if int(self.fan_status[points]): if (self.cooling[points] != -99 and self.oaf[points] != -99 and self.damper[points] != -99): if (self.damper_minimum - self.damper[points] <= self.damper_deadband): if ((math.fabs(self.oatemp[points] - self.ratemp[points]) > 5.0) and not self.matemp_missing): if ((self.minimum_oa - self.oaf[points]) > self.oae5_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # Unit is bringing in insufficient # OA (Fault) oae5_result.append(61) elif ( self.minimum_oa - self.oaf[points] < self.oae5_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): # No problem detected. oae5_result.append(60) else: # Unexpected result for OAF calculation # (No Fault) oae5_result.append(68) elif (math.fabs(self.oatemp[points] - self.ratemp[points]) > 5.0 and self.matemp_missing and not int(self.cooling[points]) and int(self.heating[points])): if ((self.minimum_oa - self.oaf[points]) > self.oae5_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): oae5_result.append(61) # Insufficient OA (Fault) elif ((self.minimum_oa - self.oaf[points]) < self.oae5_oaf_threshold and self.oaf[points] > 0 and self.oaf[points] < 1.25): oae5_result.append(60) # No Fault else: # Unexpected result for OAF calculation # (No Fault). oae5_result.append(68) else: # Conditions are not favorable for # OAF calculation (No Fault). oae5_result.append(62) else: # Damper is significantly below the minimum # damper set point (Fault) oae5_result.append(63) else: # Missing required data (No fault) oae5_result.append(67) else: # Unit is off (No fault). 
# NOTE(review): In calculate_energy_impact below, the second and third
# `elif (oae_3[points] == 41 or ...)` branches have byte-identical conditions, so the
# third branch (the alternative energy-impact formula) is unreachable — presumably one
# of the comparisons was meant to differ; confirm against the original source.
oae5_result.append(69) else: oae5_result.append(67) # Missing data (No Fault) return oae5_result def schedule_diagnostic(self): oae6_result = [] for points in xrange(0, self.newdata): if (self.fan_status[points] != -99 and self.cooling[points] != -99): if (int(self.fan_status[points]) or int(self.cooling[points])): day = self.timestamp[points].weekday() sched = self.schedule_dict[day] start = int(sched[0]) end = int(sched[1]) if (self.timestamp[points].hour < start or self.timestamp[points].hour > end): oae6_result.append(71) else: oae6_result.append(70) else: oae6_result.append(70) else: oae6_result.append(77) return oae6_result def calculate_energy_impact(self, oae_2, oae_3, oae_4): energy_impact = [] month_abbr = {k: v for k, v in enumerate(calendar.month_abbr)} if not self.matemp_missing: for points in xrange(0, self.newdata): if oae_2[points] == 32 or oae_2[points] == 33: energy_impact.append( 1.08*self.cfm*(self.matemp[points] - self.oatemp[points]) / (1000*self.eer)) elif (oae_3[points] == 41 or oae_4[points] == 51 or oae_4[points] == 53 and self. oatemp[points] > self.matemp[points]): ei = 1.08*self.cfm/(1000*self.eer) ei = ei*(self.matemp[points] - (self.oatemp[points] * self.minimum_oa + self.ratemp[points] * (1 - self.minimum_oa))) energy_impact.append(ei) elif (oae_3[points] == 41 or oae_4[points] == 51 or oae_4[points] == 53 and self. oatemp[points] > self.matemp[points]): ei = (1.08*( self.oatemp[points]*self.minimum_oa + self.ratemp[points]*(1 - self.minimum_oa)) - self.cfm*(self.matemp[points])/(1000*self.eer)) energy_impact.append(ei) else: energy_impact.append(0) if energy_impact[points] < 0: energy_impact[points] = 0 return energy_impact @matching.match_exact(topics.DEVICES_VALUE(point='all', **rtu_path)) def datahandler(self, topic, header, message, match): '''Subscribes to data and assembles raw data arrays. 
data_handler subscribes to a device or simulated device on the message bus and assembles the array (lists) of data to be aggregated for analysis. ''' data = jsonapi.loads(message[0]) _log.info('Getting Data from message bus') publisher_id = header.get('AgentID', 0) if ((self.run_aggregate is False or self.run_aggregate is None) and publisher_id != 'publisher'): _log.info('Real-time device data.') self.run_aggregate = True event_time = (datetime.datetime.now(). replace(hour=0, minute=0, second=0) + datetime.timedelta(days=1)) event = sched.Event(self.process_data) self.schedule(event_time, event) self.oaf_raw = [] self.timestamp_raw = [] self.matemp_raw = [] self.oatemp_raw = [] self.ratemp_raw = [] self.cooling_raw = [] self.heating_raw = [] self.damper_raw = [] self.fan_status_raw = [] elif publisher_id == 'publisher': _log.info('Simulated device data.') if self.run_aggregate is None: self.prev_time = dateutil.parser.parse( data[self.timestamp_name] ) self.run_aggregate = True time = dateutil.parser.parse(data[self.timestamp_name], fuzzy=True) time_delta = time - self.prev_time time_check = time + time_delta self.timestamp_raw.append(time) self.fan_status_raw.append(data[self.fan_status_name]) self.cooling_raw.append(data[self.coolcmd1_name]) self.heating_raw.append(data[self.heat_cmd1_name]) self.damper_raw.append(data[self.damper_name]) self.oatemp_raw.append(data[self.oat_name]) self.ratemp_raw.append(data[self.rat_name]) self.matemp_raw.append(data[self.mat_name]) if time.day < time_check.day: self.timestamp_raw.append(time_check) self.process_data() self.oaf_raw = [] self.timestamp_raw = [] self.oatemp_raw = [] self.ratemp_raw = [] self.cooling_raw = [] self.heating_raw = [] self.damper_raw = [] self.fan_status_raw = [] self.prev_time = time if publisher_id != 'publisher': self.timestamp_raw.append(datetime.datetime.now()) self.fan_status_raw.append(data[self.fan_status_name]) self.cooling_raw.append(data[self.coolcmd1_name]) 
self.heating_raw.append(data[self.heat_cmd1_name]) self.damper_raw.append(data[self.damper_name]) self.oatemp_raw.append(data[self.oat_name]) self.ratemp_raw.append(data[self.rat_name]) self.matemp_raw.append(data[self.mat_name]) Agent.__name__ = 'passiveafdd' return Agent(**kwargs)
# PACIFIC NORTHWEST NATIONAL LABORATORY # operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY # under Contract DE-AC05-76RL01830 #}}} import errno import logging import os #from mysql import connector from zmq.utils import jsonapi from basedb import DbDriver from volttron.platform.agent import utils utils.setup_logging() _log = logging.getLogger(__name__) class MySqlFuncts(DbDriver): def __init__(self, **kwargs): #kwargs['dbapimodule'] = 'mysql.connector' super(MySqlFuncts, self).__init__('mysql.connector', **kwargs) def query(self, topic, start=None, end=None, skip=0, count=None, order="FIRST_TO_LAST"): """This function should return the results of a query in the form: {"values": [(timestamp1, value1), (timestamp2, value2), ...], "metadata": {"key1": value1, "key2": value2, ...}} metadata is not required (The caller will normalize this to {} for you)
print os.path.basename(__file__)+"inside sock create connecttoZMQ" now = datetime.utcnow().isoformat(' ') + 'Z' headers = { headers_mod.CONTENT_TYPE: content_type, headers_mod.DATE: now, } print "created headers" #message = json.loads(message) print topic #self.publish("test","test","test msg") self.publish(topic, headers, message) print "published" ''' from datetime import datetime from volttron.lite.agent import BaseAgent, PublishMixin from volttron.lite.messaging import headers as headers_mod from volttron.lite.agent import utils import logging utils.setup_logging() _log = logging.getLogger(__name__) class ZMQ_PUB(PublishMixin, BaseAgent): def __init__(self, configpath,**kwargs): print "yeay..in" print "entering super"