def inside_temperature(self, cfg):
    """Return the last ~10 minutes of inside temperature as a 1-minute series.

    Queries the archiver for the zone's thermostat temperature UUID and
    returns the resampled 'tin' column, with zero readings (sensor
    dropouts) forward-filled.
    """
    # Connect through the hard-coded docker agent when running server-side.
    if cfg['Server']:
        c = get_client(agent='172.17.0.1:28589', entity="thanos.ent")
    else:
        c = get_client()
    archiver = DataClient(c)
    uuids = [cfg['UUIDS']['thermostat_temperature']]

    temp_now = self.current_time
    # NOTE(review): 'start' is the later timestamp and 'end' the earlier one;
    # window_uuids is called as (uuids, end, start, ...) so the window is the
    # 10 minutes leading up to now.
    start = '"' + temp_now.strftime('%Y-%m-%d %H:%M:%S') + ' PST"'
    end = '"' + (temp_now - datetime.timedelta(minutes=10)
                 ).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'

    dfs = make_dataframe(
        archiver.window_uuids(uuids, end, start, '1min', timeout=120))

    for uid, df in dfs.items():
        if uid == uuids[0]:
            if 'mean' in df.columns:
                df = df[['mean']]
            df.columns = ['tin']
        dfs[uid] = df.resample('1min').mean()

    # Python 2: dict.items() returns a list, so indexing works here.
    uid, df = dfs.items()[0]
    # Replace zero readings with the previous valid value.
    df['tin'] = df['tin'].replace(to_replace=0, method='pad')
    return df['tin']
def normal_schedule(self):
    """Run the weekday/weekend schedule, honoring DR prices when simpleDr is on.

    Weekdays between 07:00 and 18:00 run the active schedule unless the DR
    price for the current hour exceeds 0.8; nights and weekends run the
    inactive schedule.
    """
    if self.simpleDr == True:
        # Pull the DR price signal. `data` is only bound when a payload
        # object with the expected type (2, 9, 9, 9) is present.
        if self.server:
            c = get_client(agent=self.agent, entity=self.entity)
        else:
            c = get_client()
        msg = c.query("xbos/events/dr/s.dr/sdb/i.xbos.dr_signal/signal/signal")[0]
        for po in msg.payload_objects:
            if po.type_dotted == (2, 9, 9, 9):
                data = msgpack.unpackb(po.content)
                print "DR EVENT"
    weekno = self.now.weekday()
    if weekno < 5:  # Monday-Friday
        now_time = self.now.time()
        if now_time >= datetime.time(18, 0) or now_time < datetime.time(7, 0):
            self.workday_inactive()
        else:
            # ind=(now_time.hour+8)%24
            ind = (now_time.hour) % 24
            #print data[ind]
            # NOTE(review): if simpleDr is set but no matching DR payload was
            # found above, `data` is unbound here and this raises NameError.
            if self.simpleDr == True and data[ind]['Price'] > 0.8:
                self.workday_inactive()
            else:
                self.workday()
    else:
        self.workday_inactive()
def normal_schedule(SimpleDR=False):
    """Module-level variant of the weekday/weekend schedule with optional DR.

    Uses the module-level `cfg` and the module-level workday()/
    workday_inactive() actions. When SimpleDR is set, the hourly DR price
    signal is fetched and prices above 0.8 force the inactive schedule
    during working hours.
    """
    if SimpleDR == True:
        # Pull the DR price signal; `data` is only bound when a payload
        # object with the expected type (2, 9, 9, 9) is present.
        if cfg['Server']:
            c = get_client(agent='172.17.0.1:28589', entity="thanos.ent")
        else:
            c = get_client()
        msg = c.query(
            "xbos/events/dr/s.dr/sdb/i.xbos.dr_signal/signal/signal")[0]
        for po in msg.payload_objects:
            if po.type_dotted == (2, 9, 9, 9):
                data = msgpack.unpackb(po.content)
                print "DR EVENT", data
    # Current weekday in Pacific time (0=Monday).
    weekno = datetime.datetime.utcnow().replace(
        tzinfo=pytz.timezone("UTC")).astimezone(
            tz=pytz.timezone("America/Los_Angeles")).weekday()
    if weekno < 5:  # Monday-Friday
        now = datetime.datetime.now(pytz.timezone("America/Los_Angeles"))
        now_time = now.time()
        if now_time >= datetime.time(18, 0) or now_time < datetime.time(7, 0):
            workday_inactive()
        else:
            #ind=(now_time.hour+8)%24
            ind = (now_time.hour) % 24
            print data[ind]
            # NOTE(review): if SimpleDR is set but no matching DR payload was
            # found above, `data` is unbound here and this raises NameError.
            if SimpleDR == True and data[ind]['Price'] > 0.8:
                workday_inactive()
            else:
                workday()
    else:
        workday_inactive()
def getMdalClient(pelicanConfig):
    """Create an MDAL client, authenticating with the configured XBOS entity.

    Args:
        pelicanConfig: dict; when it contains "xbosEntityPath", that entity
            file is used for the underlying BOSSWAVE client, otherwise the
            environment defaults apply.

    Returns:
        An mdal.MDALClient connected over "xbos/mdal".
    """
    # Membership test on the dict directly; the old `in list(d.keys())`
    # built a throwaway list for an O(1) lookup.
    if "xbosEntityPath" in pelicanConfig:
        c = get_client(entity=pelicanConfig["xbosEntityPath"])
    else:
        c = get_client()
    # Build the MDAL client once, from whichever bosswave client we got
    # (the original duplicated this call in both branches).
    return mdal.MDALClient("xbos/mdal", client=c)
def __init__(self, cfg, now=None):
    """Set up the data manager and its bosswave client.

    Args:
        cfg: config dict with "Data_Manager" and "Interval_Length" sections.
        now: aware datetime anchoring queries; defaults to the current UTC
            time. BUG FIX: the old default (`now=datetime.datetime.utcnow()
            .replace(...)` in the signature) was evaluated ONCE at import
            time, so every later instantiation silently reused a stale
            timestamp. Computing it per call fixes that; callers that pass
            `now` explicitly are unaffected.
    """
    self.cfg = cfg
    self.pytz_timezone = cfg["Data_Manager"]["Pytz_Timezone"]
    self.zone = cfg["Data_Manager"]["Zone"]
    self.interval = cfg["Interval_Length"]
    if now is None:
        now = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
    self.now = now
    # Server deployments talk through a configured agent/entity pair.
    if cfg["Data_Manager"]["Server"]:
        self.c = get_client(agent=cfg["Data_Manager"]["Agent_IP"],
                            entity=cfg["Data_Manager"]["Entity_File"])
    else:
        self.c = get_client()
def __init__(self):
    """Load thermostats for every building and verify zone coverage.

    Populates self.building_tstats from hod/xbos_services_getter and exits
    the process if any configured zone is missing or the fetch fails.
    """
    # get a bosswave client (defaults to $BW2_AGENT, $BW2_DEFAULT_ENTITY)
    client = get_client()
    # Get hod client.
    hod_client = HodClient("xbos/hod", client)

    self.building_tstats = {}
    hod_xsg_match = True
    try:
        for bldg in XSG_ALL_BUILDINGS:
            # Getting the tstats for the building.
            self.building_tstats[bldg] = get_all_thermostats(
                client, hod_client, bldg)
            if not set(XSG_ALL_ZONES[bldg]).issubset(
                    set(self.building_tstats[bldg])):
                # Collect the zones hod knows about but xsg does not.
                missing_zones = [
                    zone for zone in XSG_ALL_ZONES[bldg]
                    if zone not in self.building_tstats[bldg]
                ]
                logging.critical(
                    "zone mismatch between hod and xbos_services_getter for bldg: %s \nhod_zones:%s\nmissing zones:%s\n",
                    bldg, self.building_tstats[bldg].keys(), missing_zones)
                hod_xsg_match = False
        if not hod_xsg_match:
            # BUG FIX: was sys.exit(0), which reports SUCCESS to the shell
            # and any supervisor; a zone mismatch is a failure.
            sys.exit(1)
    except Exception:
        tb = traceback.format_exc()
        logging.critical("failed to get thermostats\n%s", tb)
        # BUG FIX: likewise exit nonzero on a fetch failure (was exit(0)).
        sys.exit(1)
def __init__(self):
    """Wire up the bosswave/HodDB/archiver clients and load occupancy frames."""
    # Bosswave client bound to the local agent with a read-only CIEE entity.
    bw_client = get_client(
        entity="/Users/Daniel/CIEE/SetUp/ciee_readonly.ent",
        agent="127.0.0.1:28589")

    # HodDB (Brick metadata) client for the CIEE site.
    self.hod = HodClient("ciee/hod", bw_client)
    # Archiver client backed by the ucberkeley archiver.
    self.archiver = DataClient(bw_client, archivers=["ucberkeley"])

    # Pre-compute the occupancy views: per-sensor, whole-building, per-zone.
    self.zone_sensor_df = self.get_occupancy()
    self.building_df = self.get_building_occupany()
    self.zone_df = self.get_zone_occupancy()
def __init__(self, cfg, now=None, heat=4000, cool=4000, vent=500):
    """Load the previous 8 hours of energy rates from the archiver.

    Args:
        cfg: config dict with 'Server', 'UUIDS' and 'energy_rates' keys.
        now: aware datetime anchoring the query window; defaults to the
            current Pacific time. BUG FIX: the old signature default was
            evaluated once at import time (stale timestamp), and callers
            that explicitly pass now=None (e.g. getData's
            `EnergyConsumption(prices, interval, now=None, ...)`) crashed
            on `now.strftime`. Both are handled here.
        heat, cool, vent: consumption figures for heating/cooling/ventilation.
    """
    self.heat = heat
    self.cool = cool
    self.vent = vent
    self.mode = cfg['energy_rates']
    if now is None:
        now = datetime.datetime.utcnow().replace(
            tzinfo=pytz.timezone("UTC")).astimezone(
                tz=pytz.timezone("America/Los_Angeles"))
    self.now = now

    # query the server to learn the energy rates of the last 8 hours
    if cfg['Server']:
        c = get_client(agent='172.17.0.1:28589', entity="thanos.ent")
    else:
        c = get_client()
    archiver = DataClient(c)
    uuids = [cfg['UUIDS']['energy_cost']]

    # 'start' is the later timestamp; window_uuids takes (end, start).
    start = '"' + (now).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'
    end = '"' + (
        now - timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'

    dfs = make_dataframe(
        archiver.window_uuids(uuids, end, start, '15min', timeout=120))

    for uid, df in dfs.items():
        if uid == uuids[0]:
            if 'mean' in df.columns:
                df = df[['mean']]
            df.columns = ['cost']
        dfs[uid] = df.resample('15min').mean()

    # Python 2: dict.items() returns a list, so indexing works here.
    uid, self.df = dfs.items()[0]
def __init__(self, cfg, zones, normal_zones, now=None):
    """Store scheduler configuration and connection details.

    Args:
        cfg: config dict with "SimpleDR" and a "Data_Manager" section.
        zones: mapping of zone name -> thermostat actuator.
        normal_zones: zones controlled by the normal schedule.
        now: aware datetime; defaults to the current UTC time. BUG FIX:
            the old signature default was evaluated once at import time,
            freezing "now" for the process lifetime.
    """
    self.simpleDr = cfg["SimpleDR"]
    self.server = cfg["Data_Manager"]["Server"]
    self.entity = cfg["Data_Manager"]["Entity_File"]
    self.agent = cfg["Data_Manager"]["Agent_IP"]
    if now is None:
        now = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
    self.now = now.astimezone(tz=pytz.timezone(cfg["Data_Manager"]["Pytz_Timezone"]))

    # query server to get the available zones
    if self.server:
        client = get_client(agent=self.agent, entity=self.entity)
    else:
        client = get_client()

    # NOTE(review): hc, q and client are built but never used below —
    # the zone map is taken from the `zones` argument instead.
    hc = HodClientHTTP("http://ciee.cal-sdb.org")
    q = """SELECT ?uri ?zone WHERE { ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat . ?tstat bf:uri ?uri . ?tstat bf:controls/bf:feeds ?zone . }; """

    self.tstats = zones
    self.normal_zones = normal_zones
def _fetch_dataframe():
    """ Returns a `pandas.DataFrame` with columns
        tin: the current temperature
        a: 0=noop, 1=cooling, 2=heating
        a1: is cooling?
        a2: is heating?
        next_temp: the temperature at the next time step

    Covers the past 7 days at INTERVAL resolution, pulled from the archiver.
    """
    with _silence():
        # set $BW2_AGENT and $BW2_DEFAULT_ENTITY
        archiver = DataClient(get_client())
    now = datetime.now(pytz.timezone('America/Los_Angeles'))
    # 'start' is the later bound (now + 15min headroom), 'end' the earlier;
    # window_uuids takes (end, start).
    start = (now + timedelta(minutes=15)).strftime(DATE_FMT)
    end = (now - timedelta(days=7)).strftime(DATE_FMT)
    dfs = make_dataframe(archiver.window_uuids(UUIDS, end, start, INTERVAL))
    for uid, df in dfs.items():
        if uid == SE_TEMP:
            if 'mean' in df.columns:
                df = df[['mean']]
            df.columns = ['tin']
        elif uid == SE_STATE:
            if 'max' in df.columns:
                df = df[['max']]
            df.columns = ['a']
        dfs[uid] = df.resample(INTERVAL).mean()
    # Align temperature and state on one time index.
    df = pd.concat([dframe for uid, dframe in dfs.items()], axis=1)
    # Binary indicators derived from the (resampled, hence fractional) state.
    df['a1'] = df.apply(lambda row: int(row['a'] > 0 and row['a'] <= 1), axis=1)
    df['a2'] = df.apply(lambda row: int(row['a'] > 1), axis=1)
    # the following are the features used by the baseline model
    df['tin'] = df['tin'].replace(to_replace=0, method='pad')
    df['tin_a1'] = df.tin * df.a1
    df['tin_a2'] = df.tin * df.a2
    df['next_temp'] = df['tin'].shift(-1)
    # the following are necessary because rulinalg complains about ill-conditioning
    # note that numpy does not have this problem
    df.tin_a1 += np.random.randn(len(df.tin)) * 1e-8
    df.tin_a2 += np.random.randn(len(df.tin)) * 1e-8
    df = df.dropna()
    return df
def get_raw_data(building=None, client=None, cfg=None, start=None, end=None,
                 days_back=50, force_reload=False):
    """Fetch (and cache on disk) raw inside/outside data for a building.

    Either `cfg` or `building` must be supplied. Data is cached under
    SERVER_DIR_PATH/Thermal_Data/<building>_{inside,outside}; pass
    force_reload=True to bypass the cache.

    Returns:
        (inside_data, outside_data), or None when the building config
        cannot be read.
    """
    import pickle  # hoisted from mid-function so both cache paths can use it

    assert cfg is not None or building is not None
    if cfg is not None:
        building = cfg["Building"]
    else:
        config_path = SERVER_DIR_PATH + "/Buildings/" + building + "/" + building + ".yml"
        try:
            with open(config_path, "r") as f:
                cfg = yaml.load(f)
        except Exception:
            print("ERROR: No config file for building %s with path %s" % (building, config_path))
            return

    print("----- Get data for Building: %s -----" % building)
    path = SERVER_DIR_PATH + "/Thermal_Data/" + building

    if end is None:
        end = get_utc_now()
    if start is None:
        start = end - datetime.timedelta(days=days_back)

    # Try the on-disk cache first; any failure (including the deliberate
    # assert when force_reload is set) falls through to a fresh fetch.
    try:
        assert not force_reload
        # BUG FIX: the cache is written in binary mode ("wb") below, so it
        # must be read back in binary mode too (was "r", text mode).
        with open(path + "_inside", "rb") as f:
            inside_data = pickle.load(f)
        with open(path + "_outside", "rb") as f:
            outside_data = pickle.load(f)
    except Exception:
        if client is None:
            client = get_client()
        dataManager = ThermalDataManager.ThermalDataManager(cfg, client)
        inside_data = dataManager._get_inside_data(start, end)
        outside_data = dataManager._get_outside_data(start, end)
        with open(path + "_inside", "wb") as f:
            pickle.dump(inside_data, f)
        with open(path + "_outside", "wb") as f:
            pickle.dump(outside_data, f)
    return inside_data, outside_data
def get_prediction(provider):
    """Collect DR-event likelihoods (forecast + confirmed) for a provider.

    Returns:
        A list of {'date': unix_seconds, 'likelihood': str} dicts, or None
        when the forecast signal is entirely absent.
    """
    c = get_client()
    days = []

    # Forecast signal: per-day event likelihood for the coming days.
    msg = c.query(
        '{0}/forecast/demand_response/s.forecast_demand_response/dr/i.xbos.demand_response_forecast/signal/info'
        .format(provider))
    if len(msg) == 0:
        return
    pos = msg[0].payload_objects
    if len(pos) == 0:
        return
    forecasts = msgpack.unpackb(pos[0].content)
    for forecast in forecasts:
        # 'Date' is in nanoseconds since the epoch.
        day = datetime.fromtimestamp(forecast.get('Date') / 1e9)
        print(forecast)
        likelihood = ['unlikely', 'possible', 'likely',
                      'confirmed'][forecast.get('Event_likelihood')]
        print('DR event?', day, '=>', likelihood)
        days.append({
            'date': int(forecast.get('Date') / 1e9),
            'likelihood': likelihood
        })

    # Confirmed signal: today's confirmed/no-event status.
    msg = c.query(
        '{0}/confirmed/demand_response/s.confirmed_demand_response/dr/i.xbos.demand_response_confirmed/signal/info'
        .format(provider))
    if len(msg) == 0:
        # BUG FIX: previously returned None here, silently discarding the
        # forecast days already collected above.
        return days
    pos = msg[0].payload_objects
    if len(pos) == 0:
        return days
    forecast = msgpack.unpackb(pos[0].content)
    day = datetime.fromtimestamp(forecast.get('Date') / 1e9)
    likelihood = ['no event', 'confirmed'][forecast.get('Event_status')]
    print('DR event?', day, '=>', likelihood)
    days.append({
        'date': int(forecast.get('Date') / 1e9),
        'likelihood': likelihood
    })
    return days
def run():
    """Subscribe to prediction requests and serve responses forever.

    Each incoming message carries an optional msgpack dict with
    'predstart'/'predend'/'resolution'; the module-level prediction_fxn is
    invoked and its result published back on a per-client response URI.
    """
    c = get_client(config.AGENT, config.ENTITY)

    def cb(msg):
        # Only dict payloads are valid requests; ignore everything else.
        po = msgpack.unpackb(msg.payload_objects[0].content)
        if not isinstance(po, dict):
            return
        # The requesting client's id is the third URI segment.
        client_id = msg.uri.split('/')[2]
        # Window defaults: today through tomorrow at '1h' resolution.
        start = po.get('predstart')
        start = parse(start) if start else get_today()
        end = po.get('predend')
        end = parse(end) if end else get_today() + datetime.timedelta(
            days=1)
        resolution = po.get('resolution', '1h')
        result = prediction_fxn(start, end, resolution)
        po = PayloadObject((2, 0, 0, 0), None, msgpack.packb(result))
        publish = '{0}/s.predictions/{1}/i.{2}/signal/response'.format(
            namespace, client_id, prediction_type)
        print "Respond on", publish
        c.publish(publish, payload_objects=(po, ))

    c.subscribe(subscribe, cb)
    # Keep the process alive; all work happens in the subscription callback.
    while True:
        time.sleep(10)
# NOTE(review): fragment — the lines above `return pricing` are the tail of a
# pricing function whose definition (and enclosing loops) precede this chunk;
# the indentation below is reconstructed.
            break
        # j is a [start "HH:MM", end "HH:MM", rate] pricing window; when the
        # row's timestamp falls inside it, price the row's consumption.
        if in_between(index.time(),
                      datetime.time(int(j[0].split(":")[0]), int(j[0].split(":")[1])),
                      datetime.time(int(j[1].split(":")[0]), int(j[1].split(":")[1]))):
            pricing.append([index, j[2] * kwH(row["State"])])
            break
    return pricing


with open("cost_config.yml", 'r') as ymlfile:
    cfg = yaml.load(ymlfile)

# Connect through the configured agent when running against a remote server.
if cfg["Server"]:
    c = get_client(agent=cfg["Agent_IP"], entity=cfg["Entity_File"])
else:
    c = get_client()

# Interpret the configured date range in the configured local zone, then
# convert to UTC for the archiver.
uspac = pytz.timezone(cfg["Pytz_Timezone"])
startime = uspac.localize(datetime.datetime.strptime(cfg["Start_Date"], '%Y-%m-%d %H:%M:%S')).astimezone(tz=pytz.utc)
endtime = uspac.localize(datetime.datetime.strptime(cfg["End_Date"], '%Y-%m-%d %H:%M:%S')).astimezone(tz=pytz.utc)

# Price each configured zone's consumption and print the total cost in USD.
for i in cfg["Zones"]:
    df = preprocess_data(UUIDS[i], c, startime, endtime)
    price_array = cfg["Pricing"][cfg["Pricing"]["Energy_Rates"]]
    DRs = cfg["Pricing"]["DRs"]
    costs = cost_calculator(df, DRs, price_array)
    # NOTE(review): the list comprehension reuses `i` (Python 2 leaks the
    # comprehension variable), clobbering the zone name after this line;
    # harmless here only because the print reads `i` first.
    print i + " : " + str(sum([i[1] for i in costs])) + " USD"
def choose_client(cfg=None):
    """Return a bosswave client.

    When a config with a truthy "Server" flag is supplied, connect through
    the configured agent/entity pair; otherwise fall back to the
    environment defaults ($BW2_AGENT, $BW2_DEFAULT_ENTITY).
    """
    use_remote_agent = cfg is not None and cfg["Server"]
    if not use_remote_agent:
        return get_client()
    return get_client(agent=cfg["Agent_IP"], entity=cfg["Entity_File"])
def getData(building, zone, date): """Whatever data we get should be stored. date: in PST""" root, dirs, files = os.walk("CacheThanos/").next() Flag = False for index, thefile in enumerate(files, start=1): if str(building) + str(zone) + str(date) + ".dat" == thefile: Flag = True if Flag == False: # get config cfg = utils.get_config(building) zone_cfg = utils.get_zone_config(building, zone) events = [] zone_log = utils.get_zone_formalog(building, zone) if zone_log: for line in zone_log: dateLog = utils.get_mdal_string_to_datetime( line.split(" : ")[0]) dateLog = dateLog.astimezone(pytz.timezone("US/Pacific")) if dateLog.date() == date.date(): events.append((int( (dateLog.replace(tzinfo=None) - date.replace(tzinfo=None)).total_seconds() / 60), line.split(" : ")[1])) interval = cfg["Interval_Length"] # client = utils.choose_client(cfg) client = get_client() start = date.replace(hour=0, minute=0, second=0) end = date.replace(day=date.day + 1, hour=0, minute=0, second=0) # Generate utc times. Use UTC for any archiver getting methods. 
pst_pytz = pytz.timezone("US/Pacific") start_pst = pst_pytz.localize(start) start_utc = start_pst.astimezone(pytz.timezone("UTC")) end_pst = pst_pytz.localize(end) end_utc = end_pst.astimezone(pytz.timezone("UTC")) datamanager = DataManager(cfg, zone_cfg, client, zone, now=start_utc) # get setpoints ground_truth_setpoints_df = datamanager.thermostat_setpoints( start_utc, end_utc)[zone] # from archiver ground_truth_setpoints_df.index = ground_truth_setpoints_df.index.tz_convert( pst_pytz) config_setpoints_df = datamanager.better_comfortband(start) safety_setpoints_df = datamanager.better_safety(start) config_setpoints = config_setpoints_df[["t_low", "t_high"]].values safety_setpoints = safety_setpoints_df[["t_low", "t_high"]].values # Get tstat and weather data thermal_data_manager = ThermalDataManager(cfg, client) inside_data, outside_data = utils.get_raw_data(building=building, client=client, cfg=cfg, start=start_utc, end=end_utc, force_reload=True) zone_inside_data = inside_data[zone] zone_inside_data.index = zone_inside_data.index.tz_convert(pst_pytz) outside_data = thermal_data_manager._preprocess_outside_data( outside_data.values()) outside_data.index = outside_data.index.tz_convert(pst_pytz) outside_data = outside_data.resample("1T").interpolate() Tin = zone_inside_data["t_in"].values if np.isnan(Tin).any(): print "Warning: Tin contains NaN. Estimates are based on interpolations" nans, x = nan_helper(Tin) Tin[nans] = np.interp(x(nans), x(~nans), Tin[~nans]) # TODO shitty hack # taking the raw data and putting it into a data frame full of nan. Then, interpolating the data to get # data for the whole day. Tout = pd.DataFrame(columns=["t_out"], index=pd.date_range(start=start, end=end, freq="1T")) Tout.index = Tout.index.tz_localize(pst_pytz) Tout["t_out"][outside_data.index[0]:outside_data. 
index[-1]] = outside_data["t_out"] Tout = Tout.ffill()["t_out"].values[:1440] Policy = zone_inside_data["action"].values # Prepare discomfort discomfortManager = Discomfort(setpoints=config_setpoints) # get occupancies occupancy_config = datamanager.better_occupancy_config(start) try: occupancy_ground = datamanager.occupancy_archiver(start=start, end=end) except: if zone_cfg["Advise"]["Occupancy_Sensors"] == True: print("Warning, could not get ground truth occupancy.") occupancy_ground = None if occupancy_ground is None: occupancy_use = occupancy_config else: occupancy_use = occupancy_ground occupancy_use = occupancy_use["occ"].values discomfort = [] for i in range(len(Tin)): # for the ith minute print len(Tin), len(occupancy_use) assert len(Tin) <= len(occupancy_use) tin = Tin[i] occ = occupancy_use[i] discomfort.append( discomfortManager.disc(t_in=tin, occ=occ, node_time=i, interval=1)) # get consumption and cost and prices prices = datamanager.better_prices(start).values heating_consumption = zone_cfg["Advise"]["Heating_Consumption"] cooling_consumption = zone_cfg["Advise"]["Cooling_Consumption"] energy_manager = EnergyConsumption(prices, interval, now=None, heat=heating_consumption, cool=cooling_consumption) cost = [] for i in range(len(Policy)): # see it as the ith minute. That's why we need the assert assert len(Policy) <= len(prices) action = Policy[i] cost.append(energy_manager.calc_cost(action=action, time=i)) cost = np.array(cost) # Cache the data and check if already downloaded! 
OPs = occupancy_use[:1440] TinsUPComfortBand = config_setpoints_df["t_high"][:1440] TinsDOWNComfortBand = config_setpoints_df["t_low"][:1440] TinsUPSafety = safety_setpoints_df["t_high"][:1440] TinsDOWNSafety = safety_setpoints_df["t_low"][:1440] TinsUPsp = ground_truth_setpoints_df["t_high"][:1440] TinsDOWNsp = ground_truth_setpoints_df["t_low"][:1440] Costs = cost[:1440] Prices = prices[:1440] Discomforts = discomfort[:1440] temp = OPs, Tin, Tout, Policy, TinsUPComfortBand, TinsDOWNComfortBand, TinsUPSafety, TinsDOWNSafety, TinsUPsp, TinsDOWNsp, Costs, Prices, Discomforts, events, building, zone, date pickle.dump( temp, open( "CacheThanos/" + str(building) + str(zone) + str(Date) + ".dat", "wb")) return temp else: return pickle.load( open( "CacheThanos/" + str(building) + str(zone) + str(date) + ".dat", "rb"))
from xbos import get_client # for interacting with archiver from xbos.services.pundat import DataClient, timestamp, make_dataframe, merge_dfs # for performing Brick queries from xbos.services.hod import HodClient # for interacting with the thermostat control state from xbos.devices.thermostat import Thermostat # for deserializing messages import msgpack import time import pandas as pd # get a bosswave client c = get_client() # defaults to $BW2_AGENT, $BW2_DEFAULT_ENTITY # get a HodDB client hod = HodClient("ciee/hod", c) # get an archiver client archiver = DataClient(c, archivers=["ucberkeley"]) # mode OFF = 0 HEAT = 1 COOL = 2 AUTO = 3 # store zone name to thermostat zone2tstat = {} # store zone name to meter for the RTU for that zone zone2meter = {}
# NOTE(review): fragment — `now` is bound earlier in this file, outside this chunk.
# Query window: the 40 days leading up to `now`, at 1-minute resolution.
start = (now - timedelta(days=40)).strftime('%Y-%m-%d %H:%M:%S %Z')
WINDOW = '1min'

# data clients.
# To get the client we usually need a client for BOSSWAVE our decentralized operating system.
# Easiest way to get it is by using get_client() which you import from xbos. Other ways include entity files.
# https://github.com/SoftwareDefinedBuildings/XBOS.
# To use xbos make sure to get an entity file from Thanos and to get a executable file which
# connects you to the system. Also, make sure to set the entity file in your bash_profile with
# export BW2_DEFAULT_ENTITY=path to .ent file

# The MDAL client gets the data from our database. The query to get the data is illustrated by,
# buidling_meteres_query_mdal and lighting_meter_query_mdal.
# Documentation: https://docs.xbos.io/mdal.html#using and https://github.com/gtfierro/mdal <- better
mdal = MDALClient("xbos/mdal", client=get_client())

# HODClient gets the uuid for data. This uses brick which is a language built on SPARQL.
# Can be trick to use.
# To try your own queries go to: corbusier.cs.berkeley.edu:47808. And try the queries we set up below.
# Documentation: for brick: brickschema.org/structure/
# If you need queries, it's best to ask either Thanos or Daniel.
hod = HodClient("xbos/hod")

# temporal parameters
SITE = "ciee"

# Brick queries
building_meters_query = """SELECT ?meter ?meter_uuid FROM %s WHERE { ?meter rdf:type brick:Building_Electric_Meter . ?meter bf:uuid ?meter_uuid . };"""
def __init__(self, cfg, now=None):
    """Build an occupancy predictor for the configured zone.

    Pulls 30 days of 15-minute occupancy-sensor history for cfg['zone'],
    collapses every sensor in the zone into one binary occupancy series,
    and stores the prediction from the k most similar past days in
    `self.predictions`.

    Args:
        cfg: config dict with 'zone', 'Server' and archiver settings.
        now: aware datetime anchoring the query window; defaults to the
            current Pacific time. BUG FIX: the old signature default was
            evaluated once at import time, freezing "now" for the whole
            process lifetime.
    """
    if now is None:
        now = datetime.datetime.utcnow().replace(
            tzinfo=pytz.timezone("UTC")).astimezone(
                tz=pytz.timezone("America/Los_Angeles"))

    # query the server to get all the available occupancy sensors
    zone_name = cfg['zone']
    if cfg['Server']:
        c = get_client(agent='172.17.0.1:28589', entity="thanos.ent")
    else:
        c = get_client()
    archiver = DataClient(c)
    hod = HodClient("ciee/hod", c)

    occ_query = """SELECT ?sensor ?uuid ?zone WHERE { ?sensor rdf:type brick:Occupancy_Sensor . ?sensor bf:isLocatedIn/bf:isPartOf ?zone . ?sensor bf:uuid ?uuid . ?zone rdf:type brick:HVAC_Zone }; """
    results = hod.do_query(occ_query)
    uuids = [[x['?zone'], x['?uuid']] for x in results['Rows']]

    temp_now = now

    # select the sensors that are contained in the zone we are optimizing for
    query_list = []
    for i in uuids:
        if i[0] == zone_name:
            query_list.append(i[1])

    # Archiver window: the last 30 days in 15-minute buckets
    # ('start' is the later bound; window_uuids takes (end, start)).
    start = '"' + (temp_now).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'
    end = '"' + (temp_now - timedelta(days=30)
                 ).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'
    dfs = make_dataframe(
        archiver.window_uuids(query_list, end, start, '15min', timeout=120))

    for uid, df in dfs.items():
        if 'mean' in df.columns:
            df = df[['mean']]
        df.columns = ['occ']
        dfs[uid] = df.resample('15min').mean()

    # OR together every sensor in the zone into one binary occupancy column.
    df = dfs.values()[0]  # Python 2: dict.values() returns a list
    if len(dfs) > 1:
        for newdf in dfs.values()[1:]:
            df['occ'] += newdf['occ']
    df['occ'] = 1 * (df['occ'] > 0)
    df.index = df.index.tz_localize(pytz.timezone("America/Los_Angeles"))

    # Prediction hyper-parameters (minutes unless noted).
    observation_length_addition = 4 * 60
    k = 5  # number of similar days to use
    prediction_time = 4 * 60
    resample_time = 15
    #now = df.index[-prediction_time/resample_time]
    now = df.index[-1]  # rebind: predict from the last observed timestamp
    observation_length = mins_in_day(now) + observation_length_addition
    similar_moments = find_similar_days(df, now, observation_length, k)
    self.predictions = predict(df, now, similar_moments, prediction_time,
                               resample_time)
# NOTE(review): fragment — the `with open(...)` that binds `f` precedes this chunk.
try:
    params = json.loads(f.read())
except ValueError as e:
    print "Invalid parameter file"
    sys.exit(1)

# Bring up the EMU-2 energy meter on the configured serial port.
emu_instance = emu(params["port"])
emu_instance.start_serial()

# get network info; poll until the device reports its MAC id.
emu_instance.get_network_info()
while not hasattr(emu_instance, 'NetworkInfo'):
    time.sleep(10)
macid = emu_instance.NetworkInfo.DeviceMacId

c = get_client(agent=params["agent"], entity=params["entity"])
PONUM = (2, 0, 9, 1)  # payload-object type for meter readings
baseuri = params["baseuri"]
signaluri = "{0}/s.emu2/{1}/i.meter/signal/meter".format(baseuri, macid)
print ">", signaluri


def send_message(msg):
    """
    msg has keys:
        current_demand
        current_price
        current_tier
        current_summation_delivered
        current_summation_received
    """
    # NOTE(review): function body continues in the next chunk of this file.
def preprocess_thermal(self, cfg):
    """Collect a balanced training set of heating/cooling/no-op samples.

    Walks backwards through the archive in 30-day chunks until (at most)
    `max_action_count` physically-consistent samples of each action class
    have been collected, then returns them shuffled.

    Returns:
        A shuffled pandas.DataFrame with columns tin, a, a1, a2, temp_next.
    """

    # if state is 1 we are doing heating
    def f1(row):
        if row['a'] > 0 and row['a'] <= 1:
            val = 1
        else:
            val = 0
        return val

    # if state is 2 we are doing cooling
    def f2(row):
        if row['a'] > 1 and row['a'] <= 2:
            val = 1
        else:
            val = 0
        return val

    final_df = pd.DataFrame()
    flag = True
    heating_count = 0
    cooling_count = 0
    do_nothing_count = 0
    max_action_count = 50
    month_count = 0
    # NOTE(review): if the archive never yields enough samples of every
    # class, this loop keeps reaching further back indefinitely.
    while flag:
        # query the server for the thermostat state and temperature
        if cfg['Server']:
            c = get_client(agent='172.17.0.1:28589', entity="thanos.ent")
        else:
            c = get_client()
        archiver = DataClient(c)
        uuids = [
            cfg['UUIDS']['thermostat_temperature'],
            cfg['UUIDS']['thermostat_state']
        ]
        temp_now = self.current_time

        # 30-day window, shifted one month further back each iteration.
        start = '"' + (temp_now + timedelta(minutes=15) -
                       timedelta(days=(month_count) * 30)
                       ).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'
        end = '"' + (temp_now - timedelta(days=(month_count + 1) * 30)
                     ).strftime('%Y-%m-%d %H:%M:%S') + ' PST"'

        dfs = make_dataframe(
            archiver.window_uuids(uuids, end, start, '15min', timeout=120))

        for uid, df in dfs.items():
            if uid == uuids[0]:
                if 'mean' in df.columns:
                    df = df[['mean']]
                df.columns = ['tin']
            elif uid == uuids[1]:
                if 'max' in df.columns:
                    df = df[['max']]
                df.columns = ['a']
            dfs[uid] = df.resample('15min').mean()

        df = pd.concat([dframe for uid, dframe in dfs.items()], axis=1)
        df['a1'] = df.apply(f1, axis=1)
        df['a2'] = df.apply(f2, axis=1)
        df['tin'] = df['tin'].replace(to_replace=0, method='pad')
        df['temp_next'] = df['tin'].shift(-1)
        df = df.dropna()

        for idx in reversed(df.index):
            # Keep only samples whose temperature moved in the direction the
            # action implies (heating raises tin, cooling lowers it).
            # BUG FIX: the heating branch used `<=`, admitting one sample
            # more than the cap used by the cooling/do-nothing branches.
            if df.at[idx, 'a1'] == 1 and heating_count < max_action_count:
                if df.at[idx, 'tin'] <= df.at[idx, 'temp_next']:
                    final_df = final_df.append(df[df.index == idx])
                    heating_count += 1
            elif df.at[idx, 'a2'] == 1 and cooling_count < max_action_count:
                if df.at[idx, 'tin'] >= df.at[idx, 'temp_next']:
                    final_df = final_df.append(df[df.index == idx])
                    cooling_count += 1
            elif df.at[idx, 'a1'] == 0 and df.at[
                    idx, 'a2'] == 0 and do_nothing_count < max_action_count:
                final_df = final_df.append(df[df.index == idx])
                do_nothing_count += 1
            if heating_count >= max_action_count and cooling_count >= max_action_count\
                    and do_nothing_count >= max_action_count:
                flag = False
                break
        month_count += 1
    return shuffle(final_df)
def hvac_control(): try: # query the server to lean the current setpoints and the state of the thermostat if cfg['Server']: c = get_client(agent='172.17.0.1:28589', entity="thanos.ent") else: c = get_client() archiver = DataClient(c) uuids = [ cfg['UUIDS']['thermostat_high'], cfg['UUIDS']['thermostat_low'], cfg['UUIDS']['thermostat_mode'] ] temp_now = datetime.datetime.utcnow().replace( tzinfo=pytz.timezone("UTC")).astimezone( tz=pytz.timezone("America/Los_Angeles")) start = '"' + temp_now.strftime('%Y-%m-%d %H:%M:%S') + ' PST"' end = '"' + (temp_now - datetime.timedelta(minutes=10) ).strftime('%Y-%m-%d %H:%M:%S') + ' PST"' dfs = make_dataframe( archiver.window_uuids(uuids, end, start, '1min', timeout=120)) for uid, df in dfs.items(): if uid == uuids[0]: if 'mean' in df.columns: df = df[['mean']] df.columns = ['t_high'] elif uid == uuids[1]: if 'mean' in df.columns: df = df[['mean']] df.columns = ['t_low'] elif uid == uuids[2]: if 'mean' in df.columns: df = df[['mean']] df.columns = ['mode'] dfs[uid] = df.resample('1min').mean() df = pd.concat([dframe for uid, dframe in dfs.items()], axis=1) except: e = sys.exc_info()[0] print e return False # document the "before" state try: f = open(filename, 'a') f.write("Did read: " + str(df['t_low'][-1]) + ", " + str(df['t_high'][-1]) + ", " + str(df['mode'][-1]) + "\n") f.close() except: print "Could not document changes." # choose the apropriate setpoints according to weekday and time weekno = temp_now.weekday() if weekno < 5: now = datetime.datetime.utcnow().replace( tzinfo=pytz.timezone("UTC")).astimezone( tz=pytz.timezone("America/Los_Angeles")) now_time = now.time() if now_time >= datetime.time(18, 0) or now_time < datetime.time(7, 0): heating_setpoint = 62. cooling_setpoint = 85. else: heating_setpoint = 70. cooling_setpoint = 76. else: heating_setpoint = 62. cooling_setpoint = 85. 
try: adv = Advise(cfg) action, temp = adv.advise() temp = float(temp) except: e = sys.exc_info()[0] print e return False # action "0" is Do Nothing, action "1" is Cooling, action "2" is Heating if action == "0": p = { "override": True, "heating_setpoint": math.floor(temp - 0.1) - 1, "cooling_setpoint": math.ceil(temp + 0.1) + 1, "mode": 3 } print "Doing nothing" print p # document changes try: f = open(filename, 'a') f.write("Did write: " + str(math.floor(temp - 0.1) - 1) + ", " + str(math.ceil(temp + 0.1) + 1) + ", " + str(3) + "\n") f.close() except: print "Could not document changes." elif action == "1": p = { "override": True, "heating_setpoint": heating_setpoint, "cooling_setpoint": math.floor(temp - 0.1), "mode": 3 } print "Heating" print p # document changes try: f = open(filename, 'a') f.write("Did write: " + str(heating_setpoint) + ", " + str(math.floor(temp - 0.1)) + ", " + str(3) + "\n") f.close() except: print "Could not document changes." elif action == "2": p = { "override": True, "heating_setpoint": math.ceil(temp + 0.1), "cooling_setpoint": cooling_setpoint, "mode": 3 } print "Cooling" print p # document changes try: f = open(filename, 'a') f.write("Did write: " + str(math.ceil(temp + 0.1)) + ", " + str(cooling_setpoint) + ", " + str(3) + "\n") f.close() except: print "Could not document changes." else: print "Problem with action." return False # try to commit the changes to the thermostat, if it doesnt work 10 times in a row ignore and try again later for z in normal_zones: for i in range(10): try: zones[z].write(p) break except: if i == 9: e = sys.exc_info()[0] print e return False continue return True
# NOTE(review): fragment — the `try:` and the `with open(...)` that binds `f`
# precede this chunk; indentation below is reconstructed.
    params = json.loads(f.read())
except ValueError as e:
    print "Invalid parameter file"
    sys.exit(1)

# Bring up the EMU-2 energy meter on the configured serial port.
emu_instance = emu(params["port"])
emu_instance.start_serial()

# get network info; poll until the device reports its MAC id.
emu_instance.get_network_info()
while not hasattr(emu_instance, 'NetworkInfo'):
    time.sleep(10)
macid = emu_instance.NetworkInfo.DeviceMacId

c = get_client(agent=params["agent"], entity=params["entity"])
PONUM = (2,0,9,1)  # payload-object type for meter readings
baseuri = params["baseuri"]
signaluri = "{0}/s.emu2/{1}/i.meter/signal/meter".format(baseuri, macid)
print ">",signaluri


def send_message(msg):
    """
    msg has keys:
        current_demand
        current_price
        current_tier
        current_summation_delivered
        current_summation_received
    """
    # Pack the reading into a payload object for publishing.
    po = PayloadObject(PONUM, None, msgpack.packb(msg))
import pandas as pd
import json
import sys
import time
import schedule

# Load runtime parameters (archiver URI, HodDB URI, Slack token).
with open("params.json") as f:
    try:
        params = json.loads(f.read())
    except ValueError:
        print "Invalid parameter file"
        sys.exit(1)

# setup clients
client = get_client()
dataclient = DataClient(client, archivers=[params["ARCHIVER_URI"]])
hodclient = HodClient(params["HOD_URI"], client)

slack_token = params["SLACK_API_TOKEN"]
sc = SlackClient(slack_token)


def notify(msg):
    # Post an alarm message to the #xbos_alarms Slack channel.
    sc.api_call("chat.postMessage",channel="#xbos_alarms",text=msg)


# get all thermostat states
query = """SELECT ?dev ?uri WHERE { ?dev rdf:type/rdfs:subClassOf* brick:Thermostat . ?dev bf:uri ?uri . };"""
res = hodclient.do_query(query)
# NOTE(review): chunk truncated — the body of this `if` is in the next chunk.
if res["Count"] == 0:
from xbos import get_client
from xbos.services.hod import HodClient
from xbos.devices.light import Light
from xbos.devices.occupancy_sensor import Occupancy_Sensor
import pandas as pd
from xbos.services import mdal
import datetime, pytz
from datetime import timedelta

############################################### Initializing our datasets
ACTUATE = False  # safety switch: when False, no lights are actuated
SITE = "ciee"  # Change this according to the site

#print RoomsType.RoomsType[df['column_name'] == some_value]
c = get_client()
hod = HodClient("xbos/hod")

skipped = ""  # accumulates identifiers of entities we skip

####### occ query
# NOTE(review): chunk truncated — this Brick query string literal continues
# (and is closed) in the next chunk; it cannot be closed here without
# altering the runtime string.
occ_query = """SELECT * FROM %s WHERE { ?l rdf:type brick:Lighting_System . ?l bf:feeds ?room . ?room rdf:type brick:Room . ?l bf:uri ?luri . ?l bf:hasPoint ?p . ?p rdf:type brick:Occupancy_Sensor . ?p bf:uri ?puri . ?p bf:uuid ?puuid
ind = (now_time.hour) % 24 #print data[ind] if self.simpleDr == True and data[ind]['Price'] > 0.8: self.workday_inactive() else: self.workday() else: self.workday_inactive() if __name__ == '__main__': with open("config_south.yml", 'r') as ymlfile: cfg = yaml.load(ymlfile) if cfg["Data_Manager"]["Server"]: client = get_client(agent=cfg["Data_Manager"]["Agent_IP"], entity=cfg["Data_Manager"]["Entity_File"]) else: client = get_client() hc = HodClientHTTP("http://ciee.cal-sdb.org") q = """SELECT ?uri ?zone WHERE { ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat . ?tstat bf:uri ?uri . ?tstat bf:controls/bf:feeds ?zone . }; """ tstats = {} for tstat in hc.do_query(q): print tstat tstats[tstat["?zone"]] = Thermostat(client, tstat["?uri"])
from xbos.services.pundat import DataClient, make_dataframe # read from config file try: yaml_filename = sys.argv[1] except: sys.exit( "Please specify the configuration file as: python2 controller.py config_file.yaml" ) with open(yaml_filename, 'r') as ymlfile: cfg = yaml.load(ymlfile) # query server to get the available zones if cfg['Server']: client = get_client(agent='172.17.0.1:28589', entity="thanos.ent") else: client = get_client() hc = HodClientHTTP("http://ciee.cal-sdb.org") q = """SELECT ?uri ?zone WHERE { ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat . ?tstat bf:uri ?uri . ?tstat bf:controls/bf:feeds ?zone . }; """ zones = {} for tstat in hc.do_query(q): print tstat zones[tstat["?zone"]] = Thermostat(client, tstat["?uri"])
def generate_months(lastN):
    """Build [later, earlier] date pairs covering this month-to-date plus the
    previous `lastN` whole months, newest first."""
    firstDayThisMonth = get_today().replace(day=1)
    ranges = [[get_today(), firstDayThisMonth]]
    lastN = int(lastN)
    while lastN > 0:
        firstDayLastMonth = firstDayThisMonth - relativedelta(months=1)
        # End one hour into the day before this month starts, paired with
        # the first day of the previous month.
        ranges.append([
            firstDayThisMonth - timedelta(days=1) + timedelta(hours=1),
            firstDayLastMonth
        ])
        firstDayThisMonth = firstDayLastMonth
        lastN -= 1
    return ranges


c = get_client(config.AGENT, config.ENTITY)
app = Flask(__name__, static_url_path='')


@app.route('/api/power/<last>/in/<bucketsize>')
@crossdomain(origin="*")
def power_summary(last, bucketsize):
    # first, determine the start date from the 'last' argument
    start_date = get_start(last)
    if last == 'year' and bucketsize == 'month':
        # Month-by-month buckets for the year view.
        ranges = generate_months(get_today().month - 1)
        readings = []
        times = []
        # NOTE(review): chunk truncated — the loop body continues in the
        # next chunk of this file.
        for t0, t1 in ranges:
# set new heating setpoint print "Setting new fan_mode to:", new_fanmode tstat.set_fan_mode(new_fanmode) # wait for update time.sleep(15) # check if it changed if tstat.fan_mode == old_fanmode: print "BAD FAN MODE", tstat.fan_mode # reset to old value print "Setting fan mode back to:", old_fanmode tstat.set_fan_mode(old_fanmode) print "===Fan test passes.===" if __name__ == '__main__': URIs = ["scratch.ns/demo/s.imt550c/410soda/i.xbos.thermostat"] c = get_client() for uri in URIs: tstat = Thermostat(c, uri) heating(tstat) cooling(tstat) override(tstat) mode(tstat) fan(tstat)
if __name__ == '__main__':
    import sys

    # Deployment flags: talk to the docker-hosted agent, actuate for real.
    Server = True
    Safemode = False

    # The building to collect for comes from the command line.
    try:
        Building = sys.argv[1]
    except:
        sys.exit("Please specify the building name as an argument")

    if Server:
        Entity_File = "../thanos.ent"
        Agent_IP = '172.17.0.1:28589'
        client = get_client(Agent_IP, Entity_File)
    else:
        client = get_client()

    ''' avenal-veterans-hall avenal-public-works-yard avenal-animal-shelter avenal-movie-theatre avenal-recreation-center '''
    # Building = "avenal-veterans-hall"

    collector = ThermalDataCollector(client, Building, Safemode)

    def interval_function(action):
        # 90-minute intervals while actuating, one minute when idle.
        return 90 if action != 0 else 1

    collector.main(interval_function, dt=15)