def dofetch(views, dataframes, start=None, end=None):
    timeparams = None
    if start is not None and end is not None:
        timeparams = pymortar.TimeParams(
            start=start.isoformat(),
            end=end.isoformat(),
        )
    # `sites` and `client` are module-level globals defined elsewhere.
    req = pymortar.FetchRequest(sites=sites,
                                views=views,
                                dataFrames=dataframes,
                                time=timeparams)
    return client.fetch(req)
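# A minimal usage sketch for dofetch, assuming the module-level `client` and
# `sites` globals the function relies on. The site name, view, and dataframe
# definitions below are hypothetical.
import datetime
import pymortar

client = pymortar.Client({})
sites = ["example_site"]

meter_view = pymortar.View(
    name="meters",
    definition="SELECT ?meter WHERE { ?meter rdf:type brick:Green_Button_Meter };",
)
meter_frame = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    window="15m",
    timeseries=[pymortar.Timeseries(view="meters", dataVars=["?meter"])],
)
resp = dofetch([meter_view], [meter_frame],
               start=datetime.datetime(2018, 1, 1),
               end=datetime.datetime(2018, 2, 1))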
def get_from_pymortar(start, end, uuid, pymortar_client):
    price_stream = pymortar.DataFrame(name="price_data",
                                      uuids=[uuid],
                                      aggregation=pymortar.MEAN,
                                      window="1h")
    price_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )
    price_request = pymortar.FetchRequest(sites=[""],
                                          dataFrames=[price_stream],
                                          time=price_time_params)
    return pymortar_client.fetch(price_request)["price_data"]
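# `rfc3339` is used throughout these snippets but never defined here. A
# minimal sketch, assuming it takes a Unix timestamp in seconds (as the
# nanosecond-conversion snippet further down suggests) and returns the
# RFC 3339 UTC string format Mortar expects:
import datetime

def rfc3339(timestamp_seconds):
    """Format a Unix timestamp (seconds) as an RFC 3339 UTC string."""
    return datetime.datetime.utcfromtimestamp(timestamp_seconds).strftime(
        "%Y-%m-%dT%H:%M:%SZ")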
def get_outside_temp_data(building):
    interval = 3600  # aggregation window in seconds
    outside_temperature_query = """SELECT ?temp WHERE {
        ?temp rdf:type brick:Weather_Temperature_Sensor .
    };"""
    weather_stations_view = pymortar.View(
        name="weather_stations_view",
        sites=[building],
        definition=outside_temperature_query,
    )
    weather_stations_stream = pymortar.DataFrame(
        name="weather_stations",
        aggregation=pymortar.MEAN,
        window=str(int(interval)) + 's',
        timeseries=[
            pymortar.Timeseries(
                view="weather_stations_view",
                dataVars=["?temp"],
            )
        ])
    # `start`, `end`, and `pymortar_client` are module-level globals
    # defined elsewhere.
    weather_stations_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )
    request = pymortar.FetchRequest(
        sites=[building],
        views=[weather_stations_view],
        dataFrames=[weather_stations_stream],
        time=weather_stations_time_params)
    return pymortar_client.fetch(request)['weather_stations']
def get_points_for_equipment(equipment, building, pymortar_client):
    """Gets points for each equipment in the list of equipment."""
    v = pymortar.View(
        name=equipment,
        definition="""
        SELECT ?{eq_lower} ?point ?class
        FROM {building}
        WHERE {{
            ?{eq_lower} rdf:type brick:{eq_class} .
            ?{eq_lower} bf:hasPoint ?point .
            ?point rdf:type ?class
        }};""".format(eq_lower=equipment.lower(),
                      eq_class=equipment,
                      building=building))
    res = pymortar_client.fetch(
        pymortar.FetchRequest(sites=[building], views=[v]))
    return res.view(equipment)['class'].unique()
def get_power(site, start, end, agg, window, cli):
    power_query = """SELECT ?meter WHERE {
        ?meter rdf:type brick:Green_Button_Meter
    };"""
    # Look up the aggregation constant by name instead of using eval().
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result = cli.fetch(request)
    return result['power']
def get_weather(site, start, end, agg, window, cli):
    weather_query = """SELECT ?t WHERE {
        ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor
    };"""
    # Look up the aggregation constant by name instead of using eval().
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='weather', definition=weather_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='weather',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='weather',
                                                       dataVars=['?t'])
                               ])
        ])
    result = cli.fetch(request)
    return result['weather']
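# A hedged usage sketch for get_power and get_weather; the site name, dates,
# and client setup are hypothetical. The agg string is resolved to a pymortar
# constant inside the functions via getattr.
client = pymortar.Client({})
power_df = get_power('example_site', '2018-01-01T00:00:00Z',
                     '2018-02-01T00:00:00Z', 'mean', '15m', client)
weather_df = get_weather('example_site', '2018-01-01T00:00:00Z',
                         '2018-02-01T00:00:00Z', 'mean', '1h', client)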
def get_mortar_oat_uuid(uuid, start, end, window, agg, pymortar_client):
    oat_df = pymortar.DataFrame(
        name="weather_stations",
        uuids=[uuid],
        aggregation=agg,
        window=str(int(window)) + 's',
    )
    oat_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )
    oat_request = pymortar.FetchRequest(sites=[""],
                                        dataFrames=[oat_df],
                                        time=oat_time_params)
    outside_temperature_data = pymortar_client.fetch(
        oat_request)['weather_stations']
    if outside_temperature_data is None:
        return None, "did not fetch data from pymortar for uuid: %s" % uuid
    return outside_temperature_data, None
def get_equipment_list(building, pymortar_client):
    """Gets unique list of equipment in the building."""
    # Get unique list of equipment
    v = pymortar.View(
        name="equipment",
        definition="""
        SELECT ?equipname ?equipclass ?point ?pointclass
        FROM %s
        WHERE {
            ?equipclass rdfs:subClassOf+ brick:Equipment .
            ?equipname rdf:type ?equipclass .
            ?equipname bf:hasPoint ?point .
            ?point rdf:type ?pointclass
        };""" % building)
    res = pymortar_client.fetch(
        pymortar.FetchRequest(sites=[building], views=[v]))
    # Excludes CentralPlant for now
    equip_query = res.query(
        "SELECT DISTINCT SUBSTR(equipclass, INSTR(equipclass, '#') + 1) AS eq "
        "FROM equipment WHERE equipname NOT LIKE '%Central%'")
    return [row[0] for row in equip_query]
def read_config():
    """
    Reads config.json file that contains parameters for baselines and fetches
    data from Mortar.

    Returns
    -------
    pd.DataFrame(), pd.DataFrame(), default(list), default(list)
        meter data, oat data, map of uuid to meter data, map of uuid to oat data
    """
    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for outdoor air temperature data
    query_oat = """SELECT ?t WHERE {
        ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor
    };"""

    # Get list of sites for meter data and OAT data
    resp_meter = client.qualify([query_meter])
    resp_oat = client.qualify([query_oat])

    if resp_meter.error or resp_oat.error:
        # Report whichever qualify call actually failed.
        print("ERROR: ", resp_meter.error or resp_oat.error)
        os._exit(0)
    else:
        # Get list of sites that are common for meter data and OAT data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_oat.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)

    print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of OAT (metadata)
    oat = pymortar.View(name="view_oat",
                        sites=config['sites'],
                        definition=query_oat)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the OAT timeseries stream
    data_view_oat = pymortar.DataFrame(
        name="data_oat",
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, oat],
        dataFrames=[data_view_meter, data_view_oat],
        time=time_params)

    # Fetch data from request
    data = client.fetch(request)

    # Map column uuids to site names
    map_uuid_meter, map_uuid_oat = map_uuid_sitename(data)

    # Save data to csv file
    if config['save_data']:
        data['data_meter'].to_csv('meter_data.csv')
        data['data_oat'].to_csv('oat_data.csv')

    return data['data_meter'], data['data_oat'], map_uuid_meter, map_uuid_oat
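# `map_uuid_sitename(data)` is called above but not defined in this file. A
# minimal sketch, assuming it walks both views in the fetch result and maps
# each stream uuid to its site name; the (url, uuid, sitename) row layout is
# taken from the other snippets in this file:
from collections import defaultdict

def map_uuid_sitename(data):
    """Map meter and OAT column uuids to their site names."""
    map_uuid_meter = defaultdict(list)
    map_uuid_oat = defaultdict(list)
    for (url, uuid, sitename) in data.query('select * from view_meter'):
        map_uuid_meter[uuid].append(sitename)
    for (url, uuid, sitename) in data.query('select * from view_oat'):
        map_uuid_oat[uuid].append(sitename)
    return map_uuid_meter, map_uuid_oat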
req = pymortar.FetchRequest(
    sites=resp.sites,
    views=[
        pymortar.View(
            name="test1",
            definition="SELECT ?vav WHERE { ?vav rdf:type/rdfs:subClassOf* brick:Temperature_Sensor };",
        ),
        pymortar.View(
            name="meter",
            definition="SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Electric_Meter };",
        ),
    ],
    dataFrames=[
        pymortar.DataFrame(
            name="meter_data",
            aggregation=pymortar.MEAN,
            window="5m",
            uuids=["b8166746-ba1c-5207-8c52-74e4700e4467"],
            # timeseries=[
            #     pymortar.Timeseries(
            #         view="meter",
            #         dataVars=["?meter"],
            #     )
            # ]
        )
    ],
    time=pymortar.TimeParams(
        start="2019-01-01T00:00:00Z",
        end="2019-04-01T00:00:00Z",
    ))
def tstat_zone_analysis(client, resample_minutes, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    print(st)
    print(et)
    tstat_query = """
    SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE {
        ?tstat bf:hasLocation ?room .
        ?zone bf:hasPart ?room .
        ?tstat bf:hasPoint ?state .
        ?tstat bf:hasPoint ?temp .
        ?tstat bf:hasPoint ?hsp .
        ?tstat bf:hasPoint ?csp .
        ?zone rdf:type/rdfs:subClassOf* brick:Zone .
        ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat .
        ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status .
        ?temp rdf:type/rdfs:subClassOf* brick:Temperature_Sensor .
        ?hsp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint .
        ?csp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint .
    };
    """
    qualify_response = client.qualify([tstat_query])
    if qualify_response.error != "":
        print("ERROR: ", qualify_response.error)
        os._exit(1)  # os.exit() does not exist; use os._exit()

    print("Running on {0} sites".format(len(qualify_response.sites)))

    tstat_view = pymortar.View(
        name="tstat_points",
        sites=qualify_response.sites,
        definition=tstat_query,
    )
    tstat_streams = pymortar.DataFrame(
        name="thermostat_data",
        aggregation=pymortar.MAX,
        window="1m",
        timeseries=[
            pymortar.Timeseries(view="tstat_points",
                                dataVars=["?state", "?temp", "?hsp", "?csp"])
        ])
    time_params = pymortar.TimeParams(start=st, end=et)
    request = pymortar.FetchRequest(
        sites=qualify_response.sites,  # from our call to Qualify
        views=[tstat_view],
        dataFrames=[tstat_streams],
        time=time_params)
    result = client.fetch(request)
    tstat_df = result['thermostat_data']
    tstats = [
        tstat[0] for tstat in result.query("select tstat from tstat_points")
    ]
    error_df_list = []
    for tstat in tstats:
        q = """
        SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
        FROM tstat_points
        WHERE tstat = "{0}";
        """.format(tstat)
        res = result.query(q)
        if len(res) == 0:
            continue
        state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
        df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]
        df.columns = ['state', 'iat', 'hsp', 'csp']
        df2 = pd.DataFrame()
        resample_time = '{0}T'.format(resample_minutes)
        df2['min_hsp'] = df['hsp'].resample(resample_time).min()
        df2['min_csp'] = df['csp'].resample(resample_time).min()
        df2['max_hsp'] = df['hsp'].resample(resample_time).max()
        df2['max_csp'] = df['csp'].resample(resample_time).max()
        df2['heat_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 1).sum() + (x == 4).sum()) / resample_minutes * 100)
        df2['cool_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 2).sum() + (x == 5).sum()) / resample_minutes * 100)
        df2['tstat'] = tstat
        df2['room'] = room.split('#')[1]
        df2['zone'] = zone.split('#')[1]
        df2['site'] = site
        df2['both_heat_cool'] = False
        df2.loc[((df2.heat_percent > 0) & (df2.cool_percent > 0)),
                'both_heat_cool'] = True
        if not df2[df2['both_heat_cool'] == True].empty:
            error_df_list.append(df2[df2['both_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[[
            'site', 'zone', 'room', 'heat_percent', 'cool_percent', 'min_hsp',
            'min_csp', 'max_hsp', 'max_csp'
        ]]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)
        return error_df
    else:
        return pd.DataFrame()
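# `get_error_message` is called by tstat_zone_analysis above (and by
# ahu_analysis further down) but is not defined in this file. A minimal
# sketch, assuming it turns one row of the error dataframe into a readable
# line; the exact wording is hypothetical:
def get_error_message(row):
    """Format one error-dataframe row into a human-readable message."""
    return "{0} @ {1}: simultaneous heating and cooling detected".format(
        row['site'], row.name)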
def get_point_class_dict(pymortar_client, building):
    """
    Creates a dictionary of BACnet point names mapped to their respective
    Brick classes. Points are also grouped by Equipment class. Point names
    and classes are obtained from Pymortar.

    e.g.
    {
        "AHU": {
            "AHU-3.RA-T": {
                "point_name": "AHU-3.RA-T",
                "point_class": "Return_Air_Temperature_Sensor"
            },
            ...
        },
        "VAV": {
            ...
        }
    }

    :param pymortar_client: Pymortar client
    :param building: Name of building that is being analyzed
    :return: Dictionary of BACnet point names mapped to their respective
        Brick classes grouped by Equipment class
    """
    v = pymortar.View(name="equipment",
                      definition="""
        SELECT ?equipname ?equipclass ?point ?pointclass
        FROM %s
        WHERE {
            ?equipclass rdfs:subClassOf+ brick:Equipment .
            ?equipname rdf:type ?equipclass .
            ?equipname bf:hasPoint ?point .
            ?point rdf:type ?pointclass
        };""" % building)
    res = pymortar_client.fetch(
        pymortar.FetchRequest(sites=[building], views=[v]))
    eq_view = res.view('equipment')

    point_class_dict = {}
    groups = eq_view.groupby('equipclass').groups

    # Equipment classes that are not being considered and will be excluded
    # from the dictionary. Add to this list of excluded equipment for other
    # buildings.
    excluded_equipment = ['Boiler', 'Chilled_Water_Pump', 'Hot_Water_Pump']

    # Clusters all points with subclasses of Fan and Damper (i.e. Exhaust_Fan)
    clustered_equipment = dict.fromkeys(["Damper", "Fan"], None)
    clustered = None

    for equipclass, indexes in groups.items():
        if equipclass not in excluded_equipment:
            for clustered_eq in clustered_equipment:
                # Union all the clustered equipment
                if clustered_eq.lower() in equipclass.lower():
                    clustered = clustered_eq
                    break
            if clustered:
                if clustered not in point_class_dict:
                    point_class_dict[clustered] = {}
            else:
                if equipclass not in point_class_dict:
                    point_class_dict[equipclass] = {}
            points = eq_view.iloc[indexes][['point', 'pointclass']].values
            for p in points:
                point_name, point_class = p[0], p[1]
                if clustered:
                    point_class_dict[clustered][point_name] = {
                        "point_name": point_name,
                        "point_class": point_class
                    }
                else:
                    point_class_dict[equipclass][point_name] = {
                        "point_name": point_name,
                        "point_class": point_class
                    }
            clustered = None
    return point_class_dict
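# Hedged usage sketch for get_point_class_dict; the building name is
# hypothetical.
client = pymortar.Client({})
points_by_equipment = get_point_class_dict(client, "example_building")
for equip_class, points in points_by_equipment.items():
    print(equip_class, len(points), "points")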
request = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        pymortar.View(
            name="airflow_sensors",
            definition=air_flow_sensor_query,
        ),
        pymortar.View(
            name="airflow_sps",
            definition=air_flow_setpoint_query,
        )
    ],
    dataFrames=[
        pymortar.DataFrame(name="sensors",
                           aggregation=pymortar.MEAN,
                           window="30m",
                           timeseries=[
                               pymortar.Timeseries(
                                   view="airflow_sensors",
                                   dataVars=["?sensor"],
                               )
                           ]),
        pymortar.DataFrame(name="setpoints",
                           aggregation=pymortar.MEAN,
                           window="30m",
                           timeseries=[
                               pymortar.Timeseries(
                                   view="airflow_sps",
                                   dataVars=["?sp"],
                               )
                           ])
    ],
    time=pymortar.TimeParams(
        start="2018-01-01T00:00:00Z",
        end="2019-01-01T00:00:00Z",
    ))
print("running on {0} sites".format(len(qualify_resp.sites))) #### FETCH Stage request = pymortar.FetchRequest( sites=qualify_resp.sites, views=[ # defining relational table for the contents of the query (+site +meter_uuid columns) pymortar.View( name="meters", definition=meter_query, ) ], dataFrames=[ # 15min mean meter data pymortar.DataFrame(name="meters", aggregation=pymortar.MEAN, window="15m", timeseries=[ pymortar.Timeseries(view="meters", dataVars=["?meter"]) ]) ], time=pymortar.TimeParams( start="2016-01-01T00:00:00Z", end="2019-01-01T00:00:00Z", )) resp = client.fetch(request) # compute daily mean energy usage per day of the week for each site
])
req = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        pymortar.View(
            sites=qualify_resp.sites,
            name="meter",
            definition="SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter };",
        )
    ],
    dataFrames=[
        pymortar.DataFrame(
            name="meter_data",
            aggregation=pymortar.MEAN,
            window="1h",
            timeseries=[
                pymortar.Timeseries(
                    view="meter",
                    dataVars=["?meter"],
                )
            ],
        )
    ],
    time=pymortar.TimeParams(
        start="2017-01-01T00:00:00Z",
        end="2018-01-01T00:00:00Z",
    ))
resp = client.fetch(req)
def get_weather(self,
                site,
                start,
                end,
                point_type='Weather_Temperature_Sensor',
                agg=pymortar.MEAN,
                window='15m'):
    """
    Get weather (OAT) data from Mortar.

    Parameters
    ----------
    site : list(str)
        List of sites.
    start : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type : str
        Type of point, i.e. Weather_Temperature_Sensor...
    agg : pymortar aggregation object
        Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window
        parameter is ignored)
    window : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame(), defaultdict(list)
        OAT data, dictionary that maps OAT data's columns (uuid's) to
        sitenames.
    """
    # CHECK: Does Mortar take in UTC or local time?
    # Convert time to UTC
    start = self.convert_to_utc(start)
    end = self.convert_to_utc(end)

    query_oat = "SELECT ?t WHERE { ?t rdf:type/rdfs:subClassOf* brick:" + point_type + " };"

    # Get list of sites for OAT data
    resp_oat = self.client.qualify([query_oat])
    if resp_oat.error:
        raise RuntimeError(resp_oat.error)

    # Define the view of OAT (metadata)
    oat = pymortar.View(name="view_oat", sites=site, definition=query_oat)

    # Define the OAT timeseries stream
    data_view_oat = pymortar.DataFrame(
        name="data_oat",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=site,
                                    views=[oat],
                                    dataFrames=[data_view_oat],
                                    time=time_params)

    # Fetch data from request
    response = self.client.fetch(request)

    # resp_oat rows are (url, uuid, sitename)
    resp_oat = response.query('select * from view_oat')

    # Map uuid's to the site names
    map_uuid_sitename = defaultdict(list)
    for (url, uuid, sitename) in resp_oat:
        map_uuid_sitename[uuid].append(sitename)

    return response['data_oat'], map_uuid_sitename
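# `convert_to_utc` is a method these class snippets rely on but never define.
# A minimal sketch, assuming the instance carries a `timezone` attribute
# (hypothetical) and that timestamps arrive as 'YYYY-MM-DDTHH:MM:SSZ'-style
# strings, possibly without timezone info:
import pytz
from dateutil import parser as dateutil_parser

def convert_to_utc(self, timestamp):
    """Convert a timestamp string to an RFC 3339 UTC string."""
    local_tz = pytz.timezone(getattr(self, 'timezone', 'UTC'))
    dt = dateutil_parser.parse(timestamp)
    if dt.tzinfo is None:
        dt = local_tz.localize(dt)
    return dt.astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%SZ")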
def get_tstat(self, site, start, end, agg=pymortar.MAX, window='1m'):
    """
    Get tstat data from Mortar.

    Parameters
    ----------
    site : list(str)
        List of sites.
    start : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    agg : pymortar aggregation object
        Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window
        parameter is ignored)
    window : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame()
        Dataframe containing tstat data for all sites.
    """
    # CHECK: Does Mortar take in UTC or local time?
    # Convert time to UTC
    start = self.convert_to_utc(start)
    end = self.convert_to_utc(end)

    query_tstat = """SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE {
        ?tstat bf:hasLocation ?room .
        ?zone bf:hasPart ?room .
        ?tstat bf:hasPoint ?state .
        ?tstat bf:hasPoint ?temp .
        ?tstat bf:hasPoint ?hsp .
        ?tstat bf:hasPoint ?csp .
        ?zone rdf:type/rdfs:subClassOf* brick:Zone .
        ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat .
        ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status .
        ?temp rdf:type/rdfs:subClassOf* brick:Temperature_Sensor .
        ?hsp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint .
        ?csp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint .
    };"""

    # Get list of sites for tstat data
    resp_tstat = self.client.qualify([query_tstat])
    if resp_tstat.error:
        raise RuntimeError(resp_tstat.error)

    # Define the view of tstat (metadata)
    tstat = pymortar.View(name="view_tstat",
                          sites=site,
                          definition=query_tstat)

    # Define the tstat timeseries stream
    data_view_tstat = pymortar.DataFrame(
        name="data_tstat",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[
            pymortar.Timeseries(
                view="view_tstat",
                dataVars=["?state", "?temp", "?hsp", "?csp"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=site,
                                    views=[tstat],
                                    dataFrames=[data_view_tstat],
                                    time=time_params)

    # Fetch data from request
    response = self.client.fetch(request)

    # Final dataframe containing all sites' data
    df_result = pd.DataFrame()

    tstat_df = response['data_tstat']
    tstats = [
        tstat[0] for tstat in response.query("select tstat from view_tstat")
    ]

    for i, tstat in enumerate(tstats):
        q = """
        SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
        FROM view_tstat
        WHERE tstat = "{0}";
        """.format(tstat)
        res = response.query(q)
        if not res:
            continue
        state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
        df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]
        # A single site has many tstat points. Adding site+str(i)
        # distinguishes each of them.
        # CHECK: This can have a better naming scheme.
        df.columns = [
            site + str(i) + '_state', site + str(i) + '_iat',
            site + str(i) + '_hsp', site + str(i) + '_csp'
        ]
        df_result = df_result.join(df, how='outer')

    return df_result
def read_config():
    """
    Reads config.json file to obtain parameters and fetch data from Mortar.

    Returns
    -------
    pymortar fetch result
        Result containing the 'data_meter' and 'data_occupancy' dataframes.
    """
    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for occupancy data
    query_occupancy = "SELECT ?point WHERE { ?point rdf:type/rdfs:subClassOf* brick:Occupancy_Sensor };"

    # Get list of sites for meter data and occupancy data
    resp_meter = client.qualify([query_meter])
    resp_occupancy = client.qualify([query_occupancy])

    if resp_meter.error or resp_occupancy.error:
        # Report whichever qualify call actually failed.
        print("ERROR: ", resp_meter.error or resp_occupancy.error)
        os._exit(0)
    else:
        # Get list of sites that are common for meter data and occupancy data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_occupancy.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)

    print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of occupancy (metadata)
    occupancy = pymortar.View(name="view_occupancy",
                              sites=config['sites'],
                              definition=query_occupancy)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the occupancy timeseries stream
    data_view_occupancy = pymortar.DataFrame(
        name="data_occupancy",  # dataframe column name
        aggregation=pymortar.RAW,
        window="",
        timeseries=[
            pymortar.Timeseries(view="view_occupancy", dataVars=["?point"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, occupancy],
        dataFrames=[data_view_meter, data_view_occupancy],
        time=time_params)

    # Fetch data from request
    response = client.fetch(request)

    # Save data to csv file
    if config['save_data']:
        response['data_meter'].to_csv('meter_data.csv')
        response['data_occupancy'].to_csv('occupancy_data.csv')

    # Create results folder if it doesn't exist
    if not os.path.exists('./' + config['results_folder']):
        os.mkdir('./' + config['results_folder'])

    return response
def get_meter_data(pymortar_client,
                   pymortar_objects,
                   site,
                   start,
                   end,
                   point_type="Green_Button_Meter",
                   agg='MEAN',
                   window='15m'):
    """
    Get meter data from pymortar.

    Parameters
    ----------
    pymortar_client : pymortar.Client({})
        Pymortar Client Object.
    pymortar_objects : dict
        Dictionary that maps aggregation values to corresponding pymortar
        objects.
    site : str
        Building name.
    start : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type : str
        Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
    agg : str
        Values include MEAN, MAX, MIN, COUNT, SUM, RAW (the temporal window
        parameter is ignored)
    window : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame(), defaultdict(list)
        Meter data, dictionary that maps meter data's columns (uuid's) to
        sitenames.
    """
    agg = pymortar_objects.get(agg, 'ERROR')
    if agg == 'ERROR':
        # str + dict_keys raises TypeError; join the keys instead.
        raise ValueError(
            'Invalid aggregate type; should be string and in caps; '
            'values include: ' + ', '.join(pymortar_objects.keys()))

    query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

    # Define the view of meters (metadata)
    meter = pymortar.View(name="view_meter",
                          sites=[site],
                          definition=query_meter)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=[site],
                                    views=[meter],
                                    dataFrames=[data_view_meter],
                                    time=time_params)

    # Fetch data from request
    response = pymortar_client.fetch(request)

    # resp_meter rows are (url, uuid, sitename)
    resp_meter = response.query('select * from view_meter')

    # Map uuid's to the site names
    map_uuid_sitename = defaultdict(list)
    for (url, uuid, sitename) in resp_meter:
        map_uuid_sitename[uuid].append(sitename)

    return response['data_meter'], map_uuid_sitename
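# A hedged example of the `pymortar_objects` mapping expected above; the
# variable name comes from the function signature and the entries follow the
# aggregation constants listed in the docstring:
pymortar_objects = {
    'MEAN': pymortar.MEAN,
    'MAX': pymortar.MAX,
    'MIN': pymortar.MIN,
    'COUNT': pymortar.COUNT,
    'SUM': pymortar.SUM,
    'RAW': pymortar.RAW,
}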
def get_meter_data(self,
                   site,
                   start,
                   end,
                   point_type="Green_Button_Meter",
                   agg='MEAN',
                   window='15m'):
    """
    Get meter data from Mortar.

    Parameters
    ----------
    site : list(str)
        List of sites.
    start : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type : str
        Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
    agg : str or pymortar aggregation object
        Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window
        parameter is ignored)
    window : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame(), defaultdict(list)
        Meter data, dictionary that maps meter data's columns (uuid's) to
        sitenames.
    """
    # Accept either an aggregation name like 'MEAN' or a pymortar constant
    # (replaces the earlier hack that only handled 'MEAN').
    if isinstance(agg, str):
        agg = getattr(pymortar, agg.upper())

    # CHECK: Does Mortar take in UTC or local time?
    # Convert time to UTC
    start = self.convert_to_utc(start)
    end = self.convert_to_utc(end)

    query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

    # Define the view of meters (metadata)
    meter = pymortar.View(name="view_meter",
                          sites=site,
                          definition=query_meter)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=site,
                                    views=[meter],
                                    dataFrames=[data_view_meter],
                                    time=time_params)

    # Fetch data from request
    response = self.client.fetch(request)

    # resp_meter rows are (url, uuid, sitename)
    resp_meter = response.query('select * from view_meter')

    # Map uuid's to the site names
    map_uuid_sitename = defaultdict(list)
    for (url, uuid, sitename) in resp_meter:
        map_uuid_sitename[uuid].append(sitename)

    return response['data_meter'], map_uuid_sitename
datetime.datetime.strptime("20/04/2019 01:00:00", "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9) end = int( time.mktime( datetime.datetime.strptime("27/04/2019 23:59:59", "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9) print(start) print(end) print(uuid) price_stream = pymortar.DataFrame(name="price_data", uuids=[uuid], aggregation=pymortar.MEAN, window="1h") price_time_params = pymortar.TimeParams( start=rfc3339(int(start / 1e9 - start / 1e9 % 3600)), end=rfc3339(int(end / 1e9)), ) price_request = pymortar.FetchRequest(sites=[""], dataFrames=[price_stream], time=price_time_params) df = pymortar_client.fetch(price_request)["price_data"] df.to_csv("new.csv") print(df)
def ahu_analysis(client, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    query = """SELECT ?cooling_point ?heating_point ?ahu WHERE {
        ?cooling_point rdf:type/rdfs:subClassOf* brick:Cooling_Valve_Command .
        ?heating_point rdf:type/rdfs:subClassOf* brick:Heating_Valve_Command .
        ?ahu bf:hasPoint ?cooling_point .
        ?ahu bf:hasPoint ?heating_point .
    };"""
    resp = client.qualify([query])
    if resp.error != "":
        print("ERROR: ", resp.error)
        return pd.DataFrame()  # bail out instead of fetching with no sites

    points_view = pymortar.View(
        sites=resp.sites,
        name="point_type_data",
        definition=query,
    )
    point_streams = pymortar.DataFrame(
        name="points_data",
        aggregation=pymortar.MAX,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="point_type_data",
                                dataVars=["?cooling_point", "?heating_point"])
        ])
    time_params = pymortar.TimeParams(start=st, end=et)
    request = pymortar.FetchRequest(
        sites=resp.sites,
        views=[points_view],
        time=time_params,
        dataFrames=[point_streams],
    )
    response = client.fetch(request)
    ahu_df = response["points_data"]
    ahus = [
        ahu[0] for ahu in response.query("select ahu from point_type_data")
    ]
    error_df_list = []
    for ahu in ahus:
        heat_cool_query = """
        SELECT cooling_point_uuid, heating_point_uuid, site
        FROM point_type_data
        WHERE ahu = "{0}";
        """.format(ahu)
        res = response.query(heat_cool_query)
        cooling_uuid, heating_uuid, site = res[0]
        df = ahu_df[[cooling_uuid, heating_uuid]].dropna()
        df.columns = ['cooling', 'heating']
        df['site'] = site
        df['ahu'] = ahu.split('#')[1]
        df['simultaneous_heat_cool'] = False
        df.loc[((df.cooling > 0) & (df.heating > 0)),
               'simultaneous_heat_cool'] = True
        if not df[df['simultaneous_heat_cool'] == True].empty:
            error_df_list.append(df[df['simultaneous_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[['site', 'ahu']]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)
        return error_df
    else:
        return pd.DataFrame()
# define the meter timeseries streams we want
meter_data = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    window="15m",
    timeseries=[pymortar.Timeseries(view="meters", dataVars=["?meter"])])

# temporal parameters for the query: 2016-2018 @ 15min mean
time_params = pymortar.TimeParams(
    start="2016-01-01T00:00:00Z",
    end="2018-01-01T00:00:00Z",
)

# form the full request object (`meters` is the View defined earlier)
request = pymortar.FetchRequest(sites=resp.sites,
                                views=[meters],
                                dataFrames=[meter_data],
                                time=time_params)

# download the data
print("Starting to download data...")
data = client.fetch(request)

# compute min/max/mean for each site
# TODO: make this daily
ranges = []
for site in resp.sites:
    meter_uuids = data.query(
        "select meter_uuid from meters where site='{0}'".format(site))
    meter_uuids = [row[0] for row in meter_uuids]
    meterdf = data['meters'][meter_uuids].sum(axis=1)
    ranges.append([site, meterdf.min(), meterdf.max(), meterdf.mean()])
qualify_resp = c.qualify([air_temp_sensor_query, air_temp_setpoint_query])
print(qualify_resp)
print("running on {0} sites".format(len(qualify_resp.sites)))

request = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        pymortar.View(
            name="airtemp_sensors",
            definition=air_temp_sensor_query,
        ),
        pymortar.View(
            name="airtemp_sps",
            definition=air_temp_setpoint_query,
        )
    ],
    dataFrames=[
        pymortar.DataFrame(name="data1",
                           aggregation=pymortar.MAX,
                           window="15m",
                           timeseries=[
                               pymortar.Timeseries(
                                   view="airtemp_sensors",
                                   dataVars=["?sensor"],
                               )
                           ])
    ])
# NOTE: unlike the other fetches in this file, no time=pymortar.TimeParams(...)
# is set on this request.

# fetch() returns a single result object (as everywhere else in this file),
# not a (views, metadata, dataframes) tuple.
resp = c.fetch(request)
print(resp)
print(resp['data1'])