# Example #1
def get_power(site, start, end, agg, window, cli):
    """Fetch building power data, preferring Green Button meter data and
    filling gaps with Building Electric Meter ("eagle") data.

    Parameters
    ----------
    site : str
        Site name to query.
    start, end : str
        Time range bounds, e.g. 'YYYY-MM-DDTHH:MM:SSZ'.
    agg : str
        Aggregation name ('MEAN', 'MAX', ...), resolved against pymortar.
    window : str
        Aggregation window size, e.g. '15m'.
    cli : pymortar.Client
        Client used to execute the fetch requests.

    Returns
    -------
    pd.DataFrame or None
        Power timeseries in watts, or None when neither source has data.
    """
    eagle_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };"""

    gb_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""

    # Resolve the aggregation constant by name; eval() on a built string is
    # unsafe and unnecessary.
    query_agg = getattr(pymortar, agg.upper())

    def _make_request(definition):
        # Both meter queries use an identical request shape; build it once.
        return pymortar.FetchRequest(
            sites=[site],
            views=[pymortar.View(name='power', definition=definition)],
            time=pymortar.TimeParams(start=start, end=end),
            dataFrames=[
                pymortar.DataFrame(name='power',
                                   aggregation=query_agg,
                                   window=window,
                                   timeseries=[
                                       pymortar.Timeseries(view='power',
                                                           dataVars=['?meter'])
                                   ])
            ])

    result_gb = cli.fetch(_make_request(gb_power_query))
    result_eagle = cli.fetch(_make_request(eagle_power_query))

    power = None  # explicit default so the no-data path cannot raise UnboundLocalError
    try:
        # Adjusts from energy to power (15 min period), and from kW to W.
        power_gb = result_gb['power'] * 4000
        power_eagle = adjust(result_eagle['power'])
        power_eagle.columns = [power_gb.columns[0]]
        # Power uses available Green Button data, fills NA with eagle data.
        power = power_gb.fillna(value=power_eagle)
    except Exception:  # one of the sources is missing or empty
        if np.size(result_gb['power']) > 1:
            power = result_gb['power'] * 4000
        elif np.size(result_eagle['power']) > 1:
            power = adjust(result_eagle['power'])
        else:
            print("no data")

    return power
# Example #2
def get_weather(site, start, end, agg, window, cli):
    """Fetch aggregated outdoor weather-temperature data for a site.

    Parameters
    ----------
    site : str
        Site name to query.
    start, end : str
        Time range bounds, e.g. 'YYYY-MM-DDTHH:MM:SSZ'.
    agg : str
        Aggregation name ('MEAN', 'MAX', ...), resolved against pymortar.
    window : str
        Aggregation window size, e.g. '15m'.
    cli : pymortar.Client
        Client used to execute the fetch request.

    Returns
    -------
    pd.DataFrame
        Weather temperature timeseries.
    """
    weather_query = """SELECT ?t WHERE {
            ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor
        };"""
    # Look the aggregation constant up by name instead of eval() — same
    # result, no arbitrary-code-execution risk.
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='weather', definition=weather_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='weather',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='weather',
                                                       dataVars=['?t'])
                               ])
        ])
    result = cli.fetch(request)
    return result['weather']
# Example #3
def get_power(site, start, end, agg, window, cli):
    """Fetch aggregated Green Button meter power data for a site.

    Parameters
    ----------
    site : str
        Site name to query.
    start, end : str
        Time range bounds, e.g. 'YYYY-MM-DDTHH:MM:SSZ'.
    agg : str
        Aggregation name ('MEAN', 'MAX', ...), resolved against pymortar.
    window : str
        Aggregation window size, e.g. '15m'.
    cli : pymortar.Client
        Client used to execute the fetch request.

    Returns
    -------
    pd.DataFrame
        Meter power timeseries.
    """
    power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""
    # Resolve the aggregation constant by name instead of eval() — same
    # result, no arbitrary-code-execution risk.
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result = cli.fetch(request)
    return result['power']
# Example #4
                                time=timeparams)

    return client.fetch(req)


# Metadata view matching every Building Electric Meter in the Brick model.
meter_view = pymortar.View(
    name="meters",
    definition="""SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };""",
)
# Mean-aggregated timeseries stream for each meter matched by the view above.
# NOTE(review): no `window` is specified here — presumably pymortar's default
# window applies; confirm before relying on the sampling interval.
meter_df = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    timeseries=[pymortar.Timeseries(
        view="meters",
        dataVars=['?meter'],
    )])

tstats_view = pymortar.View(
    name="tstats",
    definition="""SELECT ?rtu ?zone ?tstat ?csp ?hsp ?temp ?state WHERE {
      ?rtu rdf:type brick:RTU .
      ?tstat bf:controls ?rtu .
      ?rtu bf:feeds ?zone .
      ?tstat bf:hasPoint ?temp .
      ?temp rdf:type/rdfs:subClassOf* brick:Temperature_Sensor .

      ?tstat bf:hasPoint ?csp .
      ?csp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint .

      ?tstat bf:hasPoint ?hsp .
    def get_meter_data(self,
                       site,
                       start,
                       end,
                       point_type="Green_Button_Meter",
                       agg='MEAN',
                       window='15m'):
        """ Get meter data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        point_type      : str
            Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
        agg             : str or pymortar aggregation object
            Either an aggregation name ('MEAN', 'MAX', 'MIN', 'COUNT',
            'SUM', 'RAW' — for RAW the window parameter is ignored) or the
            corresponding pymortar constant itself.
        window          : str
            Size of the moving window.

        Returns
        -------
        pd.DataFrame(), defaultdict(list)
            Meter data, dictionary that maps meter data's columns (uuid's) to sitenames.

        """
        # Resolve any string aggregation name against pymortar. The previous
        # version special-cased only 'MEAN' and silently passed other
        # strings through unresolved.
        if isinstance(agg, str):
            agg = getattr(pymortar, agg.upper())

        # Mortar expects UTC timestamps; convert from local time.
        # NOTE(review): confirm Mortar's expected timezone against the API docs.
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

        # Define the view of meters (metadata).
        meter = pymortar.View(name="view_meter",
                              sites=site,
                              definition=query_meter)

        # Define the meter timeseries stream.
        data_view_meter = pymortar.DataFrame(
            name="data_meter",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[
                pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
            ])

        # Define timeframe.
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object.
        request = pymortar.FetchRequest(sites=site,
                                        views=[meter],
                                        dataFrames=[data_view_meter],
                                        time=time_params)

        # Fetch data from request.
        response = self.client.fetch(request)

        # Each view_meter row is (url, uuid, sitename).
        resp_meter = response.query('select * from view_meter')

        # Map uuid's to the site names.
        map_uuid_sitename = defaultdict(list)
        for (url, uuid, sitename) in resp_meter:
            map_uuid_sitename[uuid].append(sitename)

        return response['data_meter'], map_uuid_sitename
# Example #6
def ahu_analysis(client, start_time, end_time):
    """Find AHUs that are heating and cooling simultaneously.

    Queries every site for AHUs with both a cooling-valve and a
    heating-valve command point, fetches 15-minute MAX aggregates, and
    flags timestamps where both valves are commanded open at once.

    Parameters
    ----------
    client : pymortar.Client
        Client used for qualify/fetch.
    start_time, end_time : datetime.datetime
        Analysis time range.

    Returns
    -------
    pd.DataFrame
        Rows (indexed by time) of ['site', 'ahu'] for simultaneous
        heat/cool events; empty DataFrame when none are found or the
        qualify step fails.
    """
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")

    query = """SELECT ?cooling_point ?heating_point ?ahu WHERE {
        ?cooling_point rdf:type/rdfs:subClassOf* brick:Cooling_Valve_Command .
        ?heating_point rdf:type/rdfs:subClassOf* brick:Heating_Valve_Command .
        ?ahu bf:hasPoint ?cooling_point .
        ?ahu bf:hasPoint ?heating_point .
    };"""

    resp = client.qualify([query])
    if resp.error != "":
        print("ERROR: ", resp.error)
        # Previously execution continued with an unusable response; bail out
        # with the same empty result the no-findings path returns.
        return pd.DataFrame()

    points_view = pymortar.View(
        sites=resp.sites,
        name="point_type_data",
        definition=query,
    )

    point_streams = pymortar.DataFrame(
        name="points_data",
        aggregation=pymortar.MAX,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="point_type_data",
                                dataVars=["?cooling_point", "?heating_point"])
        ])

    time_params = pymortar.TimeParams(start=st, end=et)

    request = pymortar.FetchRequest(
        sites=resp.sites,
        views=[points_view],
        time=time_params,
        dataFrames=[point_streams],
    )

    response = client.fetch(request)

    ahus = [
        ahu[0] for ahu in response.query("select ahu from point_type_data")
    ]

    error_df_list = []
    for ahu in ahus:
        # Look up the uuid pair (and site) for this AHU's valve commands.
        heat_cool_query = """
            SELECT cooling_point_uuid, heating_point_uuid, site
            FROM point_type_data
            WHERE ahu = "{0}";
        """.format(ahu)
        res = response.query(heat_cool_query)
        cooling_uuid = res[0][0]
        heating_uuid = res[0][1]
        site = res[0][2]
        df = response["points_data"][[cooling_uuid, heating_uuid]].dropna()
        df.columns = ['cooling', 'heating']
        df['site'] = site
        df['ahu'] = ahu.split('#')[1]
        # Both valves commanded open at the same timestamp.
        df['simultaneous_heat_cool'] = False
        df.loc[((df.cooling > 0) & (df.heating > 0)),
               'simultaneous_heat_cool'] = True
        flagged = df[df['simultaneous_heat_cool']]  # boolean mask, computed once
        if not flagged.empty:
            error_df_list.append(flagged)

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[['site', 'ahu']]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)

        return error_df
    else:
        return pd.DataFrame()
# Example #7
def read_config():
    """ Reads config.json file to obtain parameters and fetch data from Mortar.

    Returns
    -------
    pymortar fetch response
        Response containing the 'data_meter' and 'data_occupancy'
        dataframes for the qualified sites.

    """

    # Instantiate client.
    client = pymortar.Client({})

    # Query for meter data.
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for occupancy data.
    query_occupancy = "SELECT ?point WHERE { ?point rdf:type/rdfs:subClassOf* brick:Occupancy_Sensor };"

    # Get list of sites for meter data and occupancy data.
    resp_meter = client.qualify([query_meter])
    resp_occupancy = client.qualify([query_occupancy])

    if resp_meter.error or resp_occupancy.error:
        # Report whichever qualify call actually failed. The old code used
        # `resp_meter.error if True else ...`, which always reported the
        # meter error even when only the occupancy qualify failed (and also
        # misspelled "ERROR").
        print("ERROR: ", resp_meter.error or resp_occupancy.error)
        os._exit(0)
    else:

        # Get list of sites that are common for meter data and occupancy data.
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_occupancy.sites)))

        # If config['sites'] = "", then default to all sites.
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)
            print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata).
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of occupancy sensors (metadata).
    occupancy = pymortar.View(name="view_occupancy",
                              sites=config['sites'],
                              definition=query_occupancy)

    # Define the meter timeseries stream.
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the occupancy timeseries stream (raw samples, no windowing).
    data_view_occupancy = pymortar.DataFrame(
        name="data_occupancy",  # dataframe column name
        aggregation=pymortar.RAW,
        window="",
        timeseries=[
            pymortar.Timeseries(view="view_occupancy", dataVars=["?point"])
        ])

    # Define timeframe.
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object.
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, occupancy],
        dataFrames=[data_view_meter, data_view_occupancy],
        time=time_params)

    # Fetch data from request.
    response = client.fetch(request)

    # Save data to csv file.
    if config['save_data']:
        response['data_meter'].to_csv('meter_data.csv')
        response['data_occupancy'].to_csv('occupancy_data.csv')

    # Create results folder if it doesn't exist.
    if not os.path.exists('./' + config['results_folder']):
        os.mkdir('./' + config['results_folder'])

    return response
# Example #8
# Qualify both queries, then fetch 15-minute MAX air-temperature data.
qualify_resp = c.qualify([air_temp_sensor_query, air_temp_setpoint_query])
print(qualify_resp)
print("running on {0} sites".format(len(qualify_resp.sites)))

# Metadata views for the two point classes.
sensor_view = pymortar.View(
    name="airtemp_sensors",
    definition=air_temp_sensor_query,
)
setpoint_view = pymortar.View(
    name="airtemp_sps",
    definition=air_temp_setpoint_query,
)

# Timeseries stream over the sensor view only.
sensor_stream = pymortar.DataFrame(
    name="data1",
    aggregation=pymortar.MAX,
    window="15m",
    timeseries=[
        pymortar.Timeseries(
            view="airtemp_sensors",
            dataVars=["?sensor"],
        )
    ])

request = pymortar.FetchRequest(sites=qualify_resp.sites,
                                views=[sensor_view, setpoint_view],
                                dataFrames=[sensor_stream])

(views, metadata, dataframes) = c.fetch(request)
print(views)
print(metadata)
print(dataframes)