Example #1
def get_power(site, start, end, agg, window, cli):

    eagle_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };"""

    gb_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""
    query_agg = getattr(pymortar, agg.upper())
    request_gb = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=gb_power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    request_eagle = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=eagle_power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result_gb = cli.fetch(request_gb)
    result_eagle = cli.fetch(request_eagle)
    try:
        # Convert from energy per 15-minute period to power, and from kW to W.
        power_gb = result_gb['power'] * 4000
        power_eagle = adjust(result_eagle['power'])
        power_eagle.columns = [power_gb.columns[0]]
        # Prefer available Green Button data; fill gaps with Eagle data.
        power = power_gb.fillna(value=power_eagle)
    except Exception:
        if np.size(result_gb['power']) > 1:
            power = result_gb['power'] * 4000
        elif np.size(result_eagle['power']) > 1:
            power = adjust(result_eagle['power'])
        else:
            print("no data")
            power = None

    return power
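
A minimal usage sketch for the function above (hedged: the site name, dates, aggregation string, and window are placeholders; instantiating the client with pymortar.Client({}) follows the pattern used in the later examples):

import pymortar

cli = pymortar.Client({})  # assumes credentials are configured in the environment
power_df = get_power(site='example-site',
                     start='2019-01-01T00:00:00-08:00',
                     end='2019-02-01T00:00:00-08:00',
                     agg='mean', window='15m', cli=cli)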
Example #2
File: server.py Project: kuzha/XBOS
def get_mortar_oat_building(building, start, end, window, agg,
                            pymortar_client):
    outside_temperature_query = """SELECT ?temp WHERE {
        ?temp rdf:type brick:Weather_Temperature_Sensor
    };"""

    weather_stations_view = pymortar.View(name='weather_stations_view',
                                          sites=[building],
                                          definition=outside_temperature_query)
    weather_stations_stream = pymortar.DataFrame(
        name='weather_stations',
        aggregation=agg,
        window=str(int(window)) + 's',
        timeseries=[
            pymortar.Timeseries(view='weather_stations_view',
                                dataVars=['?temp'])
        ])
    weather_stations_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    request = pymortar.FetchRequest(sites=[building],
                                    views=[weather_stations_view],
                                    dataFrames=[weather_stations_stream],
                                    time=weather_stations_time_params)
    outside_temperature_data = pymortar_client.fetch(
        request)['weather_stations']
    if outside_temperature_data is None:
        return None, "did not fetch data from pymortar with query: %s" % outside_temperature_query

    return outside_temperature_data, None
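
A hedged usage sketch; the building name and date range are placeholders, and the window is given in seconds as the function expects:

import datetime
import pytz
import pymortar

client = pymortar.Client({})  # assumed client setup, as in the other examples
start = datetime.datetime(2019, 1, 1, tzinfo=pytz.utc)
end = datetime.datetime(2019, 1, 8, tzinfo=pytz.utc)
oat_df, err = get_mortar_oat_building('example-building', start, end,
                                      window=3600, agg=pymortar.MEAN,
                                      pymortar_client=client)
if err is not None:
    print(err)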
Example #3
def get_greenbutton_id(site, use_TED_meter=False):
    if use_TED_meter or site in TED_meters:
        power_query = """SELECT ?meter WHERE {
            ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter
        };"""
    else:
        power_query = """SELECT ?meter WHERE {
                ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter
            };"""
    query_agg = pymortar.MAX
    start = '2019-01-01T00:00:00-08:00'
    end = '2019-01-02T00:00:00-08:00'
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window='24h',
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result = client.fetch(request)
    return result['power'].columns[0]
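
A short usage sketch (the site name is a placeholder; the function relies on module-level TED_meters and client objects, as in the snippet above):

meter_uuid = get_greenbutton_id('example-site')
print(meter_uuid)  # uuid column name of the site's Green Button (or TED) meter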
Example #4
File: server.py Project: kuzha/XBOS
def _get_raw_actions(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    TODO how to deal with windows in which two different actions are performed in given zone.
    Note: GETS THE MAX ACTION IN GIVEN INTERVAL.
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size: string ending in one of [s, m, h, d], e.g. "1s" for one second.
    :return:
    """
    thermostat_action_query = """SELECT ?tstat ?status_point WHERE {
            ?tstat rdf:type brick:Thermostat .
            ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
            ?tstat bf:hasPoint ?status_point .
            ?status_point rdf:type brick:Thermostat_Status .
        };""" % (building, zone)

    # resp = pymortar_client.qualify([thermostat_action_query])  # needed to get the list of all sites

    thermostat_action_view = pymortar.View(
        name="thermostat_action_view",
        sites=[building],
        definition=thermostat_action_query,
    )

    thermostat_action_stream = pymortar.DataFrame(
        name="thermostat_action",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="thermostat_action_view",
                dataVars=["?status_point"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            thermostat_action_view
        ],
        dataFrames=[
            thermostat_action_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    thermostat_action_data = pymortar_client.fetch(request)["thermostat_action"]

    if thermostat_action_data is None:
        return None, "did not fetch data from pymortar with query: %s" % thermostat_action_query

    return thermostat_action_data, None
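
A hedged call sketch for the helper above; the building, zone, and date range are illustrative, and the aggregation follows the note in the docstring (max action in the interval):

import datetime
import pytz
import pymortar

client = pymortar.Client({})
start = datetime.datetime(2018, 9, 30, tzinfo=pytz.utc)
end = datetime.datetime(2018, 10, 1, tzinfo=pytz.utc)
actions, err = _get_raw_actions('example-building', 'example-zone', client,
                                start, end, window_size='1m',
                                aggregation=pymortar.MAX)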
Example #5
File: server.py Project: kuzha/XBOS
def _get_raw_indoor_temperatures(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size:
    :return:
    """
    temperature_query = """SELECT ?tstat ?temp WHERE {
                ?tstat rdf:type brick:Thermostat .
                ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
                ?tstat bf:hasPoint ?temp .
                ?temp  rdf:type brick:Temperature_Sensor  .
            };""" % (building, zone)

    # resp = pymortar_client.qualify([temperature_query])  # needed to get the list of all sites

    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )

    temperature_stream = pymortar.DataFrame(
        name="temperature",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_view",
                dataVars=["?temp"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            temperature_view
        ],
        dataFrames=[
            temperature_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    temperature_data = pymortar_client.fetch(request)["temperature"]

    if temperature_data is None:
        return None, "did not fetch data from pymortar with query: %s" % temperature_query

    return temperature_data, None
Example #6
File: server.py Project: kuzha/XBOS
def _get_raw_temperature_bands(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size:
    :return:
    """
    temperature_bands_query = """
        SELECT ?tstat ?heating_setpoint ?cooling_setpoint WHERE {
        ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
        ?tstat bf:hasPoint ?heating_setpoint .
        ?tstat bf:hasPoint ?cooling_setpoint .
        ?heating_setpoint rdf:type brick:Supply_Air_Temperature_Heating_Setpoint .
        ?cooling_setpoint rdf:type brick:Supply_Air_Temperature_Cooling_Setpoint
    };""" % (building, zone)

    temperature_bands_view = pymortar.View(
        name="temperature_bands_view",
        sites=[building],
        definition=temperature_bands_query,
    )

    temperature_bands_stream = pymortar.DataFrame(
        name="temperature_bands",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_bands_view",
                dataVars=["?heating_setpoint", "?cooling_setpoint"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            temperature_bands_view
        ],
        dataFrames=[
            temperature_bands_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    temperature_bands_data = pymortar_client.fetch(request)["temperature_bands"]

    if temperature_bands_data is None:
        return None, "did not fetch data from pymortar with query: %s" % temperature_bands_query

    return temperature_bands_data, None
Example #7
def get_indoor_temp_data(building, zone, window):
    start = int(
        time.mktime(
            datetime.datetime.strptime("30/09/2018 0:00:00",
                                       "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)
    end = int(
        time.mktime(
            datetime.datetime.strptime("1/10/2018 0:00:00",
                                       "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)
    start = datetime.datetime.utcfromtimestamp(float(
        start / 1e9)).replace(tzinfo=pytz.utc)
    end = datetime.datetime.utcfromtimestamp(float(
        end / 1e9)).replace(tzinfo=pytz.utc)

    temperature_query = """SELECT ?tstat ?temp WHERE {
                    ?tstat rdf:type brick:Thermostat .
                    ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
                    ?tstat bf:hasPoint ?temp .
                    ?temp  rdf:type brick:Temperature_Sensor  .
                };""" % (building, zone)

    # resp = pymortar_client.qualify([temperature_query])  # needed to get the list of all sites

    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )

    temperature_stream = pymortar.DataFrame(name="temperature",
                                            aggregation=pymortar.MEAN,
                                            window=window,
                                            timeseries=[
                                                pymortar.Timeseries(
                                                    view="temperature_view",
                                                    dataVars=["?temp"],
                                                )
                                            ])

    request = pymortar.FetchRequest(sites=[building],
                                    views=[temperature_view],
                                    dataFrames=[temperature_stream],
                                    time=pymortar.TimeParams(
                                        start=rfc3339(start),
                                        end=rfc3339(end),
                                    ))

    temperature_data = pymortar_client.fetch(request)

    print(temperature_data["temperature"])

    return temperature_data
Example #8
def get_from_pymortar(start, end, uuid, pymortar_client):

    price_stream = pymortar.DataFrame(name="price_data",
                                      uuids=[uuid],
                                      aggregation=pymortar.MEAN,
                                      window="1h")

    price_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    price_request = pymortar.FetchRequest(sites=[""],
                                          dataFrames=[price_stream],
                                          time=price_time_params)

    return pymortar_client.fetch(price_request)["price_data"]
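
A hedged usage sketch; the uuid is a placeholder for a price stream identifier, and start/end are timezone-aware datetimes as elsewhere in these examples:

import datetime
import pytz
import pymortar

pymortar_client = pymortar.Client({})
start = datetime.datetime(2019, 4, 20, 1, tzinfo=pytz.utc)
end = datetime.datetime(2019, 4, 27, 23, tzinfo=pytz.utc)
price_df = get_from_pymortar(start, end,
                             uuid='00000000-0000-0000-0000-000000000000',
                             pymortar_client=pymortar_client)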
Example #9
def get_outside_temp_data(building):

    interval = 3600

    outside_temperature_query = """SELECT ?temp WHERE {
        ?temp rdf:type brick:Weather_Temperature_Sensor .
    };"""

    weather_stations_view = pymortar.View(
        name="weather_stations_view",
        sites=[building],
        definition=outside_temperature_query,
    )

    weather_stations_stream = pymortar.DataFrame(
        name="weather_stations",
        aggregation=pymortar.MEAN,
        window=str(int(interval)) + 's',
        timeseries=[
            pymortar.Timeseries(
                view="weather_stations_view",
                dataVars=["?temp"],
            )
        ]
    )

    weather_stations_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            weather_stations_view
        ],
        dataFrames=[
            weather_stations_stream
        ],
        time=weather_stations_time_params
    )

    df = pymortar_client.fetch(request)['weather_stations']

    return df
Example #10
def get_power(site, start, end, agg, window, cli):
    power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result = cli.fetch(request)
    return result['power']
Example #11
def get_weather(site, start, end, agg, window, cli):
    weather_query = """SELECT ?t WHERE {
            ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor
        };"""
    query_agg = getattr(pymortar, agg.upper())
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='weather', definition=weather_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='weather',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='weather',
                                                       dataVars=['?t'])
                               ])
        ])
    result = cli.fetch(request)
    return result['weather']
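
A minimal usage sketch mirroring the get_power call pattern (site name, dates, aggregation, and window are placeholders):

import pymortar

cli = pymortar.Client({})
weather_df = get_weather('example-site', '2019-01-01T00:00:00-08:00',
                         '2019-02-01T00:00:00-08:00', 'mean', '1h', cli)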
Example #12
File: server.py Project: kuzha/XBOS
def get_mortar_oat_uuid(uuid, start, end, window, agg, pymortar_client):
    oat_df = pymortar.DataFrame(
        name="weather_stations",
        uuids=[uuid],
        aggregation=agg,
        window=str(int(window)) + 's',
    )

    oat_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    oat_request = pymortar.FetchRequest(sites=[""],
                                        dataFrames=[oat_df],
                                        time=oat_time_params)
    outside_temperature_data = pymortar_client.fetch(
        oat_request)['weather_stations']
    if outside_temperature_data is None:
        return None, "did not fetch data from pymortar for uuid: %s" % uuid

    return outside_temperature_data, None
Example #13
    def get_tstat(self, site, start, end, agg=pymortar.MAX, window='1m'):
        """ Get tstat data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
            pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored).
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame()
            Dataframe containing tstat data for all sites.

        """

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_tstat = "SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE { \
            ?tstat bf:hasLocation ?room . \
            ?zone bf:hasPart ?room . \
            ?tstat bf:hasPoint ?state . \
            ?tstat bf:hasPoint ?temp . \
            ?tstat bf:hasPoint ?hsp . \
            ?tstat bf:hasPoint ?csp . \
            ?zone rdf:type/rdfs:subClassOf* brick:Zone . \
            ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat . \
            ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status . \
            ?temp  rdf:type/rdfs:subClassOf* brick:Temperature_Sensor  . \
            ?hsp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint . \
            ?csp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint . \
        };"

        # Get list of sites for tstat data
        resp_tstat = self.client.qualify([query_tstat])

        if resp_tstat.error:
            raise RuntimeError(resp_tstat.error)

        # Define the view of tstat (metadata)
        tstat = pymortar.View(name="view_tstat",
                              sites=site,
                              definition=query_tstat)

        # Define the meter timeseries stream
        data_view_tstat = pymortar.DataFrame(
            name="data_tstat",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[
                pymortar.Timeseries(
                    view="view_tstat",
                    dataVars=["?state", "?temp", "?hsp", "?csp"])
            ])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[tstat],
                                        dataFrames=[data_view_tstat],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # Final dataframe containing all sites' data
        df_result = pd.DataFrame()

        tstat_df = response['data_tstat']
        tstats = [
            tstat[0]
            for tstat in response.query("select tstat from view_tstat")
        ]
        error_df_list = []

        for i, tstat in enumerate(tstats):

            q = """
                SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
                FROM view_tstat
                WHERE tstat = "{0}";
            """.format(tstat)

            res = response.query(q)
            if not res:
                continue

            state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
            df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]

            # A single site has many tstat points. Adding site+str(i) distinguishes each of them.
            # CHECK: This can have a better naming scheme.
            df.columns = [
                site + str(i) + '_state', site + str(i) + '_iat',
                site + str(i) + '_hsp', site + str(i) + '_csp'
            ]

            df_result = df_result.join(df, how='outer')

        return df_result
Example #14
def tstat_zone_analysis(client, resample_minutes, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    print(st)
    print(et)

    tstat_query = """
        SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE {
            ?tstat bf:hasLocation ?room .
            ?zone bf:hasPart ?room .

            ?tstat bf:hasPoint ?state .
            ?tstat bf:hasPoint ?temp .
            ?tstat bf:hasPoint ?hsp .
            ?tstat bf:hasPoint ?csp .

            ?zone rdf:type/rdfs:subClassOf* brick:Zone .
            ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat .
            ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status .
            ?temp  rdf:type/rdfs:subClassOf* brick:Temperature_Sensor  .
            ?hsp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint .
            ?csp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint .
        };
    """
    qualify_response = client.qualify([tstat_query])
    if qualify_response.error != "":
        print("ERROR: ", qualify_response.error)
        os._exit(1)

    print("Running on {0} sites".format(len(qualify_response.sites)))

    tstat_view = pymortar.View(
        name="tstat_points",
        sites=qualify_response.sites,
        definition=tstat_query,
    )

    tstat_streams = pymortar.DataFrame(
        name="thermostat_data",
        aggregation=pymortar.MAX,
        window="1m",
        timeseries=[
            pymortar.Timeseries(view="tstat_points",
                                dataVars=["?state", "?temp", "?hsp", "?csp"])
        ])

    time_params = pymortar.TimeParams(start=st, end=et)

    request = pymortar.FetchRequest(
        sites=qualify_response.sites,  # from our call to Qualify
        views=[tstat_view],
        dataFrames=[tstat_streams],
        time=time_params)
    result = client.fetch(request)

    tstat_df = result['thermostat_data']
    tstats = [
        tstat[0] for tstat in result.query("select tstat from tstat_points")
    ]

    error_df_list = []
    for tstat in tstats:
        q = """
                SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
                FROM tstat_points
                WHERE tstat = "{0}";
            """.format(tstat)
        res = result.query(q)

        if len(res) == 0:
            continue

        state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
        df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]
        df.columns = ['state', 'iat', 'hsp', 'csp']

        df2 = pd.DataFrame()
        resample_time = '{0}T'.format(resample_minutes)
        df2['min_hsp'] = df['hsp'].resample(resample_time).min()
        df2['min_csp'] = df['csp'].resample(resample_time).min()
        df2['max_hsp'] = df['hsp'].resample(resample_time).max()
        df2['max_csp'] = df['csp'].resample(resample_time).max()

        df2['heat_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 1).sum() +
                       (x == 4).sum()) / resample_minutes * 100)
        df2['cool_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 2).sum() +
                       (x == 5).sum()) / resample_minutes * 100)

        df2['tstat'] = tstat
        df2['room'] = room.split('#')[1]
        df2['zone'] = zone.split('#')[1]
        df2['site'] = site

        df2['both_heat_cool'] = False
        df2.loc[((df2.heat_percent > 0) & (df2.cool_percent > 0)),
                'both_heat_cool'] = True
        if not df2[df2['both_heat_cool'] == True].empty:
            error_df_list.append(df2[df2['both_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[[
            'site', 'zone', 'room', 'heat_percent', 'cool_percent', 'min_hsp',
            'min_csp', 'max_hsp', 'max_csp'
        ]]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)

        return error_df
    else:
        return pd.DataFrame()
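
A hedged driver sketch for the analysis above; the date range and resample interval are illustrative:

import datetime
import pymortar

client = pymortar.Client({})
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(days=7)
errors = tstat_zone_analysis(client, resample_minutes=15,
                             start_time=start_time, end_time=end_time)
print(errors.head() if not errors.empty else "no simultaneous heat/cool found")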
Example #15
def read_config():
    """ Reads config.json file to obtain parameters and fetch data from Mortar.

	Returns
	-------
	pd.DataFrame(), pd.DataFrame(), default(list), default(list)
		meter data, occupancy data, map of uuid to meter data, map of uuid to occupancy data
	
	"""

    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for occupancy data
    query_occupancy = "SELECT ?point WHERE { ?point rdf:type/rdfs:subClassOf* brick:Occupancy_Sensor };"

    # Get list of sites for meter data and occupancy data
    resp_meter = client.qualify([query_meter])
    resp_occupancy = client.qualify([query_occupancy])

    if resp_meter.error or resp_occupancy.error:
        print("ERORR: ", resp_meter.error if True else resp_occupancy.error)
        os._exit(0)
    else:

        # Get list of sites that are common for meter data and occupancy data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_occupancy.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)
            print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of OAT (metadata)
    occupancy = pymortar.View(name="view_occupancy",
                              sites=config['sites'],
                              definition=query_occupancy)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the occupancy timeseries stream
    data_view_occupancy = pymortar.DataFrame(
        name="data_occupancy",  # dataframe column name
        aggregation=pymortar.RAW,
        window="",
        timeseries=[
            pymortar.Timeseries(view="view_occupancy", dataVars=["?point"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, occupancy],
        dataFrames=[data_view_meter, data_view_occupancy],
        time=time_params)

    # Fetch data from request
    response = client.fetch(request)

    # Save data to csv file
    if config['save_data']:
        response['data_meter'].to_csv('meter_data.csv')
        response['data_occupancy'].to_csv('occupancy_data.csv')

    # Create results folder if it doesn't exist
    if not os.path.exists('./' + config['results_folder']):
        os.mkdir('./' + config['results_folder'])

    return response
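
read_config relies on a module-level config loaded from config.json; a hypothetical shape matching the keys used above ('sites', 'time', 'save_data', 'results_folder') would be:

# Hypothetical config contents; the site name and dates are placeholders.
config = {
    "sites": ["example-site"],          # "" or [] defaults to all qualifying sites
    "time": {
        "start": "2019-01-01T00:00:00Z",
        "end": "2019-02-01T00:00:00Z",
    },
    "save_data": True,
    "results_folder": "results",
}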
Example #16
            definition=
            "SELECT ?vav WHERE { ?vav rdf:type/rdfs:subClassOf* brick:Temperature_Sensor };",
        ),
        pymortar.View(
            name="meter",
            definition=
            "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Electric_Meter };",
        ),
    ],
    dataFrames=[
        pymortar.DataFrame(
            name="meter_data",
            aggregation=pymortar.MEAN,
            window="5m",
            uuids=["b8166746-ba1c-5207-8c52-74e4700e4467"],
            #timeseries=[
            #    pymortar.Timeseries(
            #        view="meter",
            #        dataVars=["?meter"],
            #    )
            #]
        )
    ],
    time=pymortar.TimeParams(
        start="2019-01-01T00:00:00Z",
        end="2019-04-01T00:00:00Z",
    ))
s = time.time()
res = client.fetch(req)
e = time.time()
print("took {0}".format(e - s))
print(res)
Example #17
 views=[
     pymortar.View(
         name="airflow_sensors",
         definition=air_flow_sensor_query,
     ),
     pymortar.View(
         name="airflow_sps",
         definition=air_flow_setpoint_query,
     )
 ],
 dataFrames=[
     pymortar.DataFrame(name="sensors",
                        aggregation=pymortar.MEAN,
                        window="30m",
                        timeseries=[
                            pymortar.Timeseries(
                                view="airflow_sensors",
                                dataVars=["?sensor"],
                            )
                        ]),
     pymortar.DataFrame(name="setpoints",
                        aggregation=pymortar.MEAN,
                        window="30m",
                        timeseries=[
                            pymortar.Timeseries(
                                view="airflow_sps",
                                dataVars=["?sp"],
                            )
                        ])
 ],
 time=pymortar.TimeParams(
Example #18
start = int(
    time.mktime(
        datetime.datetime.strptime("20/04/2019 01:00:00",
                                   "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)
end = int(
    time.mktime(
        datetime.datetime.strptime("27/04/2019 23:59:59",
                                   "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)

print(start)
print(end)
print(uuid)

price_stream = pymortar.DataFrame(name="price_data",
                                  uuids=[uuid],
                                  aggregation=pymortar.MEAN,
                                  window="1h")

price_time_params = pymortar.TimeParams(
    start=rfc3339(int(start / 1e9 - start / 1e9 % 3600)),
    end=rfc3339(int(end / 1e9)),
)

price_request = pymortar.FetchRequest(sites=[""],
                                      dataFrames=[price_stream],
                                      time=price_time_params)

df = pymortar_client.fetch(price_request)["price_data"]

df.to_csv("new.csv")
Example #19
def ahu_analysis(client, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")

    query = """SELECT ?cooling_point ?heating_point ?ahu WHERE {
        ?cooling_point rdf:type/rdfs:subClassOf* brick:Cooling_Valve_Command .
        ?heating_point rdf:type/rdfs:subClassOf* brick:Heating_Valve_Command .
        ?ahu bf:hasPoint ?cooling_point .
        ?ahu bf:hasPoint ?heating_point .
    };"""

    resp = client.qualify([query])
    if resp.error != "":
        print("ERROR: ", resp.error)

    points_view = pymortar.View(
        sites=resp.sites,
        name="point_type_data",
        definition=query,
    )

    point_streams = pymortar.DataFrame(
        name="points_data",
        aggregation=pymortar.MAX,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="point_type_data",
                                dataVars=["?cooling_point", "?heating_point"])
        ])

    time_params = pymortar.TimeParams(start=st, end=et)

    request = pymortar.FetchRequest(
        sites=resp.sites,
        views=[points_view],
        time=time_params,
        dataFrames=[point_streams],
    )

    response = client.fetch(request)

    ahu_df = response["points_data"]
    ahus = [
        ahu[0] for ahu in response.query("select ahu from point_type_data")
    ]

    error_df_list = []
    for ahu in ahus:
        heat_cool_query = """
            SELECT cooling_point_uuid, heating_point_uuid, site
            FROM point_type_data
            WHERE ahu = "{0}";
        """.format(ahu)
        res = response.query(heat_cool_query)
        cooling_uuid = res[0][0]
        heating_uuid = res[0][1]
        site = res[0][2]
        df = response["points_data"][[cooling_uuid, heating_uuid]].dropna()
        df.columns = ['cooling', 'heating']
        df['site'] = site
        df['ahu'] = ahu.split('#')[1]
        df['simultaneous_heat_cool'] = False
        df.loc[((df.cooling > 0) & (df.heating > 0)),
               'simultaneous_heat_cool'] = True
        if not df[df['simultaneous_heat_cool'] == True].empty:
            error_df_list.append(df[df['simultaneous_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[['site', 'ahu']]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)

        return error_df
    else:
        return pd.DataFrame()
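
A hedged driver sketch for the AHU check above (the date range is illustrative):

import datetime
import pymortar

client = pymortar.Client({})
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(days=30)
ahu_errors = ahu_analysis(client, start_time, end_time)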
Example #20
    def get_meter_data(self,
                       site,
                       start,
                       end,
                       point_type="Green_Button_Meter",
                       agg='MEAN',
                       window='15m'):
        """ Get meter data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        point_type      : str
            Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
            pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored).
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame(), defaultdict(list)
            Meter data, dictionary that maps meter data's columns (uuid's) to sitenames.

        """

        # CHECK: Hacky code. Change this later
        if agg == 'MEAN':
            agg = pymortar.MEAN

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

        # Define the view of meters (metadata)
        meter = pymortar.View(name="view_meter",
                              sites=site,
                              definition=query_meter)

        # Define the meter timeseries stream
        data_view_meter = pymortar.DataFrame(
            name="data_meter",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[
                pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
            ])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[meter],
                                        dataFrames=[data_view_meter],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # resp_meter = (url, uuid, sitename)
        resp_meter = response.query('select * from view_meter')

        # Map's uuid's to the site names
        map_uuid_sitename = defaultdict(list)
        for (url, uuid, sitename) in resp_meter:
            map_uuid_sitename[uuid].append(sitename)

        return response['data_meter'], map_uuid_sitename
Example #21
def get_meter_data(pymortar_client,
                   pymortar_objects,
                   site,
                   start,
                   end,
                   point_type="Green_Button_Meter",
                   agg='MEAN',
                   window='15m'):
    """ Get meter data from pymortar.

    Parameters
    ----------
    pymortar_client     : pymortar.Client({})
        Pymortar Client Object.
    pymortar_objects    : dict
        Dictionary that maps aggregation values to corresponding pymortar objects.
    site                : str
        Building name.
    start               : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end                 : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type          : str
        Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
    agg                 : str
        Values include MEAN, MAX, MIN, COUNT, SUM, RAW (the temporal window parameter is ignored)
    window              : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame(), defaultdict(list)
        Meter data, dictionary that maps meter data's columns (uuid's) to sitenames.

    """

    agg = pymortar_objects.get(agg, 'ERROR')

    if agg == 'ERROR':
        raise ValueError(
            'Invalid aggregate type; should be an uppercase string; valid values: '
            + ', '.join(pymortar_objects.keys()))

    query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

    # Define the view of meters (metadata)
    meter = pymortar.View(name="view_meter",
                          sites=[site],
                          definition=query_meter)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=[site],
                                    views=[meter],
                                    dataFrames=[data_view_meter],
                                    time=time_params)

    # Fetch data from request
    response = pymortar_client.fetch(request)

    # resp_meter = (url, uuid, sitename)
    resp_meter = response.query('select * from view_meter')

    # Map's uuid's to the site names
    map_uuid_sitename = defaultdict(list)
    for (url, uuid, sitename) in resp_meter:
        map_uuid_sitename[uuid].append(sitename)

    return response['data_meter'], map_uuid_sitename
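
A hedged usage sketch; pymortar_objects is assumed to be a lookup table from the uppercase aggregation names mentioned in the docstring to the corresponding pymortar constants:

import pymortar

pymortar_objects = {
    'MEAN': pymortar.MEAN, 'MAX': pymortar.MAX, 'MIN': pymortar.MIN,
    'COUNT': pymortar.COUNT, 'SUM': pymortar.SUM, 'RAW': pymortar.RAW,
}
pymortar_client = pymortar.Client({})
meter_df, uuid_map = get_meter_data(pymortar_client, pymortar_objects,
                                    site='example-site',
                                    start='2019-01-01T00:00:00Z',
                                    end='2019-02-01T00:00:00Z')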
Example #22
    def get_weather(self,
                    site,
                    start,
                    end,
                    point_type='Weather_Temperature_Sensor',
                    agg=pymortar.MEAN,
                    window='15m'):
        """ Get weather (OAT) data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        point_type      : str
            Type of point, i.e. Weather_Temperature_Sensor...
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN,
            pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored).
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame(), defaultdict(list)
            OAT data, dictionary that maps meter data's columns (uuid's) to sitenames.

        """

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_oat = "SELECT ?t WHERE { ?t rdf:type/rdfs:subClassOf* brick:" + point_type + " };"

        # Get list of sites for OAT data
        resp_oat = self.client.qualify([query_oat])

        if resp_oat.error:
            raise RuntimeError(resp_oat.error)

        # Define the view of meters (metadata)
        oat = pymortar.View(name="view_oat", sites=site, definition=query_oat)

        # Define the meter timeseries stream
        data_view_oat = pymortar.DataFrame(
            name="data_oat",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[oat],
                                        dataFrames=[data_view_oat],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # resp_meter = (url, uuid, sitename)
        resp_oat = response.query('select * from view_oat')

        # Map's uuid's to the site names
        map_uuid_sitename = defaultdict(list)
        for (url, uuid, sitename) in resp_oat:
            map_uuid_sitename[uuid].append(sitename)

        return response['data_oat'], map_uuid_sitename
Example #23
def read_config():
    """ Reads config.json file that contains parameters for baselines and fetches data from Mortar. 

	Returns
	-------
	pd.DataFrame(), pd.DataFrame(), default(list), default(list)
		meter data, oat data, map of uuid to meter data, map of uuid to oat data
	
	"""

    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for outdoor air temperature data
    query_oat = """ SELECT ?t WHERE { ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor };"""

    # Get list of sites for meter data and OAT data
    resp_meter = client.qualify([query_meter])
    resp_oat = client.qualify([query_oat])

    if resp_meter.error or resp_oat.error:
        print("ERORR: ", resp_meter.error if True else resp_oat.error)
        os._exit(0)
    else:
        # Get list of sites that are common for meter data and OAT data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_oat.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)
            print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of OAT (metadata)
    oat = pymortar.View(name="view_oat",
                        sites=config['sites'],
                        definition=query_oat)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the OAT timeseries stream
    data_view_oat = pymortar.DataFrame(
        name="data_oat",
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, oat],
        dataFrames=[data_view_meter, data_view_oat],
        time=time_params)

    # Fetch data from request
    data = client.fetch(request)

    # Renames columns from uuids' to sitenames'
    map_uuid_meter, map_uuid_oat = map_uuid_sitename(data)

    # Save data to csv file
    if config['save_data']:
        data['data_meter'].to_csv('meter_data.csv')
        data['data_oat'].to_csv('oat_data.csv')

    return data['data_meter'], data['data_oat'], map_uuid_meter, map_uuid_oat
Example #24
                                dataFrames=dataframes,
                                time=timeparams)

    return client.fetch(req)


meter_view = pymortar.View(
    name="meters",
    definition="""SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };""",
)
meter_df = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    timeseries=[pymortar.Timeseries(
        view="meters",
        dataVars=['?meter'],
    )])

tstats_view = pymortar.View(
    name="tstats",
    definition="""SELECT ?rtu ?zone ?tstat ?csp ?hsp ?temp ?state WHERE {
      ?rtu rdf:type brick:RTU .
      ?tstat bf:controls ?rtu .
      ?rtu bf:feeds ?zone .
      ?tstat bf:hasPoint ?temp .
      ?temp rdf:type/rdfs:subClassOf* brick:Temperature_Sensor .

      ?tstat bf:hasPoint ?csp .
      ?csp rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint .
Example #25
    print("ERROR: ", resp.error)
    os._exit(1)

print("running on {0} sites".format(len(resp.sites)))

# define the view of meters (metadata)
meters = pymortar.View(
    sites=resp.sites,
    name="meters",
    definition=meter_query,
)

# define the meter timeseries streams we want
meter_data = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    window="15m",
    timeseries=[pymortar.Timeseries(view="meters", dataVars=["?meter"])])

# temporal parameters for the query: 2016-2018 @ 15min mean
time_params = pymortar.TimeParams(
    start="2016-01-01T00:00:00Z",
    end="2018-01-01T00:00:00Z",
)

# form the full request object
request = pymortar.FetchRequest(sites=resp.sites,
                                views=[meters],
                                dataFrames=[meter_data],
                                time=time_params)
Example #26
qualify_resp = c.qualify([air_temp_sensor_query, air_temp_setpoint_query])
print(qualify_resp)
print("running on {0} sites".format(len(qualify_resp.sites)))
request = pymortar.FetchRequest(sites=qualify_resp.sites,
                                views=[
                                    pymortar.View(
                                        name="airtemp_sensors",
                                        definition=air_temp_sensor_query,
                                    ),
                                    pymortar.View(
                                        name="airtemp_sps",
                                        definition=air_temp_setpoint_query,
                                    )
                                ],
                                dataFrames=[
                                    pymortar.DataFrame(
                                        name="data1",
                                        aggregation=pymortar.MAX,
                                        window="15m",
                                        timeseries=[
                                            pymortar.Timeseries(
                                                view="airtemp_sensors",
                                                dataVars=["?sensor"],
                                            )
                                        ])
                                ])
(views, metadata, dataframes) = c.fetch(request)
print(views)
print(metadata)
print(dataframes)