Example #1
def get_power(site, start, end, agg, window, cli):

    eagle_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };"""

    gb_power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""
    query_agg = getattr(pymortar, agg.upper())  # e.g. pymortar.MEAN; avoids eval
    request_gb = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=gb_power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    request_eagle = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=eagle_power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result_gb = cli.fetch(request_gb)
    result_eagle = cli.fetch(request_eagle)
    try:
        power_gb = result_gb[
            'power'] * 4000  # adjusts from energy to power (15 min period) and from kW to W
        power_eagle = adjust(result_eagle['power'])
        power_eagle.columns = [power_gb.columns[0]]
        power = power_gb.fillna(
            value=power_eagle
        )  # power uses available gb data, fills NA with eagle data
    except Exception:
        if np.size(result_gb['power']) > 1:
            power = result_gb['power'] * 4000
        elif np.size(result_eagle['power']) > 1:
            power = adjust(result_eagle['power'])
        else:
            print("no data")
            power = None  # avoid a NameError on the return below when neither source has data

    return power
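A minimal usage sketch (an assumption, not part of the original project): it presumes a configured pymortar.Client, RFC3339 date strings, and that the adjust helper referenced above is defined elsewhere.

import pymortar

cli = pymortar.Client({})
# hypothetical site name and dates; agg/window follow the function's expectations
power = get_power(site="example-site",
                  start="2019-01-01T00:00:00Z",
                  end="2019-02-01T00:00:00Z",
                  agg="MEAN", window="15m", cli=cli)
if power is not None:
    print(power.head())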
Example #2
def get_zone_names():

    zones_query = """SELECT ?zone WHERE {
            ?tstat rdf:type brick:Thermostat .
            ?tstat bf:controls/bf:feeds ?zone .
            ?zone rdf:type brick:HVAC_Zone
            };"""

    # zones_query = """SELECT ?zone WHERE {
    #         ?zone rdf:type brick:HVAC_Zone
    #         };"""

    resp = pymortar_client.qualify([zones_query])

    zones_view = pymortar.View(
        name="zones_view",
        sites=resp.sites,
        definition=zones_query,
    )

    request = pymortar.FetchRequest(sites=resp.sites, views=[zones_view])

    zones_data = pymortar_client.fetch(request)

    #print(zones_data)

    print(zones_data.describe_table(viewname='zones_view'))
    #print(zones_data.query("select * from zones_view where zone like '%HVAC%'"))

    return zones_data
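A usage sketch, assuming pymortar_client is the module-level client the function relies on (the client configuration here is a placeholder):

import pymortar

pymortar_client = pymortar.Client({})  # module-level client assumed by get_zone_names
zones_data = get_zone_names()
print(zones_data.view('zones_view').head())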
Example #3
File: server.py  Project: kuzha/XBOS
def get_mortar_oat_building(building, start, end, window, agg,
                            pymortar_client):
    outside_temperature_query = """SELECT ?temp WHERE {
        ?temp rdf:type brick:Weather_Temperature_Sensor
    };"""

    weather_stations_view = pymortar.View(name='weather_stations_view',
                                          sites=[building],
                                          definition=outside_temperature_query)
    weather_stations_stream = pymortar.DataFrame(
        name='weather_stations',
        aggregation=agg,
        window=str(int(window)) + 's',
        timeseries=[
            pymortar.Timeseries(view='weather_stations_view',
                                dataVars=['?temp'])
        ])
    weather_stations_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    request = pymortar.FetchRequest(sites=[building],
                                    views=[weather_stations_view],
                                    dataFrames=[weather_stations_stream],
                                    time=weather_stations_time_params)
    outside_temperature_data = pymortar_client.fetch(
        request)['weather_stations']
    if outside_temperature_data is None:
        return None, "did not fetch data from pymortar with query: %s" % outside_temperature_query

    return outside_temperature_data, None
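A usage sketch (assumed, not from the project); it presumes timezone-aware datetimes and that the rfc3339 helper used inside the function is defined in the enclosing module:

import datetime
import pytz
import pymortar

client = pymortar.Client({})
end = datetime.datetime.now(pytz.utc)
start = end - datetime.timedelta(days=7)
oat, err = get_mortar_oat_building("example-building", start, end,
                                   window=3600, agg=pymortar.MEAN,
                                   pymortar_client=client)
if err is not None:
    print(err)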
Example #4
def get_greenbutton_id(site, use_TED_meter=False):
    if use_TED_meter or site in TED_meters:
        power_query = """SELECT ?meter WHERE {
            ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter
        };"""
    else:
        power_query = """SELECT ?meter WHERE {
                ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter
            };"""
    query_agg = pymortar.MAX
    start = '2019-01-01T00:00:00-08:00'
    end = '2019-01-02T00:00:00-08:00'
    request = pymortar.FetchRequest(
        sites=[site],
        views = [
            pymortar.View(name='power', definition=power_query)
        ],
        time = pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(
            name='power',
            aggregation=query_agg,
            window='24h',
            timeseries=[
                pymortar.Timeseries(
                    view='power',
                    dataVars=['?meter'])
            ])
        ]
    )
    result = client.fetch(request)
    return result['power'].columns[0]
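A usage sketch; the module-level client and the TED_meters collection that the function references are assumptions here:

import pymortar

client = pymortar.Client({})   # module-level client used inside get_greenbutton_id
TED_meters = {"example-site"}  # hypothetical set of sites with TED meters
meter_uuid = get_greenbutton_id("example-site")
print(meter_uuid)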
Example #5
def get_all_points(client, site=None):
    query = """SELECT ?point ?point_type WHERE { ?point rdf:type/rdfs:subClassOf* brick:Point . ?point rdf:type ?point_type . };"""

    if site is None:
        resp = client.qualify([query])
        if resp.error != "":
            print("ERROR: ", resp.error)
            return pd.DataFrame()

        if len(resp.sites) == 0:
            return pd.DataFrame()

        sites = resp.sites
    else:
        sites = [site]

    points_view = pymortar.View(
        sites=sites,
        name="point_type_data",
        definition=query,
    )

    request = pymortar.FetchRequest(sites=sites, views=[points_view])

    response = client.fetch(request)

    if len(response.tables) == 0:
        return pd.DataFrame()

    view_df = response.view("point_type_data")
    view_df = view_df.rename({"point_type": "type"}, axis="columns")
    view_df = view_df.set_index('point')
    return view_df
Example #6
File: server.py  Project: kuzha/XBOS
def _get_raw_actions(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    TODO how to deal with windows in which two different actions are performed in given zone.
    Note: GETS THE MAX ACTION IN GIVEN INTERVAL.
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size: string ending in one of [s, m, h, d], e.g. "1s" for one second.
    :return:
    """
    thermostat_action_query = """SELECT ?tstat ?status_point WHERE {
            ?tstat rdf:type brick:Thermostat .
            ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
            ?tstat bf:hasPoint ?status_point .
            ?status_point rdf:type brick:Thermostat_Status .
        };""" % (building, zone)

    # resp = pymortar_client.qualify([thermostat_action_query]) Needed to get list of all sites

    thermostat_action_view = pymortar.View(
        name="thermostat_action_view",
        sites=[building],
        definition=thermostat_action_query,
    )

    thermostat_action_stream = pymortar.DataFrame(
        name="thermostat_action",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="thermostat_action_view",
                dataVars=["?status_point"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            thermostat_action_view
        ],
        dataFrames=[
            thermostat_action_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    thermostat_action_data = pymortar_client.fetch(request)["thermostat_action"]

    if thermostat_action_data is None:
        return None, "did not fetch data from pymortar with query: %s" % thermostat_action_query

    return thermostat_action_data, None
Example #7
File: server.py  Project: kuzha/XBOS
def _get_raw_indoor_temperatures(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size:
    :return:
    """
    temperature_query = """SELECT ?tstat ?temp WHERE {
                ?tstat rdf:type brick:Thermostat .
                ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
                ?tstat bf:hasPoint ?temp .
                ?temp  rdf:type brick:Temperature_Sensor  .
            };""" % (building, zone)

    #resp = pymortar_client.qualify([temperature_query]) Need to get list of all sites

    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )

    temperature_stream = pymortar.DataFrame(
        name="temperature",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_view",
                dataVars=["?temp"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            temperature_view
        ],
        dataFrames=[
            temperature_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    temperature_data = pymortar_client.fetch(request)["temperature"]

    if temperature_data is None:
        return None, "did not fetch data from pymortar with query: %s" % temperature_query

    return temperature_data, None
Example #8
File: server.py  Project: kuzha/XBOS
def _get_raw_temperature_bands(building, zone, pymortar_client, start, end, window_size, aggregation):
    """
    :param building:
    :param zone:
    :param pymortar_client:
    :param start: datetime, timezone aware, rfc3339
    :param end: datetime, timezone aware, rfc3339
    :param window_size:
    :return:
    """
    temperature_bands_query = """
        SELECT ?tstat ?heating_setpoint ?cooling_setpoint WHERE {
        ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
        ?tstat bf:hasPoint ?heating_setpoint .
        ?tstat bf:hasPoint ?cooling_setpoint .
        ?heating_setpoint rdf:type brick:Supply_Air_Temperature_Heating_Setpoint .
        ?cooling_setpoint rdf:type brick:Supply_Air_Temperature_Cooling_Setpoint
    };""" % (building, zone)

    temperature_bands_view = pymortar.View(
        name="temperature_bands_view",
        sites=[building],
        definition=temperature_bands_query,
    )

    temperature_bands_stream = pymortar.DataFrame(
        name="temperature_bands",
        aggregation=aggregation,
        window=window_size,
        timeseries=[
            pymortar.Timeseries(
                view="temperature_bands_view",
                dataVars=["?heating_setpoint", "?cooling_setpoint"],
            )
        ]
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            temperature_bands_view
        ],
        dataFrames=[
            temperature_bands_stream
        ],
        time=pymortar.TimeParams(
            start=rfc3339(start),
            end=rfc3339(end),
        )
    )

    temperature_bands_data = pymortar_client.fetch(request)["temperature_bands"]

    if temperature_bands_data is None:
        return None, "did not fetch data from pymortar with query: %s" % temperature_bands_query

    return temperature_bands_data, None
Example #9
def get_indoor_temp_data(building, zone, window):
    start = int(
        time.mktime(
            datetime.datetime.strptime("30/09/2018 0:00:00",
                                       "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)
    end = int(
        time.mktime(
            datetime.datetime.strptime("1/10/2018 0:00:00",
                                       "%d/%m/%Y %H:%M:%S").timetuple()) * 1e9)
    start = datetime.datetime.utcfromtimestamp(float(
        start / 1e9)).replace(tzinfo=pytz.utc)
    end = datetime.datetime.utcfromtimestamp(float(
        end / 1e9)).replace(tzinfo=pytz.utc)

    temperature_query = """SELECT ?tstat ?temp WHERE {
                    ?tstat rdf:type brick:Thermostat .
                    ?tstat bf:controls/bf:feeds <http://xbos.io/ontologies/%s#%s> .
                    ?tstat bf:hasPoint ?temp .
                    ?temp  rdf:type brick:Temperature_Sensor  .
                };""" % (building, zone)

    #resp = pymortar_client.qualify([temperature_query]) Need to get list of all sites

    temperature_view = pymortar.View(
        name="temperature_view",
        sites=[building],
        definition=temperature_query,
    )

    temperature_stream = pymortar.DataFrame(name="temperature",
                                            aggregation=pymortar.MEAN,
                                            window=window,
                                            timeseries=[
                                                pymortar.Timeseries(
                                                    view="temperature_view",
                                                    dataVars=["?temp"],
                                                )
                                            ])

    request = pymortar.FetchRequest(sites=[building],
                                    views=[temperature_view],
                                    dataFrames=[temperature_stream],
                                    time=pymortar.TimeParams(
                                        start=rfc3339(start),
                                        end=rfc3339(end),
                                    ))

    temperature_data = pymortar_client.fetch(request)

    print(temperature_data["temperature"])

    return temperature_data
Example #10
def get_outside_temp_data(building):

    interval = 3600

    outside_temperature_query = """SELECT ?temp WHERE {
        ?temp rdf:type brick:Weather_Temperature_Sensor .
    };"""

    weather_stations_view = pymortar.View(
        name="weather_stations_view",
        sites=[building],
        definition=outside_temperature_query,
    )

    weather_stations_stream = pymortar.DataFrame(
        name="weather_stations",
        aggregation=pymortar.MEAN,
        window=str(int(interval)) + 's',
        timeseries=[
            pymortar.Timeseries(
                view="weather_stations_view",
                dataVars=["?temp"],
            )
        ]
    )

    # note: `start` and `end` are not parameters of this function; they are
    # assumed to be defined in the enclosing scope
    weather_stations_time_params = pymortar.TimeParams(
        start=rfc3339(start),
        end=rfc3339(end),
    )

    request = pymortar.FetchRequest(
        sites=[building],
        views=[
            weather_stations_view
        ],
        dataFrames=[
            weather_stations_stream
        ],
        time=weather_stations_time_params
    )

    df = pymortar_client.fetch(request)['weather_stations']

    return df
Example #11
def get_points_for_equipment(equipment, building, pymortar_client):
    """Gets points for each equipment in the list of equipment"""

    v = pymortar.View(
        name=equipment,
        definition="""
        SELECT ?{eq_lower} ?point ?class FROM {building} WHERE {{
            ?{eq_lower} rdf:type brick:{eq_class} .
            ?{eq_lower} bf:hasPoint ?point .
            ?point rdf:type ?class
        }};""".format(eq_lower=equipment.lower(), eq_class=equipment, building=building) 
    )

    res = pymortar_client.fetch(pymortar.FetchRequest(
        sites=[building],
        views=[v]
    ))

    return res.view(equipment)['class'].unique()
Example #12
def get_weather(site, start, end, agg, window, cli):
    weather_query = """SELECT ?t WHERE {
            ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor
        };"""
    query_agg = getattr(pymortar, agg.upper())  # e.g. pymortar.MEAN; avoids eval
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='weather', definition=weather_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='weather',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='weather',
                                                       dataVars=['?t'])
                               ])
        ])
    result = cli.fetch(request)
    return result['weather']
Example #13
def get_power(site, start, end, agg, window, cli):
    power_query = """SELECT ?meter WHERE {
            ?meter rdf:type brick:Green_Button_Meter
        };"""
    query_agg = getattr(pymortar, agg.upper())  # e.g. pymortar.MEAN; avoids eval
    request = pymortar.FetchRequest(
        sites=[site],
        views=[pymortar.View(name='power', definition=power_query)],
        time=pymortar.TimeParams(start=start, end=end),
        dataFrames=[
            pymortar.DataFrame(name='power',
                               aggregation=query_agg,
                               window=window,
                               timeseries=[
                                   pymortar.Timeseries(view='power',
                                                       dataVars=['?meter'])
                               ])
        ])
    result = cli.fetch(request)
    return result['power']
Example #14
def get_equipment_list(building, pymortar_client):
    """Gets unique list of equipment in the building"""

    #Get unique list of equipment
    v = pymortar.View(
        name="equipment",
        definition="""
        SELECT ?equipname ?equipclass ?point ?pointclass FROM %s WHERE {
            ?equipclass rdfs:subClassOf+ brick:Equipment .
            ?equipname rdf:type ?equipclass .
            ?equipname bf:hasPoint ?point .
            ?point rdf:type ?pointclass
        };""" % building
    )

    res = pymortar_client.fetch(pymortar.FetchRequest(
        sites=[building],
        views=[v]
    ))

    # Excludes CentralPlant for now
    equip_query = res.query("SELECT DISTINCT SUBSTR(equipclass, INSTR(equipclass, '#') + 1) AS eq FROM equipment WHERE equipname NOT LIKE '%Central%'")

    return [row[0] for row in equip_query]
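A sketch that chains this helper with get_points_for_equipment from Example #11 (the building name is a placeholder):

import pymortar

client = pymortar.Client({})
building = "example-building"
for eq in get_equipment_list(building, client):
    point_classes = get_points_for_equipment(eq, building, client)
    print(eq, point_classes)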
Example #15
def get_meter_data(pymortar_client,
                   pymortar_objects,
                   site,
                   start,
                   end,
                   point_type="Green_Button_Meter",
                   agg='MEAN',
                   window='15m'):
    """ Get meter data from pymortar.

    Parameters
    ----------
    pymortar_client     : pymortar.Client({})
        Pymortar Client Object.
    pymortar_objects    : dict
        Dictionary that maps aggregation values to corresponding pymortar objects.
    site                : str
        Building name.
    start               : str
        Start date - 'YYYY-MM-DDTHH:MM:SSZ'
    end                 : str
        End date - 'YYYY-MM-DDTHH:MM:SSZ'
    point_type          : str
        Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
    agg                 : str
        Values include MEAN, MAX, MIN, COUNT, SUM, RAW (the temporal window parameter is ignored)
    window              : str
        Size of the moving window.

    Returns
    -------
    pd.DataFrame(), defaultdict(list)
        Meter data, dictionary that maps meter data's columns (uuid's) to sitenames.

    """

    agg = pymortar_objects.get(agg, 'ERROR')

    if agg == 'ERROR':
        raise ValueError(
            'Invalid aggregate type; should be an upper-case string; valid values: '
            + ', '.join(pymortar_objects.keys()))

    query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

    # Define the view of meters (metadata)
    meter = pymortar.View(name="view_meter",
                          sites=[site],
                          definition=query_meter)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=agg,
        window=window,
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=start, end=end)

    # Form the full request object
    request = pymortar.FetchRequest(sites=[site],
                                    views=[meter],
                                    dataFrames=[data_view_meter],
                                    time=time_params)

    # Fetch data from request
    response = pymortar_client.fetch(request)

    # resp_meter = (url, uuid, sitename)
    resp_meter = response.query('select * from view_meter')

    # Map UUIDs to site names
    map_uuid_sitename = defaultdict(list)
    for (url, uuid, sitename) in resp_meter:
        map_uuid_sitename[uuid].append(sitename)

    return response['data_meter'], map_uuid_sitename
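A usage sketch showing one plausible pymortar_objects mapping; the mapping is inferred from the docstring above, not taken from the original code:

import pymortar

pymortar_objects = {
    'MEAN': pymortar.MEAN, 'MAX': pymortar.MAX, 'MIN': pymortar.MIN,
    'COUNT': pymortar.COUNT, 'SUM': pymortar.SUM, 'RAW': pymortar.RAW,
}
client = pymortar.Client({})
meter_df, uuid_map = get_meter_data(client, pymortar_objects,
                                    site="example-site",
                                    start="2019-01-01T00:00:00Z",
                                    end="2019-01-08T00:00:00Z")
print(meter_df.head())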
Example #16
client = pymortar.Client({})

meter_query = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter };"

# run qualify stage to get list of sites with electric meters
resp = client.qualify([meter_query])
if resp.error != "":
    print("ERROR: ", resp.error)
    sys.exit(1)  # note: os.exit() does not exist; requires `import sys`

print("running on {0} sites".format(len(resp.sites)))

# define the view of meters (metadata)
meters = pymortar.View(
    sites=resp.sites,
    name="meters",
    definition=meter_query,
)

# define the meter timeseries streams we want
meter_data = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    window="15m",
    timeseries=[pymortar.Timeseries(view="meters", dataVars=["?meter"])])

# temporal parameters for the query: 2017-2018 @ 15min mean
time_params = pymortar.TimeParams(
    start="2016-01-01T00:00:00Z",
    end="2018-01-01T00:00:00Z",
)
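The snippet ends before the request is issued; a plausible continuation, following the same fetch pattern used in the other examples here, might look like this:

request = pymortar.FetchRequest(
    sites=resp.sites,
    views=[meters],
    dataFrames=[meter_data],
    time=time_params,
)
resp_data = client.fetch(request)
print(resp_data['meters'].describe())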
Example #17
def tstat_zone_analysis(client, resample_minutes, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    print(st)
    print(et)

    tstat_query = """
        SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE {
            ?tstat bf:hasLocation ?room .
            ?zone bf:hasPart ?room .

            ?tstat bf:hasPoint ?state .
            ?tstat bf:hasPoint ?temp .
            ?tstat bf:hasPoint ?hsp .
            ?tstat bf:hasPoint ?csp .

            ?zone rdf:type/rdfs:subClassOf* brick:Zone .
            ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat .
            ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status .
            ?temp  rdf:type/rdfs:subClassOf* brick:Temperature_Sensor  .
            ?hsp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint .
            ?csp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint .
        };
    """
    qualify_response = client.qualify([tstat_query])
    if qualify_response.error != "":
        print("ERROR: ", qualify_response.error)
        sys.exit(1)  # note: os.exit() does not exist; requires `import sys`

    print("Running on {0} sites".format(len(qualify_response.sites)))

    tstat_view = pymortar.View(
        name="tstat_points",
        sites=qualify_response.sites,
        definition=tstat_query,
    )

    tstat_streams = pymortar.DataFrame(
        name="thermostat_data",
        aggregation=pymortar.MAX,
        window="1m",
        timeseries=[
            pymortar.Timeseries(view="tstat_points",
                                dataVars=["?state", "?temp", "?hsp", "?csp"])
        ])

    time_params = pymortar.TimeParams(start=st, end=et)

    request = pymortar.FetchRequest(
        sites=qualify_response.sites,  # from our call to Qualify
        views=[tstat_view],
        dataFrames=[tstat_streams],
        time=time_params)
    result = client.fetch(request)

    tstat_df = result['thermostat_data']
    tstats = [
        tstat[0] for tstat in result.query("select tstat from tstat_points")
    ]

    error_df_list = []
    for tstat in tstats:
        q = """
                SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
                FROM tstat_points
                WHERE tstat = "{0}";
            """.format(tstat)
        res = result.query(q)

        if len(res) == 0:
            continue

        state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
        df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]
        df.columns = ['state', 'iat', 'hsp', 'csp']

        df2 = pd.DataFrame()
        resample_time = '{0}T'.format(resample_minutes)
        df2['min_hsp'] = df['hsp'].resample(resample_time).min()
        df2['min_csp'] = df['csp'].resample(resample_time).min()
        df2['max_hsp'] = df['hsp'].resample(resample_time).max()
        df2['max_csp'] = df['csp'].resample(resample_time).max()

        df2['heat_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 1).sum() +
                       (x == 4).sum()) / resample_minutes * 100)
        df2['cool_percent'] = df['state'].resample(resample_time).apply(
            lambda x: ((x == 2).sum() +
                       (x == 5).sum()) / resample_minutes * 100)

        df2['tstat'] = tstat
        df2['room'] = room.split('#')[1]
        df2['zone'] = zone.split('#')[1]
        df2['site'] = site

        df2['both_heat_cool'] = False
        df2.loc[((df2.heat_percent > 0) & (df2.cool_percent > 0)),
                'both_heat_cool'] = True
        if not df2[df2['both_heat_cool'] == True].empty:
            error_df_list.append(df2[df2['both_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[[
            'site', 'zone', 'room', 'heat_percent', 'cool_percent', 'min_hsp',
            'min_csp', 'max_hsp', 'max_csp'
        ]]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)

        return error_df
    else:
        return pd.DataFrame()
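A usage sketch (assumed); get_error_message must be defined elsewhere in the project for the error messages to print:

import datetime
import pymortar

client = pymortar.Client({})
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(days=1)
error_df = tstat_zone_analysis(client, resample_minutes=15,
                               start_time=start_time, end_time=end_time)
if not error_df.empty:
    print(error_df.head())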
Example #18
def read_config():
    """ Reads the config.json file to obtain parameters and fetches data from Mortar.

    Returns
    -------
    pymortar FetchResponse
        Response whose 'data_meter' and 'data_occupancy' dataframes hold the meter and occupancy data.

    """

    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for occupancy data
    query_occupancy = "SELECT ?point WHERE { ?point rdf:type/rdfs:subClassOf* brick:Occupancy_Sensor };"

    # Get list of sites for meter data and occupancy data
    resp_meter = client.qualify([query_meter])
    resp_occupancy = client.qualify([query_occupancy])

    if resp_meter.error or resp_occupancy.error:
        print("ERROR: ", resp_meter.error or resp_occupancy.error)
        os._exit(0)
    else:

        # Get list of sites that are common for meter data and occupancy data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_occupancy.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)
            print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of occupancy sensors (metadata)
    occupancy = pymortar.View(name="view_occupancy",
                              sites=config['sites'],
                              definition=query_occupancy)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the occupancy timeseries stream
    data_view_occupancy = pymortar.DataFrame(
        name="data_occupancy",  # dataframe column name
        aggregation=pymortar.RAW,
        window="",
        timeseries=[
            pymortar.Timeseries(view="view_occupancy", dataVars=["?point"])
        ])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, occupancy],
        dataFrames=[data_view_meter, data_view_occupancy],
        time=time_params)

    # Fetch data from request
    response = client.fetch(request)

    # Save data to csv file
    if config['save_data']:
        response['data_meter'].to_csv('meter_data.csv')
        response['data_occupancy'].to_csv('occupancy_data.csv')

    # Create results folder if it doesn't exist
    if not os.path.exists('./' + config['results_folder']):
        os.mkdir('./' + config['results_folder'])

    return response
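read_config depends on a global config loaded from config.json; a plausible shape, inferred from the keys the function touches (not the project's actual file), is:

config = {
    "sites": [],          # empty -> fall back to all qualifying sites
    "time": {
        "start": "2018-01-01T00:00:00Z",
        "end": "2018-06-01T00:00:00Z",
    },
    "save_data": True,
    "results_folder": "results",
}
response = read_config()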
Example #19
def ahu_analysis(client, start_time, end_time):
    st = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    et = end_time.strftime("%Y-%m-%dT%H:%M:%SZ")

    query = """SELECT ?cooling_point ?heating_point ?ahu WHERE {
        ?cooling_point rdf:type/rdfs:subClassOf* brick:Cooling_Valve_Command .
        ?heating_point rdf:type/rdfs:subClassOf* brick:Heating_Valve_Command .
        ?ahu bf:hasPoint ?cooling_point .
        ?ahu bf:hasPoint ?heating_point .
    };"""

    resp = client.qualify([query])
    if resp.error != "":
        print("ERROR: ", resp.error)

    points_view = pymortar.View(
        sites=resp.sites,
        name="point_type_data",
        definition=query,
    )

    point_streams = pymortar.DataFrame(
        name="points_data",
        aggregation=pymortar.MAX,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="point_type_data",
                                dataVars=["?cooling_point", "?heating_point"])
        ])

    time_params = pymortar.TimeParams(start=st, end=et)

    request = pymortar.FetchRequest(
        sites=resp.sites,
        views=[points_view],
        time=time_params,
        dataFrames=[point_streams],
    )

    response = client.fetch(request)

    ahu_df = response["points_data"]
    ahus = [
        ahu[0] for ahu in response.query("select ahu from point_type_data")
    ]

    error_df_list = []
    for ahu in ahus:
        heat_cool_query = """
            SELECT cooling_point_uuid, heating_point_uuid, site
            FROM point_type_data
            WHERE ahu = "{0}";
        """.format(ahu)
        res = response.query(heat_cool_query)
        cooling_uuid = res[0][0]
        heating_uuid = res[0][1]
        site = res[0][2]
        df = response["points_data"][[cooling_uuid, heating_uuid]].dropna()
        df.columns = ['cooling', 'heating']
        df['site'] = site
        df['ahu'] = ahu.split('#')[1]
        df['simultaneous_heat_cool'] = False
        df.loc[((df.cooling > 0) & (df.heating > 0)),
               'simultaneous_heat_cool'] = True
        if not df[df['simultaneous_heat_cool'] == True].empty:
            error_df_list.append(df[df['simultaneous_heat_cool'] == True])

    if len(error_df_list) > 0:
        error_df = pd.concat(error_df_list, axis=0)[['site', 'ahu']]
        error_df.index.name = 'time'
        error_msgs = error_df.apply(lambda x: get_error_message(x),
                                    axis=1).values
        for msg in error_msgs:
            print(msg)

        return error_df
    else:
        return pd.DataFrame()
Example #20
def get_point_class_dict(pymortar_client, building):
    """ Creates a dictionary of BACnet point names mapped to their respective Brick classes.
    Points are also grouped by Equipment class. Point names and classes are obtained from Pymortar. 

    e.g. { "AHU": { 
            "AHU-3.RA-T": {
                "point_name": "AHU-3.RA-T",
                "point_class": "Return_Air_Temperature_Sensor"
            }, .....,
          "VAV": { 
            .....
           }
        }

    :param pymortar_client: Pymortar client
    :param building: Name of building that is being analyzed
    :return: Dictionary of BACnet point names mapped to their respective Brick classes grouped by Equipment class
    """
    v = pymortar.View(name="equipment",
                      definition="""
            SELECT ?equipname ?equipclass ?point ?pointclass FROM %s WHERE {
                ?equipclass rdfs:subClassOf+ brick:Equipment .
                ?equipname rdf:type ?equipclass .
                ?equipname bf:hasPoint ?point .
                ?point rdf:type ?pointclass
            };""" % building)

    res = pymortar_client.fetch(
        pymortar.FetchRequest(sites=[building], views=[v]))

    eq_view = res.view('equipment')

    point_class_dict = {}
    groups = eq_view.groupby('equipclass').groups
    # Equipment classes that are not being considered and will be excluded from the dictionary
    # Add to this list of excluded equipment for other buildings
    excluded_equipment = ['Boiler', 'Chilled_Water_Pump', 'Hot_Water_Pump']
    # Clusters all points with subclasses of Fan and Damper (e.g. Exhaust_Fan)
    clustered_equipment = dict.fromkeys(["Damper", "Fan"], None)
    clustered = None

    for equipclass, indexes in groups.items():
        if equipclass not in excluded_equipment:
            for clustered_eq in clustered_equipment:
                # Union all the clustered equipment
                if clustered_eq.lower() in equipclass.lower():
                    clustered = clustered_eq
                    break

            if clustered:
                if clustered not in point_class_dict:
                    point_class_dict[clustered] = {}
            else:
                if equipclass not in point_class_dict:
                    point_class_dict[equipclass] = {}

            points = eq_view.iloc[indexes][['point', 'pointclass']].values
            for p in points:
                point_name, point_class = p[0], p[1]
                if clustered:
                    point_class_dict[clustered][point_name] = {
                        "point_name": point_name,
                        "point_class": point_class
                    }
                else:
                    point_class_dict[equipclass][point_name] = {
                        "point_name": point_name,
                        "point_class": point_class
                    }

            clustered = None

    return point_class_dict
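A usage sketch with a placeholder building name:

import pymortar

client = pymortar.Client({})
point_dict = get_point_class_dict(client, "example-building")
for equip_class, points in point_dict.items():
    print(equip_class, len(points))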
Example #21
    'prooffile': 'clientproof.pem',
    'grpcservice': 'mortar/Mortar/*',
    'address': 'localhost:4587',
})
# client.qualify
resp = client.qualify([
    "SELECT ?zone WHERE { ?zone rdf:type brick:Electric_Meter };",
    "SELECT ?zone WHERE { ?zone rdf:type brick:Temperature_Sensor };"
])

req = pymortar.FetchRequest(
    sites=resp.sites,
    views=[
        pymortar.View(
            name="test1",
            definition=
            "SELECT ?vav WHERE { ?vav rdf:type/rdfs:subClassOf* brick:Temperature_Sensor };",
        ),
        pymortar.View(
            name="meter",
            definition=
            "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Electric_Meter };",
        ),
    ],
    dataFrames=[
        pymortar.DataFrame(
            name="meter_data",
            aggregation=pymortar.MEAN,
            window="5m",
            uuids=["b8166746-ba1c-5207-8c52-74e4700e4467"],
            #timeseries=[
Example #22
    def get_meter_data(self,
                       site,
                       start,
                       end,
                       point_type="Green_Button_Meter",
                       agg='MEAN',
                       window='15m'):
        """ Get meter data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        point_type      : str
            Type of data, i.e. Green_Button_Meter, Building_Electric_Meter...
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN, 
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored)
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame(), defaultdict(list)
            Meter data, dictionary that maps meter data's columns (uuid's) to sitenames.

        """

        # CHECK: Hacky code. Change this later
        if agg == 'MEAN':
            agg = pymortar.MEAN

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_meter = "SELECT ?meter WHERE { ?meter rdf:type brick:" + point_type + " };"

        # Define the view of meters (metadata)
        meter = pymortar.View(name="view_meter",
                              sites=site,
                              definition=query_meter)

        # Define the meter timeseries stream
        data_view_meter = pymortar.DataFrame(
            name="data_meter",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[
                pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
            ])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[meter],
                                        dataFrames=[data_view_meter],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # resp_meter = (url, uuid, sitename)
        resp_meter = response.query('select * from view_meter')

        # Map UUIDs to site names
        map_uuid_sitename = defaultdict(list)
        for (url, uuid, sitename) in resp_meter:
            map_uuid_sitename[uuid].append(sitename)

        return response['data_meter'], map_uuid_sitename
Example #23
    if start is not None and end is not None:
        timeparams = pymortar.TimeParams(
            start=start.isoformat(),
            end=end.isoformat(),
        )
    req = pymortar.FetchRequest(sites=sites,
                                views=views,
                                dataFrames=dataframes,
                                time=timeparams)

    return client.fetch(req)


meter_view = pymortar.View(
    name="meters",
    definition="""SELECT ?meter WHERE {
            ?meter rdf:type brick:Building_Electric_Meter
        };""",
)
meter_df = pymortar.DataFrame(
    name="meters",
    aggregation=pymortar.MEAN,
    timeseries=[pymortar.Timeseries(
        view="meters",
        dataVars=['?meter'],
    )])

tstats_view = pymortar.View(
    name="tstats",
    definition="""SELECT ?rtu ?zone ?tstat ?csp ?hsp ?temp ?state WHERE {
      ?rtu rdf:type brick:RTU .
      ?tstat bf:controls ?rtu .
Example #24
# find sites with these sensors and setpoints
qualify_resp = client.qualify([air_flow_sensor_query, air_flow_setpoint_query])
if qualify_resp.error != "":
    print("ERROR: ", qualify_resp.error)
    sys.exit(1)  # note: os.exit() does not exist; requires `import sys`

print("running on {0} sites".format(len(qualify_resp.sites)))

# define dataset. We are keeping the airflow sensors and setpoints separate for now
# because we will join using the Views later
request = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        pymortar.View(
            name="airflow_sensors",
            definition=air_flow_sensor_query,
        ),
        pymortar.View(
            name="airflow_sps",
            definition=air_flow_setpoint_query,
        )
    ],
    dataFrames=[
        pymortar.DataFrame(name="sensors",
                           aggregation=pymortar.MEAN,
                           window="30m",
                           timeseries=[
                               pymortar.Timeseries(
                                   view="airflow_sensors",
                                   dataVars=["?sensor"],
                               )
Example #25
import pymortar

client = pymortar.Client({})

# client.qualify
qualify_resp = client.qualify([
    "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter };",
])

req = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        pymortar.View(
            sites=qualify_resp.sites,
            name="meter",
            definition=
            "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Building_Electric_Meter };",
        )
    ],
    dataFrames=[
        pymortar.DataFrame(
            name="meter_data",
            aggregation=pymortar.MEAN,
            window="1h",
            timeseries=[
                pymortar.Timeseries(
                    view="meter",
                    dataVars=["?meter"],
                )
            ],
        )
Example #26
#### QUALIFY Stage

qualify_resp = client.qualify([meter_query])
if qualify_resp.error != "":
    print("ERROR: ", qualify_resp.error)
    sys.exit(1)  # note: os.exit() does not exist; requires `import sys`

print("running on {0} sites".format(len(qualify_resp.sites)))

#### FETCH Stage
request = pymortar.FetchRequest(
    sites=qualify_resp.sites,
    views=[
        # defining relational table for the contents of the query (+site +meter_uuid columns)
        pymortar.View(
            name="meters",
            definition=meter_query,
        )
    ],
    dataFrames=[
        # 15min mean meter data
        pymortar.DataFrame(name="meters",
                           aggregation=pymortar.MEAN,
                           window="15m",
                           timeseries=[
                               pymortar.Timeseries(view="meters",
                                                   dataVars=["?meter"])
                           ])
    ],
    time=pymortar.TimeParams(
        start="2016-01-01T00:00:00Z",
        end="2019-01-01T00:00:00Z",
Example #27
def read_config():
    """ Reads the config.json file that contains parameters for baselines and fetches data from Mortar.

    Returns
    -------
    pd.DataFrame(), pd.DataFrame(), defaultdict(list), defaultdict(list)
        Meter data, OAT data, map of uuid to meter data, map of uuid to OAT data.

    """

    # Instantiate Client
    client = pymortar.Client({})

    # Query for meter data
    query_meter = "SELECT ?meter WHERE { ?meter rdf:type/rdfs:subClassOf* brick:Green_Button_Meter };"

    # Query for outdoor air temperature data
    query_oat = """ SELECT ?t WHERE { ?t rdf:type/rdfs:subClassOf* brick:Weather_Temperature_Sensor };"""

    # Get list of sites for meter data and OAT data
    resp_meter = client.qualify([query_meter])
    resp_oat = client.qualify([query_oat])

    if resp_meter.error or resp_oat.error:
        print("ERROR: ", resp_meter.error or resp_oat.error)
        os._exit(0)
    else:
        # Get list of sites that are common for meter data and OAT data
        common_sites = list(
            set(resp_meter.sites).intersection(set(resp_oat.sites)))

        # If config['sites'] = "", then default to all sites
        if not config['sites']:
            config['sites'] = common_sites
        else:
            for site in config['sites']:
                if site not in common_sites:
                    print('Incorrect site name.')
                    os._exit(0)
            print("Running on {0} sites".format(len(config['sites'])))

    # Define the view of meters (metadata)
    meter = pymortar.View(
        name="view_meter",
        sites=config['sites'],
        definition=query_meter,
    )

    # Define the view of OAT (metadata)
    oat = pymortar.View(name="view_oat",
                        sites=config['sites'],
                        definition=query_oat)

    # Define the meter timeseries stream
    data_view_meter = pymortar.DataFrame(
        name="data_meter",  # dataframe column name
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[
            pymortar.Timeseries(view="view_meter", dataVars=["?meter"])
        ])

    # Define the OAT timeseries stream
    data_view_oat = pymortar.DataFrame(
        name="data_oat",
        aggregation=pymortar.MEAN,
        window="15m",
        timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

    # Define timeframe
    time_params = pymortar.TimeParams(start=config['time']['start'],
                                      end=config['time']['end'])

    # Form the full request object
    request = pymortar.FetchRequest(
        sites=config['sites'],
        views=[meter, oat],
        dataFrames=[data_view_meter, data_view_oat],
        time=time_params)

    # Fetch data from request
    data = client.fetch(request)

    # Build maps from uuids to site names
    map_uuid_meter, map_uuid_oat = map_uuid_sitename(data)

    # Save data to csv file
    if config['save_data']:
        data['data_meter'].to_csv('meter_data.csv')
        data['data_oat'].to_csv('oat_data.csv')

    return data['data_meter'], data['data_oat'], map_uuid_meter, map_uuid_oat
Example #28
    def get_weather(self,
                    site,
                    start,
                    end,
                    point_type='Weather_Temperature_Sensor',
                    agg=pymortar.MEAN,
                    window='15m'):
        """ Get weather (OAT) data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        point_type      : str
            Type of point, i.e. Weather_Temperature_Sensor...
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN, 
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored)
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame(), defaultdict(list)
            OAT data, dictionary that maps meter data's columns (uuid's) to sitenames.

        """

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_oat = "SELECT ?t WHERE { ?t rdf:type/rdfs:subClassOf* brick:" + point_type + " };"

        # Get list of sites for OAT data
        resp_oat = self.client.qualify([query_oat])

        if resp_oat.error:
            raise RuntimeError(resp_oat.error)

        # Define the view of meters (metadata)
        oat = pymortar.View(name="view_oat", sites=site, definition=query_oat)

        # Define the meter timeseries stream
        data_view_oat = pymortar.DataFrame(
            name="data_oat",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[pymortar.Timeseries(view="view_oat", dataVars=["?t"])])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[oat],
                                        dataFrames=[data_view_oat],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # resp_oat rows = (url, uuid, sitename)
        resp_oat = response.query('select * from view_oat')

        # Map UUIDs to site names
        map_uuid_sitename = defaultdict(list)
        for (url, uuid, sitename) in resp_oat:
            map_uuid_sitename[uuid].append(sitename)

        return response['data_oat'], map_uuid_sitename
Example #29
    def get_tstat(self, site, start, end, agg=pymortar.MAX, window='1m'):
        """ Get tstat data from Mortar.

        Parameters
        ----------
        site            : list(str)
            List of sites.
        start           : str
            Start date - 'YYYY-MM-DDTHH:MM:SSZ'
        end             : str
            End date - 'YYYY-MM-DDTHH:MM:SSZ'
        agg             : pymortar aggregation object
            Values include pymortar.MEAN, pymortar.MAX, pymortar.MIN, 
        pymortar.COUNT, pymortar.SUM, pymortar.RAW (the temporal window parameter is ignored)
        window          : str
            Size of the moving window.
        
        Returns
        -------
        pd.DataFrame()
            Dataframe containing tstat data for all sites.

        """

        # CHECK: Does Mortar take in UTC or local time?
        # Convert time to UTC
        start = self.convert_to_utc(start)
        end = self.convert_to_utc(end)

        query_tstat = "SELECT ?tstat ?room ?zone ?state ?temp ?hsp ?csp WHERE { \
            ?tstat bf:hasLocation ?room . \
            ?zone bf:hasPart ?room . \
            ?tstat bf:hasPoint ?state . \
            ?tstat bf:hasPoint ?temp . \
            ?tstat bf:hasPoint ?hsp . \
            ?tstat bf:hasPoint ?csp . \
            ?zone rdf:type/rdfs:subClassOf* brick:Zone . \
            ?tstat rdf:type/rdfs:subClassOf* brick:Thermostat . \
            ?state rdf:type/rdfs:subClassOf* brick:Thermostat_Status . \
            ?temp  rdf:type/rdfs:subClassOf* brick:Temperature_Sensor  . \
            ?hsp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Heating_Setpoint . \
            ?csp   rdf:type/rdfs:subClassOf* brick:Supply_Air_Temperature_Cooling_Setpoint . \
        };"

        # Get list of sites for tstat data
        resp_tstat = self.client.qualify([query_tstat])

        if resp_tstat.error:
            raise RuntimeError(resp_tstat.error)

        # Define the view of tstat (metadata)
        tstat = pymortar.View(name="view_tstat",
                              sites=site,
                              definition=query_tstat)

        # Define the meter timeseries stream
        data_view_tstat = pymortar.DataFrame(
            name="data_tstat",  # dataframe column name
            aggregation=agg,
            window=window,
            timeseries=[
                pymortar.Timeseries(
                    view="view_tstat",
                    dataVars=["?state", "?temp", "?hsp", "?csp"])
            ])

        # Define timeframe
        time_params = pymortar.TimeParams(start=start, end=end)

        # Form the full request object
        request = pymortar.FetchRequest(sites=site,
                                        views=[tstat],
                                        dataFrames=[data_view_tstat],
                                        time=time_params)

        # Fetch data from request
        response = self.client.fetch(request)

        # Final dataframe containing all sites' data
        df_result = pd.DataFrame()

        tstat_df = response['data_tstat']
        tstats = [
            tstat[0]
            for tstat in response.query("select tstat from view_tstat")
        ]
        error_df_list = []

        for i, tstat in enumerate(tstats):

            q = """
                SELECT state_uuid, temp_uuid, hsp_uuid, csp_uuid, room, zone, site
                FROM view_tstat
                WHERE tstat = "{0}";
            """.format(tstat)

            res = response.query(q)
            if not res:
                continue

            state_col, iat_col, hsp_col, csp_col, room, zone, site = res[0]
            df = tstat_df[[state_col, iat_col, hsp_col, csp_col]]

            # A single site has many tstat points. Adding site+str(i) distinguishes each of them.
            # CHECK: This can have a better naming scheme.
            df.columns = [
                site + str(i) + '_state', site + str(i) + '_iat',
                site + str(i) + '_hsp', site + str(i) + '_csp'
            ]

            df_result = df_result.join(df, how='outer')

        return df_result