Example #1
    def setup(self, opts):
        """Set up what streams are to be subsampled.

        We'll only find new streams on a restart ATM.
        """
        restrict = opts.get(
            "Restrict", "has Path and (not has Metadata/Extra/SourceStream)")
        OperatorDriver.setup(self,
                             opts,
                             shelveoperators=False,
                             raw=True,
                             inherit_metadata=False)
        client = SmapClient(smapconf.BACKEND)
        source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
        for new in source_ids:
            id = str(new['uuid'])
            if not 'Properties/UnitofMeasure' in new:
                new['Properties/UnitofMeasure'] = ''
            if not id in self.operators:
                o1 = SubsampleOperator([new], 300)
                self.add_operator('/%s/%s' % (id, o1.name), o1)
                o2 = SubsampleOperator([new], 3600)
                self.add_operator('/%s/%s' % (id, o2.name), o2)
        log.msg("Done setting up subsample driver; " + str(len(source_ids)) +
                " ops")
Example #2
    def setup(self, opts):
        self.restrict = opts.get("Restrict")
        self.group = opts.get("Group")
        self.opstr = opts.get("Operator")
        OperatorDriver.setup(self, opts, self.restrict, shelveoperators=False)

        # look up the streams, units, and group tags.
        client = SmapClient()
        # could also restrict the returned tags to
        # 'uuid, Properties/UnitofMeasure, Metadata/SourceName, %s' % self.group
        streams = client.tags(self.restrict, '*')
        groupitems = {}

        # find the groups
        for s in streams:
            if not s[self.group] in groupitems:
                groupitems[s[self.group]] = []
            groupitems[s[self.group]].append(s)

        # instantiate one operator per group with the appropriate inputs
        for group, tags in groupitems.iteritems():
            inputs = map(operator.itemgetter('uuid'), tags)
            op = self.operator_class(tags)
            path = '/' + util.str_path(group)
            self.add_operator(path, op)
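The grouping loop above can be written more compactly with collections.defaultdict; a behavior-equivalent sketch (the import would normally sit at module level):

        from collections import defaultdict

        groupitems = defaultdict(list)
        for s in streams:
            groupitems[s[self.group]].append(s)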
Example #3
    def setup(self, opts):
        ""

        restrict = opts.get('Restrict', '')
        # client = SmapClient()
        client = SmapClient(HOST)
        source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
        for source in source_ids:
            uuid = str(source['uuid'])
            if 'Properties/UnitofMeasure' not in source:
                source['Properties/UnitofMeasure'] = ''
            if uuid not in self.operators:
                pass  # operator creation is omitted in this example

        RepublishClient()
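A minimal sketch of how the omitted branch is typically completed, following the pattern of Example #1 (SubsampleOperator and the 300-second window are carried over from that example, not from this snippet):

            if uuid not in self.operators:
                op = SubsampleOperator([source], 300)
                self.add_operator('/%s/%s' % (uuid, op.name), op)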
Example #4
    def send_alert(self, to, alert, streams, level):
        # look up the tags for these streams
        uuids = set(streams.keys())
        uuids = map(lambda u: "uuid = '%s'" % u, uuids)
        client = SmapClient()
        tags = client.tags(' or '.join(uuids), nest=True)
        tags = dict(((x['uuid'], x) for x in tags))

        def make_context(params):
            rv = []
            for uid, state in params.iteritems():
                t, v = state['time'], state['value']
                if uid in tags:
                    rv.append(tags[uid])
                    rv[-1]['AlertTime'] = time.ctime(t / 1000)
                    rv[-1]['AlertValue'] = v
            return rv

        context = make_context(streams)
        logentry = Log(alert=alert, when=datetime.datetime.now())
        logentry.save()

        # generate the text to send, by building a context for our
        # template.
        template = Template(self.template)
        context = Context({
            'streams': context,
            'level': level,
            'permalink': settings.ROOT_NETLOC + '/admin/alert/log/' + str(logentry.id),
            'alarmpage': settings.ROOT_NETLOC + '/admin/alert/alert/' + str(alert.id),
            'timestamp': logentry.when,
            'alarm': alert.__unicode__(),
        })
        logentry.message = template.render(context)
        print logentry.message
        logentry.save()

        emaillib.send(to, '%s from %s' % (level, settings.ROOT_NETLOC),
                      logentry.message)
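self.template itself is not shown in the snippet. A hypothetical Django template that would render against the context built above (Path is assumed to be present in the nested tags; AlertTime and AlertValue are set by make_context) might look like:

{{ level }} alert: {{ alarm }} at {{ timestamp }}
{% for s in streams %}
  {{ s.Path }} = {{ s.AlertValue }} at {{ s.AlertTime }}
{% endfor %}
Details: {{ permalink }}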
Example #5
    def setup(self, opts):
        """Set up what streams are to be subsampled.

        We'll only find new streams on a restart ATM.
        """
        restrict = opts.get("Restrict", 
                            "has Path and (not has Metadata/Extra/SourceStream)")
        OperatorDriver.setup(self, opts, shelveoperators=False, raw=True,
                             inherit_metadata=False)
        client = SmapClient(smapconf.BACKEND)
        source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
        for new in source_ids:
            id = str(new['uuid'])
            if not 'Properties/UnitofMeasure' in new:
                new['Properties/UnitofMeasure'] = ''
            if not id in self.operators:
                o1 = SubsampleOperator([new], 300)
                self.add_operator('/%s/%s' % (id, o1.name), o1)
                o2 = SubsampleOperator([new], 3600)
                self.add_operator('/%s/%s' % (id, o2.name), o2)
        log.msg("Done setting up subsample driver; " + str(len(source_ids)) + " ops")
Example #6
    def send_alert(self, to, alert, streams, level):
        # look up the tags for these streams
        uuids = set(streams.keys())
        uuids = map(lambda u: "uuid = '%s'" % u, uuids)
        client = SmapClient()
        tags = client.tags(' or '.join(uuids), nest=True)
        tags = dict(((x['uuid'], x) for x in tags))

        def make_context(params):
            rv = []
            for uid, state in params.iteritems():
                t, v = state['time'], state['value']
                if uid in tags:
                    rv.append(tags[uid])
                    rv[-1]['AlertTime'] = time.ctime(t/1000)
                    rv[-1]['AlertValue'] = v
            return rv
        context = make_context(streams)
        logentry = Log(alert=alert, 
                       when=datetime.datetime.now())
        logentry.save()

        # generate the text to send, by building a context for our
        # template.
        template = Template(self.template)
        context = Context({
                'streams' :  context,
                'level' : level,
                'permalink' : settings.ROOT_NETLOC + '/admin/alert/log/' + str(logentry.id),
                'alarmpage' : settings.ROOT_NETLOC + '/admin/alert/alert/' + str(alert.id),
                'timestamp' : logentry.when,
                'alarm' : alert.__unicode__(),
                })
        logentry.message = template.render(context)
        print logentry.message
        logentry.save()

        emaillib.send(to, '%s from %s' % (level, settings.ROOT_NETLOC), logentry.message)
Example #7
for name in rh_coils:
  rh_stream_names += ['coil_closed_temp_change_' + name] + \
                     ['hot_water_' + name] + \
                     ['instantaneous_zone_load_' + name]

c = SmapClient(base='http://new.openbms.org/backend',\
               key='XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8')

# Query necessary tags for energy data
source_energy = 'Sutardja Dai Hall Energy Data'
all_points = pointnames + rh_stream_names + chw_stream_names
where_energy = "Metadata/SourceName = '%s' and Path ~ '%s' and (" \
                 %(source_energy, p)\
                 + ' or '.join(["Path ~ '%s'"] * len(all_points))\
                 %tuple(all_points) + ")"
tags_energy = c.tags(where_energy)

# Query data for energy calcs as AHU level
source = 'Sutardja Dai Hall BACnet'
path_list = {
                'AH2A_SF_VFD' : 'SDH/AH2A/SF_VFD:POWER',
                'AH2B_SF_VFD' : 'SDH/AH2B/SF_VFD:POWER',
                'panel_power' : 'SDH/SW/MSA.CD4RA.PWR_REAL_3_P',
                'measured_chw' : 'SDH/CHW/OFFICE.TONNAGE',
                'AH2A_SAT' : 'SDH/AH2A/SAT',
                'AH2B_SAT' : 'SDH/AH2B/SAT',
                'AH2A_MAT' : 'SDH/AH2A/MAT',
                'AH2B_MAT' : 'SDH/AH2B/MAT',
                'AH2A_CCV' : 'SDH/AH2A/CCV',
                'AH2B_CCV' : 'SDH/AH2B/CCV',
                'AH2A_SF_CFM' : 'SDH/AH2A/SF_CFM',
Example #8
    def load_tags(self):
        """Load the matching tags (in a thread)"""
        c = SmapClient(self.source_url)
        return c.tags(self.restrict)
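The docstring says the lookup is meant to run in a thread; since sMAP is built on Twisted, a typical way to call it without blocking the reactor is sketched below (driver is an instance with this method, and on_tags_loaded is a hypothetical callback):

from twisted.internet import threads

d = threads.deferToThread(driver.load_tags)
d.addCallback(on_tags_loaded)  # hypothetical handler that receives the tag list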
Example #9
def data_acquisition():
###############################################################################   

    day0 = '6-1-2014'
    day1 = '9-9-2015'
    timestep=15 # timestep in minutes
    
###############################################################################    
    # make a client
    client = SmapClient("http://www.openbms.org/backend")
    
    # start and end values are Unix timestamps
    start = dtutil.dt2ts(dtutil.strptime_tz(day0, "%m-%d-%Y"))
    end   = dtutil.dt2ts(dtutil.strptime_tz(day1, "%m-%d-%Y")) 
    
    print 'Download start..'
    # perform temperature data download
    T_tags1 = client.tags("uuid = '" + T_uuid1 + "'")[0]
    T_data1 = client.data_uuid([T_uuid1], start, end, cache=True)[0]
    
    T_tags2 = client.tags("uuid = '" + T_uuid2 + "'")[0]
    T_data2 = client.data_uuid([T_uuid2], start, end, cache=True)[0]
    
    # perform humidity data download
    Hum_tags3 = client.tags("uuid = '" + Hum_uuid3 + "'")[0]
    Hum_data3 = client.data_uuid([Hum_uuid3], start, end, cache=True)[0]
    
    Hum_tags4 = client.tags("uuid = '" + Hum_uuid4 + "'")[0]
    Hum_data4 = client.data_uuid([Hum_uuid4], start, end, cache=True)[0]
    
    # perform co2 data download
    co2_tags5 = client.tags("uuid = '" + co2_uuid5 + "'")[0]
    co2_data5 = client.data_uuid([co2_uuid5], start, end, cache=True)[0]
    
    co2_tags6 = client.tags("uuid = '" + co2_uuid6 + "'")[0]
    co2_data6 = client.data_uuid([co2_uuid6], start, end, cache=True)[0]
    
    # perform outdoor temperature download and correction
    T_outdoor_tags7 = client.tags("uuid = '" + T_outdoor_uuid7 + "'")[0]
    T_outdoor_data7 = client.data_uuid([T_outdoor_uuid7], start, end, cache=True)[0]
    for i in range(len(T_outdoor_data7)):
        if T_outdoor_data7[i][1] > 0:
            T_outdoor_data7[i][1] = (T_outdoor_data7[i][1]-32)*5/9
        else:
            T_outdoor_data7[i][1] = T_outdoor_data7[i-1][1]
        if T_outdoor_data7[i][0] < 10000:
            T_outdoor_data7[i][0] = T_outdoor_data7[i-1][0] + 32000
    
    # perform power data download
    light_data = client.data_uuid([light_uuid1], start, end, cache=True)[0]
    recep_data = client.data_uuid([recep_uuid1], start, end, cache=True)[0]
    fan_power_total_data = client.data_uuid([fan_power_total_uuid1], start, end, cache=True)[0]
    
    air_a_sat_data = client.data_uuid([air_a_sat_uuid1], start, end, cache=True)[0]
    air_b_sat_data = client.data_uuid([air_b_sat_uuid1 ], start, end, cache=True)[0]
    air_a_mat_data= client.data_uuid([air_a_mat_uuid1], start, end, cache=True)[0]
    air_b_mat_data = client.data_uuid([air_b_mat_uuid1], start, end, cache=True)[0]
    for data in [air_a_sat_data , air_a_mat_data , air_b_sat_data , air_b_mat_data]:
        for i in range(len(data)):
            data[i][1]=(data[i][1] - 32)*5/9
    
    air_a_flow_data = client.data_uuid([air_a_flow_uuid1], start, end, cache=True)[0]
    air_b_flow_data = client.data_uuid([air_b_flow_uuid1], start, end, cache=True)[0]
    vav_data=[]
    for i in VAV_flow_uuid:
        download=client.data_uuid([i], start, end, cache=True)[0]
        vav_data.append(download)
    flow_floor_4=[]
    for j in range(min([len(x) for x in vav_data])):
        somme=0
        for i in vav_data:
            somme=somme+i[j][1]
        flow_floor_4.append([vav_data[0][j][0] , somme])
    
    # perform temperature setpoints download
    T_setpt_data=[]
    setpt_download=[]
    for i in VAV_setpt_uuid:
        download=client.data_uuid([i], start, end, cache=True)[0]
        setpt_download.append(download)    
    for j in range(min([len(x) for x in setpt_download])):
        value = sum([setpt_download[i][j][1] for i in range(len(setpt_download))])/len(setpt_download)
        T_setpt_data.append([setpt_download[1][j][0] , value])
    
    
    print 'Download done' 
    print
    
###############################################################################    
    
    print 'Calculation start..'
    # convert temperature setpoint unit
    for i in range(len(T_setpt_data)):
        T_setpt_data[i][1]=( T_setpt_data[i][1]-32)*5/9
    
    # calculate ventilation power
    vent_power_data=[]
    for i in range(min(len(fan_power_total_data) , len(air_a_flow_data) , len(air_b_flow_data) , len(flow_floor_4))):
        power=fan_power_total_data[i][1]*flow_floor_4[i][1]/(air_a_flow_data[i][1] + air_b_flow_data[i][1])
        vent_power_data.append([flow_floor_4[i][0] , power])
    
    # calculate H/C power   
    capacity=0.00056937  #kW/C.cfm
    ratio=[]
    for i in range(min(len(air_a_flow_data), len(air_b_flow_data), len(flow_floor_4))):
        division=   flow_floor_4[i][1]/(air_a_flow_data[i][1] + air_b_flow_data[i][1]) 
        ratio.append([air_a_flow_data[i][0] , division ])
    cool_power_data=[]
    for i in range(min(len(air_a_sat_data) , len(air_b_sat_data) , len(air_a_mat_data) , len(air_b_mat_data) , len(ratio))): 
        result = ((capacity*(air_a_sat_data[i][1]-air_a_mat_data[i][1])*air_a_flow_data[i][1])  +  (capacity*(air_b_sat_data[i][1]-air_b_mat_data[i][1])*air_b_flow_data[i][1]))*ratio[i][1]
        cool_power_data.append([ ratio[i][0] , result])
    H_C_power_data=[]
    for i in range(min(len(light_data) , len(recep_data) , len(cool_power_data))): 
        H_C_power_data.append([cool_power_data[i][0] , cool_power_data[i][1] + light_data[i][1]+ recep_data[i][1]])
  
###############################################################################    
    # interpolate the data over a fixed time step    
    imposed_time=[]
    x = max( T_data1[0][0], T_data2[0][0], Hum_data3[0][0], Hum_data4[0][0], co2_data5[0][0], co2_data6[0][0],  T_outdoor_data7[0][0] ) 
    limit= min( T_data1[-1][0] , T_data2[-1][0], Hum_data3[-1][0], Hum_data4[-1][0], co2_data5[-1][0], co2_data6[-1][0],  T_outdoor_data7[-1][0] )
    while x <= limit:
        imposed_time.append(x)
        x += timestep*60*1000
        
    def interpole(data, time):
        time1 = [item[0] for item in data]
        value1= [item[1] for item in data]
        data_synchro = interp1d(time1,value1)(time)
        data_synchro = [i for i in data_synchro]
        return data_synchro
    
    # interpolate temperature setpoint data
    T_setpt_data_synchro=interpole(T_setpt_data, imposed_time)
    
    # interpolate temperature data
    T_data1_synchro=interpole(T_data1, imposed_time)
    T_data2_synchro=interpole(T_data2, imposed_time)
    
    # interpolate humidity data
    Hum_data3_synchro=interpole(Hum_data3, imposed_time)
    Hum_data4_synchro=interpole(Hum_data4, imposed_time)
    
    # interpolate co2 data
    co2_data5_synchro=interpole(co2_data5, imposed_time)
    co2_data6_synchro=interpole(co2_data6, imposed_time)
    
    # interpolate outdoor temperature data
    T_outdoor_data7_synchro = interpole(T_outdoor_data7, imposed_time)
    
    # interpolate ventilation data
    vent_power_data_synchro = interpole(vent_power_data, imposed_time)
    
    # interpolate cooling power data
    H_C_power_data_synchro = interpole(H_C_power_data, imposed_time)
    

    # average each type of data
    T_data_synchro_average = [(a+b)/2 for a, b in zip(T_data1_synchro, T_data2_synchro)]
    for i in range(len(T_data_synchro_average)):
        T_data_synchro_average[i] = (T_data_synchro_average[i]-32)*5/9
    Hum_data_synchro_average = [(a+b)/2 for a, b in zip(Hum_data3_synchro, Hum_data4_synchro)]
    co2_data_synchro_average = [(a+b)/2 for a, b in zip(co2_data5_synchro, co2_data6_synchro)]
    for i in range(len(co2_data_synchro_average)):
        if co2_data_synchro_average[i] > 1500:
            co2_data_synchro_average[i] = co2_data_synchro_average[i-1]
    
    
    # calculate the calendar data
    Calendar_data=[]
    Season=[]
    Human_date=[]
    Human_power=[]
    ###### Calendar data=0 --> weekend
    ###### Calendar data=1 --> work night
    ###### Calendar data=2 --> work day
    ###### Season =  1 --> winter
    ###### Season =  0 --> mid season
    ###### Season = -1 --> summer
    for i in imposed_time:
        date = datetime.datetime.fromtimestamp(i/1000).strftime('%Y-%m-%d %H:%M:%S')
        Human_date.append(date)
        year = int (date[0:4])
        month = int (date[5:7])
        day = int (date[8:10])
        hour = int(date[11:13])
        day_number=datetime.date(year, month, day).weekday()
        if month==1 or month==2 or month==12:
            Season.append(1)
        if month==6 or month==7 or month==8 or month==9:
            Season.append(-1)
        if month==3 or month==4 or month==5 or month==10 or month==11:
            Season.append(0)
        
        if day_number!=5 and day_number!=6:
            if hour>=7 and hour<19:
                Cal_day = 2
            else: 
                Cal_day = 1
        else: 
            Cal_day = 0
        Calendar_data.append(Cal_day)
        
        
        
        if Cal_day==2 and (hour<16 and hour>=10):
            number=30
        else:
            if Cal_day==2 and ((hour<10 and hour>=7) or (hour<19 and hour>=16)):
                number=15
            else:
                number=0
        Human_power.append( number*0.1 )
    
###############################################################################    
    # output of the data acquisition
    DATA_LIST = {
        'Timestamp': imposed_time,
        'Temperature': T_data_synchro_average,
        'Humidity': Hum_data_synchro_average,
        'Calendar data': Calendar_data,
        'Human date': Human_date,
        'Season': Season,
        'CO2': co2_data_synchro_average,
        'Outdoor Temperature': T_outdoor_data7_synchro,
        'Ventilation': vent_power_data_synchro,
        'H/C power': H_C_power_data_synchro,
        'Setpoint Temperature': T_setpt_data_synchro,
        'Human_power': Human_power,
    }
    
    print 'Calculation and interpolation done'
    print
    
    print 'Write start..'
    # Create a path and a file to save the data
    workbook = xlsxwriter.Workbook('DATA_LIST.xlsx')
    worksheet = workbook.add_worksheet()
    col = 0
    for key in DATA_LIST.keys():
        worksheet.write(0, col, key)
        col = col + 1
    col = 0
    for data in DATA_LIST.values():
        row = 1
        for value in data:
            worksheet.write(row, col, value)
            row += 1
        col = col + 1
    workbook.close()
    
    # save the metadata of the used sensors 
    i=0
    for metadata in [T_tags1,T_tags2, Hum_tags3, Hum_tags4, co2_tags5, co2_tags6, T_outdoor_tags7]:
        i=i+1
        with open(os.path.join('.Cache', 'metadata'+str(i)+'.txt'), 'w') as f:
            for key, value in metadata.items():
                f.write(key+':'+value+'\n')
    
    print 'Write done'
    return DATA_LIST
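A minimal way to run this example, assuming the module-level UUID constants it references (T_uuid1, T_uuid2, Hum_uuid3, VAV_flow_uuid, and so on) are defined earlier in the script:

if __name__ == '__main__':
    DATA_LIST = data_acquisition()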
Example #10
from smap.archiver.client import SmapClient
from smap.contrib import dtutil

from matplotlib import pyplot
from matplotlib import dates

# make a client
c = SmapClient("http://www.openbms.org/backend")

# start and end values are Unix timestamps
start = dtutil.dt2ts(dtutil.strptime_tz("1-1-2013", "%m-%d-%Y"))
end   = dtutil.dt2ts(dtutil.strptime_tz("1-2-2013", "%m-%d-%Y"))

# download the data and metadata
tags = c.tags("Metadata/Extra/Type = 'oat'")
uuids, data = c.data("Metadata/Extra/Type = 'oat'", start, end)

# make a dict mapping uuids to data vectors
data_map = dict(zip(uuids, data))

# plot all the data
for timeseries in tags:
  d = data_map[timeseries['uuid']]
  # since we have the tags, we can add some metadata
  label = "%s (%s)" % (timeseries['Metadata/SourceName'],
                       timeseries['Properties/UnitofMeasure'])
  # we can plot all of the series in their appropriate time zones
  pyplot.plot_date(dates.epoch2num(d[:, 0] / 1000), d[:, 1], '-', 
                   label=label,
                   tz=timeseries['Properties/Timezone'])
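The snippet stops after plotting; a typical way to finish the figure is:

pyplot.legend(loc='best')
pyplot.show()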
Example #11
import time

import numpy as np
import quantities as pq

from smap.archiver.client import SmapClient
from matplotlib import pyplot
from matplotlib import dates

##t = time.time()
t = time.strptime("30 Nov 15 23 40 00", "%d %b %y %H %M %S")
t = time.mktime(t)
c = SmapClient("http://www.openbms.org/backend")
source = "Metadata/SourceName = 'Sutardja Dai Hall BACnet'"
where = source + " and Path ~ 'S[0-9]-[0-9][0-9]' and" +\
        "(Path ~ 'CLG_LOOPOUT' or Path ~ 'HTG_LOOPOUT' or " +\
        "Path ~ 'AIR_VOLUME' or Path ~ 'VLV_POS' or " +\
        "Path ~ 'AI_3' or Path ~ '')"
tags = c.tags(where)
data = c.prev(where, t, streamlimit=1000, limit=1000)
# vav with reheat valve
vavs_rhv = ['S1-01', 'S1-02', 'S1-03', 'S1-04', 'S1-07', 'S1-08', 'S1-09', 'S1-10', 'S1-13', 'S1-15', 'S1-16', 'S1-17', 'S1-18', 'S1-19', 'S1-20', 'S2-01', 'S2-02', 'S2-03', 'S2-04', 'S2-05', 'S2-06', 'S2-07', 'S2-10', 'S2-11', 'S2-12', 'S2-13', 'S2-14', 'S2-15', 'S2-16', 'S2-17', 'S2-18', 'S2-19', 'S2-20', 'S2-21', 'S3-01', 'S3-02', 'S3-03', 'S3-04', 'S3-05', 'S3-06', 'S3-07', 'S3-08', 'S3-09', 'S3-10', 'S3-11', 'S3-12', 'S3-15', 'S3-16', 'S3-17', 'S3-18', 'S3-19', 'S3-20', 'S3-21', 'S4-01', 'S4-02', 'S4-03', 'S4-04', 'S4-05', 'S4-06', 'S4-07', 'S4-08', 'S4-09', 'S4-11', 'S4-12', 'S4-13', 'S4-15', 'S4-16', 'S4-18', 'S4-19', 'S4-20', 'S4-21', 'S5-01', 'S5-02', 'S5-03', 'S5-04', 'S5-05', 'S5-06', 'S5-07', 'S5-08', 'S5-09', 'S5-10', 'S5-11', 'S5-12', 'S5-13', 'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21', 'S6-01', 'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06', 'S6-07', 'S6-08', 'S6-10', 'S6-11', 'S6-12', 'S6-13', 'S6-15', 'S6-17', 'S6-18', 'S6-19', 'S6-20', 'S7-01', 'S7-02', 'S7-03', 'S7-04', 'S7-05', 'S7-06', 'S7-07', 'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14', 'S7-15', 'S7-16']


for v in sorted(vavs_rhv):
    u_rhv = [tag['uuid'] for tag in tags if v in tag['Path'] and 'VLV_POS' in tag['Path']]
    u_ht = [tag['uuid'] for tag in tags if v in tag['Path'] and 'HTG_LOOPOUT' in tag['Path']]
    u_dat = [tag['uuid'] for tag in tags if v in tag['Path'] and 'AI_3' in tag['Path']]
    u_cl = [tag['uuid'] for tag in tags if v in tag['Path'] and 'CLG_LOOPOUT' in tag['Path']]
    u_cfm = [tag['uuid'] for tag in tags if v in tag['Path'] and 'AIR_VOLUME' in tag['Path']]
    rhv = [np.array(d['Readings'])[:, 1] for d in data if u_rhv[0] == d['uuid']][0]
    ht = [np.array(d['Readings'])[:, 1] for d in data if u_ht[0] == d['uuid']][0]
    dat = [np.array(d['Readings'])[:, 1] for d in data if u_dat[0] == d['uuid']][0]
    cl = [np.array(d['Readings'])[:, 1] for d in data if u_cl[0] == d['uuid']][0]
Example #12
class ZoneController(driver.SmapDriver):
    def setup(self, opts):
        self.rate = float(opts.get('rate',10))
        # Current state of the points
        self.heatSP=int(opts.get('defaultHeatSetpoint',68))
        self.coolSP=int(opts.get('defaultCoolSetpoint',76))

        self.therm_temp = 70

        self.trim = int(opts.get('trim',0)) # dummy zoneCtrl action

        # create timeseries for zone controller actions
        heatSetPoint = self.add_timeseries('/heatSetpoint', 'F', data_type='double')
        coolSetPoint = self.add_timeseries('/coolSetpoint', 'F', data_type='double')
        # add actuators to them
        heatSetPoint.add_actuator(setpointActuator(controller=self, range=(40,90)))
        coolSetPoint.add_actuator(setpointActuator(controller=self, range=(40,90)))

        # get master set point stream paths
        self.archiver_url = opts.get('archiver_url','http://localhost:8079')
        self.heatSPwhere = opts.get('heatSPwhere', '')
        self.coolSPwhere = opts.get('coolSPwhere', '')
        self.thermwhere = opts.get('thermwhere', '')
        self.tempwhere = opts.get('tempwhere', '')

        print "ZoneController: heat sp where = ", self.heatSPwhere
        print "ZoneController: cool sp where = ", self.coolSPwhere
        print "ZoneController: thermostat where = ", self.thermwhere
        print "ZoneController: temp sensor where = ", self.tempwhere

        self.client = SmapClient(self.archiver_url)

        self.heatSPclient = RepublishClient(self.archiver_url, self.heatSPcb, restrict=self.heatSPwhere)
        self.coolSPclient = RepublishClient(self.archiver_url, self.coolSPcb, restrict=self.coolSPwhere)
        #self.tempclient = RepublishClient(self.archiver_url, self.tempcb, restrict=self.tempwhere)
        self.thermclient = RepublishClient(self.archiver_url, self.thermcb, restrict=self.thermwhere)


    def start(self):
        print "zone controller start: ", self.rate
        self.heatSPclient.connect() # activate subscription scheduler setpoints
        self.coolSPclient.connect() 
        #self.tempclient.connect() 
        self.thermclient.connect() 
        periodicSequentialCall(self.read).start(self.rate)

    def read(self):
        all_readings = self.client.latest(self.tempwhere)
        for p in all_readings:
            print '-'*20
            md = self.client.tags('uuid = "'+p['uuid']+'"')[0]
            print 'Room:', md['Metadata/Room']
            print 'Reading:', p['Readings'][0][1]
            ts = dtutil.ts2dt(p['Readings'][0][0]/1000)
            print 'Time:', dtutil.strftime_tz(ts, tzstr='America/Los_Angeles')
        avg_room_temp = sum([x['Readings'][0][1] for x in all_readings]) / float(len(all_readings))

        # get difference between avg room temperature and thermostat temperature
        new_diff = self.therm_temp - avg_room_temp

        # periodically update output streams.  Here a bogus adjustment
        self.add('/heatSetpoint', self.heatSP + new_diff)
        self.add('/coolSetpoint', self.coolSP + new_diff)
        print "zone controller publish: ", self.heatSP, self.coolSP

    # Event handler for publication to heatSP stream
    def heatSPcb(self, _, data):
        # list of arrays of [time, val]
        print "ZoneController heatSPcb: ", data
        mostrecent = data[-1][-1] 
        self.heatSP = mostrecent[1]

    def coolSPcb(self, _, data):
        # list of arrays of [time, val]
        print "ZoneController coolSPcb: ", data
        mostrecent = data[-1][-1] 
        self.coolSP = mostrecent[1]

    def tempcb(self, _, data):
        # list of arrays of [time, val]
        print "ZoneController tempcb: ", data


    def thermcb(self, _, data):
        # list of arrays of [time, val]
        print "ZoneController thermcb: ", data
        self.therm_temp = data[-1][-1][1]
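A driver like this is normally loaded from an sMAP ini configuration. A sketch of what that section could look like (the module path, where-clauses, and values are assumptions for illustration; rate and archiver_url mirror the defaults read in setup):

[/zonecontroller]
type = mydrivers.zone.ZoneController
rate = 10
archiver_url = http://localhost:8079
heatSPwhere = Metadata/Name = 'master heat setpoint'
coolSPwhere = Metadata/Name = 'master cool setpoint'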
Example #13
             'Plant/Condenser.HXS-T',
             'CW_Pump_1/Analog_Values.AV-8',
             'CW_Pump_7/Analog_Values.AV-8',
             'CW_Pump_2/Analog_Values.AV-8',
             'CW_Pump_8/Analog_Values.AV-8',
             'Cool_Tower_Fan/Analog_Values.AV-8',
             'HW_Pump_7/Analog_Values.AV-8',
             'HW_Pump_5/Analog_Values.AV-8'
             ]

restrict = " Metadata/SourceName = '%s' and "%source\
                 + ' and '.join(["Path ~ '%s'"] * len(path_list_and))\
                 %tuple(path_list_and) + " and ("\
                 + ' or '.join(["Path ~ '%s'"] * len(path_list_or)) \
                 %tuple(path_list_or) + ")"
tags = c.tags(restrict)

startDate = "01/01/2014"
endDate = "01/02/2014"
print "Start date: ", str(startDate), 
print "End date: ", str(endDate)
name = 'Brower_data_V3.csv' 
dt_format = '%Y-%m-%d %H:%M:%S'
query_data = 'apply window(first, field="hour") to data in ("' + startDate + '" , "' + str(endDate) + '") limit 10000000 where' + restrict
data = c.query(query_data)
N=len(data)
df = pd.DataFrame()
for i in range(N):
  d = np.array(data[i]['Readings'])
  if d.any():
    df['timestamp'] = d[:,0]
Example #14
# start and end values are Unix timestamps
t_start = "1-1-2012 0:00"
t_end = "1-1-2013 0:00"
start = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_start, "%m-%d-%Y %H:%M"))
end   = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_end, "%m-%d-%Y %H:%M"))

stnc = "select distinct Path where Metadata/SourceName='Cory Hall Dent Meters' and Path ~ '(?i)power$' and not Path ~ '.*ABC.*'"
pathlist = c.query(stnc) #the result is a list

pathlist = pathlist[275:] 
for path in pathlist:
	print "==========Fetching streams in path %s=========="%path
	# fetch the metadata of the path wanted
	tags = c.tags("Path='%s'"%path)

	# mkdir for each path
	path1 = "/Users/hdz_1989/Downloads/SDB/Cory"
	# folder = path
	# if not os.path.exists(path1+'/'+folder):
	# 	os.makedirs(path1+'/'+folder)

	# ft = open(path+'/'+folder+'/' + 'date.txt', 'w')
	# ft.write(t_start + ' ~ ' + t_end)
	# ft.close()

	for timeseries in tags:
		uuid = timeseries['uuid']
		# filename = timeseries['Path'].split('/')[-1]
		filename = timeseries['Path'][1:].replace('/','_')
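		# The example is cut off here. Following the same pattern used in
		# Examples #16 and #19, a sketch of the typical continuation fetches
		# the readings for each uuid with a data query:
		clause = "select data in (%.0f, %.0f) limit -1 where uuid = '%s'" \
			% (start, end, uuid)
		readings = c.query(clause)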
Example #15
    def load_tags(self):
        """Load the matching tags (in a thread)"""
        c = SmapClient(self.source_url)
        return c.tags(self.restrict)
Example #16
from smap.archiver.client import SmapClient
from smap.contrib import dtutil

from matplotlib import pyplot
from matplotlib import dates
import os
# make a client
c = SmapClient("http://new.openbms.org/backend")

# start and end values are Unix timestamps
t_start = "11-4-2012 00:00"
t_end = "12-1-2012 23:59"
start = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_start, "%m-%d-%Y %H:%M"))
end   = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_end, "%m-%d-%Y %H:%M"))

# download the metadata of path wanted
tags = c.tags("Path like '/sdh_co2/13%'")

# make a dict mapping uuids to data vectors
path = "/Users/hdz_1989/Downloads/SDB/SDH"
folder = tags[0]['Path'].split('/')[-2]
if not os.path.exists(path+'/'+folder):
	os.makedirs(path+'/'+folder)

ft = open(path+'/'+folder+'/' + 'date.txt', 'w')
ft.write(t_start + ' ~ ' + t_end)
ft.close()

for timeseries in tags:
	uuid = timeseries['uuid']
	filename = timeseries['Path'].split('/')[-1]
	clause = "select data in (%.0f, %.0f) limit -1 where uuid = '%s'" \
Example #17
class AnalyzeData(object):
    def __init__(self):
        # vav,chw,vfd naming
        self.vavs_rhv = [
            'S1-01', 'S1-02', 'S1-03', 'S1-04', 'S1-07', 'S1-08', 'S1-09', 'S1-10', 'S1-13', 'S1-15', 'S1-16', 'S1-17', 'S1-18', 'S1-19', 'S1-20',
            'S2-01', 'S2-02', 'S2-03', 'S2-04', 'S2-05', 'S2-06', 'S2-07', 'S2-10', 'S2-11', 'S2-12', 'S2-13', 'S2-14', 'S2-15', 'S2-16', 'S2-17', 'S2-18', 'S2-19', 'S2-20', 'S2-21',
            'S3-01', 'S3-02', 'S3-03', 'S3-04', 'S3-05', 'S3-06', 'S3-07', 'S3-08', 'S3-09', 'S3-10', 'S3-11', 'S3-12', 'S3-15', 'S3-16', 'S3-17', 'S3-18', 'S3-19', 'S3-20', 'S3-21',
            'S4-01', 'S4-02', 'S4-03', 'S4-04', 'S4-05', 'S4-06', 'S4-07', 'S4-08', 'S4-09', 'S4-11', 'S4-12', 'S4-13', 'S4-15', 'S4-16', 'S4-18', 'S4-19', 'S4-20', 'S4-21',
            'S5-01', 'S5-02', 'S5-03', 'S5-04', 'S5-05', 'S5-06', 'S5-07', 'S5-08', 'S5-09', 'S5-10', 'S5-11', 'S5-12', 'S5-13', 'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21',
            'S6-01', 'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06', 'S6-07', 'S6-08', 'S6-10', 'S6-11', 'S6-12', 'S6-13', 'S6-15', 'S6-17', 'S6-18', 'S6-19', 'S6-20',
            'S7-01', 'S7-02', 'S7-03', 'S7-04', 'S7-05', 'S7-06', 'S7-07', 'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14', 'S7-15', 'S7-16'
        ]
        self.vavs_no_rhv = [
            'S1-05', 'S1-06', 'S1-14', 'S2-08', 'S2-09', 'S3-13', 'S3-14', 'S4-10', 'S4-14', 'S4-17',
            'S5-15', 'S5-17', 'S6-09', 'S6-14', 'S6-16', 'S7-11', 'S7-12'
        ]
        self.vavs = self.vavs_rhv + self.vavs_no_rhv
        self.chw_coils = ['AH2A', 'AH2B']
        self.vfds = ['AH2A', 'AH2B']
        # define component fields
        self.components_by_type = {
            'vfds': self.vfds,
            'chw_coils': self.chw_coils,
            'vavs_rhv': self.vavs_rhv,
            'vavs_no_rhv': self.vavs_no_rhv
        }
        self.datapoints_by_type = {
            'vfds': ['SF_VFD:POWER'],
            'chw_coils': ['SAT', 'MAT', 'CCV', 'SF_CFM'],
            'vavs_no_rhv': ['AIR_VOLUME', 'CTL_FLOW_MIN', 'CTL_FLOW_MAX', 'CTL_STPT'],
            'vavs_rhv': ['AIR_VOLUME', 'CTL_FLOW_MIN', 'CTL_FLOW_MAX', 'CTL_STPT', 'VLV_POS',
                         'CLG_LOOPOUT', 'HTG_LOOPOUT', 'AI_3', 'rhv_closed_temp_change']
        }
        self.unit_of_estimated_data = {
            'rhw_cost': '$/hr', 'fan_cost': '$/hr', 'chw_cost': '$/hr', 'tot_cost': '$/hr',
            'chw_power': 'kW', 'chw_power_AH2A': 'kW', 'chw_power_AH2B': 'kW', 'rhw_power': 'kW',
            'fan_power': 'kW', 'fan_power_from_regression_curve': 'kW',
            'fan_power_AH2A': 'kW', 'fan_power_AH2B': 'kW', 'ahu_afr': 'cfm'
        }
        # define data restriction
        self.restrict_central = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path ~ 'AH2' and " \
            + "(Path ~ 'SAT' or Path ~ 'MAT' or Path ~ 'SF_CFM' or Path ~ 'CCV' or Path ~ 'SF_VFD:POWER') and " \
            + "not (Path ~ 'STP' or Path ~ 'RAH')"
        self.restrict_local = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path ~ 'S[0-9]-[0-9][0-9]' and " \
           + "not (Path ~ 'act') and (Path ~ 'AI_3' or Path ~ 'VLV_POS' or Path ~ 'HTG_LOOPOUT' or " \
           + "Path ~ 'CLG_LOOPOUT' or Path ~ 'AIR_VOLUME' or Path ~ 'ROOM_TEMP' or " \
           + "Path ~ 'CTL_FLOW_MIN' or Path ~ 'CTL_FLOW_MAX' or Path ~ 'CTL_STPT')"
        self.restrict_result = "Metadata/SourceName = 'Sutardja Dai Hall SAT Reset' and Path ~ 'SAT_Reset_debug_20160719' and " \
           + "Path ~ 'rhv_closed_temp_change' or Path ~ 'computed_sat' and " \
           + "not Path ~ 'SAT_Reset_debug_20160719_C'"
        self.restrict_new = "(%s) or (%s) or (%s)" % \
                            (self.restrict_central, self.restrict_local, self.restrict_result)
        self.restrict_oat = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path = '/Siemens/SDH.PXCM-08/SDH/OAT'"
        # create sMAP client object
        self.archiver_url = 'http://new.openbms.org/backend'
        self.smap_client = SmapClient(self.archiver_url)
        # parameters
        self.limit = 40

    def download_data(self, timestamp):
        # store uuid and data
        self.data = {}
        tags = self.smap_client.tags(self.restrict_new)
        fetched_data = self.smap_client.prev(self.restrict_new, ref=timestamp, streamlimit=10000, limit=self.limit)
        print 'Total number of data streams: %s' % len(fetched_data)
        for component_type in self.datapoints_by_type.keys():
            for component in self.components_by_type[component_type]:
                for datapoint in self.datapoints_by_type[component_type]:
                    uuid = [tag['uuid'] for tag in tags if (datapoint in tag['Path'] and component in tag['Path'])][0]
                    download_data = [np.array(d['Readings']) for d in fetched_data if d['uuid'] == uuid][0]
                    self.data['-'.join([component, datapoint, 'uuid'])] = uuid
                    self.data['-'.join([component, datapoint, 'value'])] = download_data[:,1]
                    self.data['-'.join([component, datapoint, 'time'])] = download_data[:,0]
                    self.data['-'.join([component, datapoint, 'dt'])] = [datetime.fromtimestamp(t/1000).strftime('%Y-%m-%d %H:%M:%S') for t in download_data[:,0]]
                    # print 'Time of %s: %s' % \
                    #       ('-'.join([component, datapoint]), self.data['-'.join([component, datapoint, 'dt'])][0])
        oat = np.array(self.smap_client.prev(self.restrict_oat, ref=timestamp)[0]['Readings'])
        self.data['OAT'] = oat[:, 1]
        self.data['OAT-dt'] = [datetime.fromtimestamp(t/1000).strftime('%Y-%m-%d %H:%M:%S') for t in oat[:, 0]]
        u_sat = [tag['uuid'] for tag in tags if ('computed_sat' in tag['Path'])][0]
        computed_sat = [np.array(d['Readings']) for d in fetched_data if d['uuid'] == u_sat][0]
        self.data['computed_sat'] = computed_sat[:,1]
        self.data['computed_sat-time'] = computed_sat[:,0]
        self.data['computed_sat-dt'] = [datetime.fromtimestamp(t/1000).strftime('%Y-%m-%d %H:%M:%S') for t in computed_sat[:,0]]
        self.data['SAT-avg-value'] = np.mean([self.data[coil + '-SAT-value'] for coil in self.chw_coils], axis=0)
        self.data['MAT-avg-value'] = np.mean([self.data[coil + '-MAT-value'] for coil in self.chw_coils], axis=0)
        self.data['AFR-ahu-tot-value'] = np.sum([self.data[coil + '-SF_CFM-value'] for coil in self.chw_coils], axis=0)
        self.data['AFR-zone-tot-value'] = np.sum([self.data[vav + '-AIR_VOLUME-value'] for vav in self.vavs], axis=0)
        return self.data
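Hypothetical usage of the class above (the reference timestamp is simply "now", in seconds, matching how the other examples call prev()):

import time

analyzer = AnalyzeData()
data = analyzer.download_data(time.time())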
Example #18
import re
import time
import pprint as pp

from smap.archiver.client import SmapClient
from matplotlib import pyplot, dates

c = SmapClient(base='http://new.openbms.org/backend',
               key=['XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8'])

source = "Metadata/SourceName = 'Sutardja Dai Hall BACnet'"
restrict = source + " and Path ~ 'S[0-9]-[0-9][0-9]' and (Path ~ 'AIR_VOLUME' or  Path ~ 'AI_3' or Path ~ 'ROOM_TEMP')"
# start = date(2015, 10, 23)
# start_timestamp = time.mktime(start.timetuple()) 

t = time.strptime("10 Mar 16 13 40 00", "%d %b %y %H %M %S")
timestamp = time.mktime(t)

tags = c.tags(restrict)
data = c.prev(restrict, ref=timestamp, limit=1440, streamlimit=10000)
pp.pprint(data[0])
print 'length of data: '+str(len(data))


# name = 'zone_data.csv'
# f = open(name,'w')
# csv_writer = csv.writer(f)
# headers = ['Zone Name',
#            'Time',
#            'Zone airflow [cfm]', 
#            'Zone discharge air temperature [F]',
#            'Zone temperature [F]']
# csv_writer.writerow(headers)
# f.close()
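A rough sketch of how the commented-out CSV export could be completed, pairing each stream's readings with its tags by uuid (the column choices are assumptions):

import csv

readings_by_uuid = dict((d['uuid'], d['Readings']) for d in data)
with open('zone_data.csv', 'w') as f:
    csv_writer = csv.writer(f)
    csv_writer.writerow(['Path', 'Time [ms]', 'Value'])
    for tag in tags:
        for ts, val in readings_by_uuid.get(tag['uuid'], []):
            csv_writer.writerow([tag['Path'], ts, val])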
Example #19
stnc = "select distinct Metadata/Location/RoomNumber where Metadata/SourceName='KETI Motes'"
roomlist = c.query(stnc) #the result is a list

#roomlist = roomlist[16:]
#roomlist = ['621A','621B','621C','621D','621E']
for room in roomlist:
	print "==========Fetching streams in Room %s=========="%room
	stnc = "select Path where Metadata/Location/RoomNumber='%s' and not Path ~ '.*pir.*'" %room
	streams = c.query(stnc)
	if len(streams)>0:
		# print "----%d streams in Room %s----"%(len(streams), room)

		for s in streams:
			# fetch the metadata of path wanted
			tags = c.tags("Path='%s'"%s['Path'])

			# mkdir for each room
			path = "/Users/hdz_1989/Documents/Dropbox/SDB/KETI_tmp"
			folder = room
			if not os.path.exists(path+'/'+folder):
				os.makedirs(path+'/'+folder)

			# ft = open(path+'/'+folder+'/' + 'date.txt', 'w')
			# ft.write(t_start + ' ~ ' + t_end)
			# ft.close()

			for timeseries in tags:
				uuid = timeseries['uuid']
				filename = timeseries['Path'].split('/')[-1]
				clause = "select data in (%.0f, %.0f) limit -1 where uuid = '%s'" \
Example #20
import pdb, time, datetime
import numpy as np
import smtplib
import pprint as pp

from smap.archiver.client import SmapClient

c = SmapClient("http://new.openbms.org/backend")
t = time.time()
source = 'Sutardja Dai Hall TAV'
error_uuid_list = ['36335391-49a6-5b21-9f00-fcce66eb5a74']
N = len(error_uuid_list)
where = "Metadata/SourceName = '%s' and (" %(source)\
                    + ' or\n '.join(["uuid = '%s'"] * N) \
                    %tuple(error_uuid_list) + ")"
tags = c.tags(where)
error_dict = {}
error_list = [[1441431059000.0, 1.0], [1441430819000.0, 1.0]]
#error_list = [[1441605186000.0, 0.0]]
pp.pprint(error_list)
for tag in tags:
    name = str(tag['Path'].split('/')[1])
    u = str(tag['uuid'])
    if name not in error_dict:
        error_dict[name] = {'uuid': u, 'Readings': []}


def error_array_fill():