Example 1
    def setup(self, opts):
        self.restrict = opts.get("Restrict")
        self.group = opts.get("Group")
        self.opstr = opts.get("Operator")
        OperatorDriver.setup(self, opts, self.restrict, shelveoperators=False)

        # look up the streams, units, and group tags.
        client = SmapClient()
        streams = client.tags(self.restrict, '*')
        # a narrower tag selection would also work, e.g.
        # 'uuid, Properties/UnitofMeasure, Metadata/SourceName, %s' % self.group
        groupitems = {}

        # find the groups
        for s in streams:
            groupitems.setdefault(s[self.group], []).append(s)

        # instantiate one operator per group with the appropriate inputs
        for group, tags in groupitems.iteritems():
            inputs = map(operator.itemgetter('uuid'), tags)
            op = self.operator_class(tags)
            path = '/' + util.str_path(group)
            self.add_operator(path, op)
Example 2
    def setup(self, opts):
        """Set up what streams are to be subsampled.

        We'll only find new streams on a restart ATM.
        """
        restrict = opts.get(
            "Restrict", "has Path and (not has Metadata/Extra/SourceStream)")
        OperatorDriver.setup(self,
                             opts,
                             shelveoperators=False,
                             raw=True,
                             inherit_metadata=False)
        client = SmapClient(smapconf.BACKEND)
        source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
        for new in source_ids:
            id = str(new['uuid'])
            if 'Properties/UnitofMeasure' not in new:
                new['Properties/UnitofMeasure'] = ''
            if id not in self.operators:
                # register a 5-minute and a 1-hour subsample operator per stream
                o1 = SubsampleOperator([new], 300)
                self.add_operator('/%s/%s' % (id, o1.name), o1)
                o2 = SubsampleOperator([new], 3600)
                self.add_operator('/%s/%s' % (id, o2.name), o2)
        log.msg("Done setting up subsample driver; %i ops" % len(source_ids))
Example 3
    def setup(self, opts, restrict=None, shelveoperators=False, cache=True, raw=False):
        self.load_chunk_size = datetime.timedelta(hours=int(opts.get('ChunkSize', 24)))
        self.source_url = opts.get('SourceUrl', 'http://new.openbms.org/backend')
        # self.source_url = opts.get('SourceUrl', 'http://ar1.openbms.org:8079')
        if not raw and restrict:
            self.restrict = '(' + restrict + ') and not has Metadata/Extra/Operator'
        else:
            self.restrict = restrict

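        # optionally persist operator state across restarts in an on-disk shelve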
        if shelveoperators:
            self.operators = shelve.open(opts.get('OperatorCache', '.operators'),
                                         protocol=2, writeback=True)
            # sync the operator state periodically and at exit
            util.periodicCallInThread(self.operators.sync).start(60)
            reactor.addSystemEventTrigger('after', 'shutdown', 
                                          self.operators.close)
        else:
            self.operators = {}
            self.oplist = []
        self.arclient = SmapClient(self.source_url)
        self.cache = cache

        # create timeseries from cached operator state
        for sid, oplist in self.operators.iteritems():
            for path, op in oplist.itervalues():
                self.add_operator(path, op)
Example 4
 def current(self):
     c = SmapClient()
     latest = c.latest(self.select, streamlimit=1000)
     test = self.get_test()
     levels = []
     for v in latest:
         if len(v['Readings']):
             level = test(v['Readings'][0][1])[0]
             v['level'] = {
                 "priority": level.priority,
                 "description": level.description,
             }
             levels.append(level)
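     # NB: max() below assumes at least one stream reported a reading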
     return latest, max(levels)
Example 5
 def setup(self, opts):
     url = opts.get('url', 'http://new.openbms.org/backend')
     self.client = SmapClient(url)
     self.limit = float(opts.get('limit', 300))  # Seconds
     self.rate = float(opts.get('rate', 300))
     self.alert_interval = float(opts.get(
         'alert_interval', 86400))  # Minimum seconds between alerts
     smtp_server = opts.get('smtp_server')
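     # connect on the SMTP submission port (587) and upgrade to TLS before logging in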
     self.server = smtplib.SMTP(smtp_server, 587)
     self.server.starttls()
     self.email = opts.get('email_address')
     pw = opts.get('password')
     self.server.login(self.email, pw)
     self.restrict = opts.get('restrict')
     self.recipient = opts.get('recipient')
     self.carrier = opts.get('carrier')
Example 6
    def send_alert(self, to, alert, streams, level):
        # look up the tags for these streams
        uuids = set(streams.keys())
        uuids = map(lambda u: "uuid = '%s'" % u, uuids)
        client = SmapClient()
        tags = client.tags(' or '.join(uuids), nest=True)
        tags = dict(((x['uuid'], x) for x in tags))

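        # attach the trigger time and value for each stream to its tag record
        # so the template can render per-stream details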
        def make_context(params):
            rv = []
            for uid, state in params.iteritems():
                t, v = state['time'], state['value']
                if uid in tags:
                    rv.append(tags[uid])
                    rv[-1]['AlertTime'] = time.ctime(t / 1000)
                    rv[-1]['AlertValue'] = v
            return rv

        context = make_context(streams)
        logentry = Log(alert=alert, when=datetime.datetime.now())
        logentry.save()

        # generate the text to send, by building a context for our
        # template.
        template = Template(self.template)
        context = Context({
            'streams': context,
            'level': level,
            'permalink': settings.ROOT_NETLOC + '/admin/alert/log/' + str(logentry.id),
            'alarmpage': settings.ROOT_NETLOC + '/admin/alert/alert/' + str(alert.id),
            'timestamp': logentry.when,
            'alarm': alert.__unicode__(),
        })
        logentry.message = template.render(context)
        print logentry.message
        logentry.save()

        emaillib.send(to, '%s from %s' % (level, settings.ROOT_NETLOC),
                      logentry.message)
Example 7
    def read_stream_data(self, num_days=1):
        self.points = {}
        c = SmapClient("http://new.openbms.org/backend")
        for point in self.input_variables:
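            # have the archiver downsample each stream to self.rate-second means
            # before returning readings for the requested window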
            q = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \
               ( self.rate,  num_days,  point )

            print q
            result = c.query(q)
            readings = result[0]["Readings"]
            self.points[point] = [r[1] for r in readings]

        for point in self.state_variables:
            query = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \
               ( self.rate,  num_days,  point )

            result = c.query(query)
            readings = result[0]["Readings"]
            self.points[point] = [r[1] for r in readings]

        self.predictions = []
        self.model_params = []
        self.actual_outputs = []
Example 8
 def load_tags(self):
     """Load the matching tags (in a thread)"""
     c = SmapClient(self.source_url)
     return c.tags(self.restrict)
Example 9
from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import numpy as np
import pandas as pd
import datetime
import subprocess

# archiver endpoint to download the data from
c = SmapClient("http://iiitdarchiver.zenatix.com:9105")

# date range for which to download the data
# (start and end are identical here; adjust them to cover the window you need)

start = dtutil.dt2ts(dtutil.strptime_tz("01-10-2017", "%d-%m-%Y"))
end = dtutil.dt2ts(dtutil.strptime_tz("01-10-2017", "%d-%m-%Y"))

# hard-code the UUIDs we want to download
oat = ["eec41258-f057-591e-9759-8cfdeb67b9af"]

# download the readings for the listed UUIDs over [start, end]
data = c.data_uuid(oat, start, end)

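# data_uuid returns one array of (timestamp_ms, value) pairs per requested UUID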
t = np.array(data)
df = pd.DataFrame(t)

# write each stream to its own CSV file
for i, j in enumerate(t):
    name = str(i) + '.csv'
    with open(name, 'w') as f:
        for time, val in j:
            f.write(
                str(datetime.datetime.fromtimestamp(time / 1000.0)) + ' , ' +
                str(val) + '\n')
Example 10
import os
import sys
from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import re
import json

inf = open("room_metadata")
roomMetadata = json.load(inf)

c = SmapClient("http://new.openbms.org/backend")

outputDir = "data"
if len(sys.argv) < 2:
    print "No output directory provided. Using default <data>"
else:
    outputDir = sys.argv[1].strip()

if os.path.exists(outputDir):
    if not os.path.isdir(outputDir):
        print "A file with the same name exists. Delete it first"
        sys.exit(1)
else:
    os.makedirs(outputDir)

startDate = "05/27/2015"
endDate = "07/04/2015"

numRooms = len(roomMetadata)
count = 0
for room in roomMetadata:
Example 11
from smap.archiver.client import SmapClient
from smap.util import periodicSequentialCall
from smap.contrib import dtutil
from smap.util import find
from datetime import timedelta, date

import numpy as np
import pandas as pd
import pdb
import csv
import shutil
import time
import pprint as pp
import datetime

c = SmapClient(base='http://new.openbms.org/backend',
               key=['WE4iJWG7k575AluJ9RJyAZs25UO72Xu0b4RA',
                    'SA2nYWuHrJxmPNK96pdLKhnSSYQSPdALkvnA'])
t = time.time()
source = 'Brower BACnet'
path_list_and = ['Brower', 'Field_Bus1']
path_list_or=[
             'BrowerAHU2/DA-T',
             'BrowerAHU2/OA-T',
             'BrowerAHU2/SF-value',
             'BrowerAHU2/SF-speed',
             'Plant/Condenser.CWP7-speed',
             'Plant/Condenser.CWP8-speed',
             'Plant/Condenser.CWS-T',
             'Plant/Condenser.CWR-T',
             'Plant/Condenser.HXR-T',
             'Plant/Condenser.HXS-T',
Example 12
"""
Keti mote protocol implementation and sMAP driver.

@author Stephen Dawson-Haggerty <*****@*****.**>
"""

import datetime

from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import numpy as np
import matplotlib.pyplot as plt

c = SmapClient('http://ar1.openbms.org:8079')

HOURS = 5
RATES = [  # ("#", 10),
         ("ppm", 5), ("C", 5)]

prrs = []
for unit, rate in RATES:
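    # count how many readings each KETI mote stream with this unit reported
    # over the last HOURS hours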
    counts = c.query(("apply count to data in now -%ih, now "
                      "limit -1 streamlimit 1000 where "
                      "Properties/UnitofMeasure = '%s' and "
                      "Metadata/SourceName = 'KETI Motes'") %
                     (HOURS, unit))
    for v in counts:
        r = np.array(v['Readings'])
        if len(r):
Example 13
import os
import sys
from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import re
import json

c = SmapClient("http://new.openbms.org/backend",
               key="NAXk19YY45TTiXlajiQGQ8KTp283oHfp2Uly")
rooms = c.query("select distinct Metadata/room-id where Metadata/site='STA'")
metadata = {}
count = 0
numRooms = len(rooms)
for room in rooms:
    count += 1
    print "Building Metadata for room : %s (%d/%d)" % (room, count, numRooms)
    metadata[room] = {}
    sensors = c.query("select * where Metadata/room-id='" + str(room) +
                      "' and Metadata/site='STA'")
    for i in range(len(sensors)):
        if "Name" not in sensors[i]["Metadata"]:
            continue
        pointName = sensors[i]["Metadata"]["Name"]
        roomMetadata = sensors[i]["Metadata"]

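        # map each semantic role (room temperature, supply air, reheat valve) to
        # the corresponding point name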
        if "room_temp" in roomMetadata:
            metadata[room]["room_temp"] = pointName
        if "supply_air_velocity" in roomMetadata or "supply_air_volume" in roomMetadata:
            metadata[room]["supply_air_velocity"] = pointName
        if "reheat_valve_position" in roomMetadata:
            metadata[room]["reheat_valve_position"] = pointName
Example 14
            'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21',
            'S6-01', 'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06',
            'S6-07', 'S6-08', 'S6-10', 'S6-11', 'S6-12', 'S6-13',
            'S6-15', 'S6-17', 'S6-18', 'S6-19', 'S6-20', 'S7-01',
            'S7-02', 'S7-03', 'S7-04', 'S7-05', 'S7-06', 'S7-07',
            'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14', 'S7-15',
            'S7-16'
           ]
rh_coils = dict(zip(rh_coils, [2.0] * len(rh_coils) * pq.F))
rh_stream_names = []
for name in rh_coils:
    rh_stream_names += ['coil_closed_temp_change_' + name,
                        'hot_water_' + name,
                        'instantaneous_zone_load_' + name]

c = SmapClient(base='http://new.openbms.org/backend',
               key='XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8')

# Query necessary tags for energy data
source_energy = 'Sutardja Dai Hall Energy Data'
all_points = pointnames + rh_stream_names + chw_stream_names
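# restrict to the energy source and to Paths matching any of the requested points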
where_energy = ("Metadata/SourceName = '%s' and Path ~ '%s' and (" % (source_energy, p)
                + ' or '.join(["Path ~ '%s'"] * len(all_points)) % tuple(all_points)
                + ")")
tags_energy = c.tags(where_energy)

# Query data for energy calcs as AHU level
source = 'Sutardja Dai Hall BACnet'
path_list = {
                'AH2A_SF_VFD' : 'SDH/AH2A/SF_VFD:POWER',
                'AH2B_SF_VFD' : 'SDH/AH2B/SF_VFD:POWER',
Example 15
from smap.archiver.client import SmapClient
import time
import datetime
import json
import pandas as pd
#pd.options.display.mpl_style = 'default'

client = SmapClient('http://ciee.cal-sdb.org:8079')
# timestamps
end = int(time.time())
start = end - 60*60*24*30 # last month
print start, end

def get_demand():
    # get energy data for same timeframe
    res = client.query('select uuid where Metadata/System = "Monitoring" and Properties/UnitofMeasure = "kW"')
    uuids = [x['uuid'] for x in res]
    data = dict(zip(uuids,client.data_uuid(uuids, start, end, cache=False)))

    # create dataframe, use time as index
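    # note: only the first matching stream's readings are used for the demand frame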
    demand = pd.DataFrame(data.values()[0])
    demand[0] = pd.to_datetime(demand[0], unit='ms')
    demand.index = demand[0]
    del demand[0]
    return demand

def get_hvacstates():
    # get all hvac_state timeseries
    res = client.query('select uuid where Metadata/System = "HVAC" and Path like "%hvac_state"')
    uuids = [x['uuid'] for x in res]
    data = dict(zip(uuids,client.data_uuid(uuids, start, end, cache=False)))
Example 16
from smap.archiver.client import SmapClient
from datetime import timedelta, date

import sys, os, pdb
import time
import pandas as pd
import numpy as np
import datetime
import math

c = SmapClient(base='http://new.openbms.org/backend')
# c = SmapClient(base='http://new.openbms.org/backend',
#                key=['XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8'])
uuid_dict = {
    'uuid1': {
        'u': "b7051656-d8d5-53dd-9221-15de4ce84e43",
        'name': "MSA.MAIN.PWR_REAL_3_P"
    },
    'uuid2': {
        'u': "cc1dfe56-3abc-544e-add6-1bc88712fc90",
        'name': "MSB.MAIN.PWR_REAL_3_P"
    }
}
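# select exactly these two power-meter streams from the Sutardja Dai Hall BACnet source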
restrict = ('Metadata/SourceName = "Sutardja Dai Hall BACnet" and (uuid = "%s" or uuid = "%s")'
            % (uuid_dict['uuid1']['u'], uuid_dict['uuid2']['u']))
#pdb.set_trace()
#TODO: Change Date range here
startF = date(2015, 7, 17)
endF = date(2016, 7, 17)

#Create an empty dataframe to put the data in
dts_startF = time.mktime(startF.timetuple())