class SMAPDataRetriever(object): def __init__(self): self.smapc = SmapClient("http://new.openbms.org/backend") def get_data(self, room, building, startDate, endDate): print "Pulling in data for room : ", room data_temp = self.smapc.query("apply window(mean, field='minute',width=10) to data in ('%s','%s') where Metadata/Name='STA_%s____ART'" % (startDate, endDate, room)) data_reheat = self.smapc.query("apply window(mean, field='minute',width=10) to data in ('%s','%s') where Metadata/Name='STA_%s____RVP'" % (startDate, endDate, room)) data_flow = self.smapc.query("apply window(mean, field='minute',width=10) to data in ('%s','%s') where Metadata/Name='STA_%s___SVEL'" % (startDate, endDate, room)) data_oat = self.smapc.query("apply window(mean, field='minute',width=10) to data in ('%s','%s') where Metadata/Name='STA_%s__OAT'" % (startDate, endDate, building)) data = {} for reading in data_temp[0]["Readings"]: data[int(reading[0])] = {} data[int(reading[0])]["temp"] = float(reading[1]) for reading in data_reheat[0]["Readings"]: data[int(reading[0])]["reheat"] = float(reading[1]) for reading in data_flow[0]["Readings"]: data[int(reading[0])]["flow"] = float(reading[1]) for reading in data_oat[0]["Readings"]: data[int(reading[0])]["outtemp"] = float(reading[1]) return data
def query_data_old(VAV_Obj, sensor_name, start_date='4/1/2015', end_date='4/2/2015', interpolation_time='5min', limit=-1, externalID=None, useOptions=False): if useOptions: start_date = Options.data['starttime'] end_date = Options.data['endtime'] interpolation_time = Options.data['interpolationtime'] limit = eval(Options.data['limit']) client_obj = SmapClient(VAV_Obj.serverAddr) if (VAV_Obj.sensors is None or VAV_Obj.sensors.get(sensor_name) is None) and externalID is None: print 'no ' + sensor_name + ' info' return None if externalID is None: sensorID = VAV_Obj.sensors.get(sensor_name)[0] else: sensorID = externalID if start_date is None and end_date is None: #print 'select data before now limit ' + str(limit) + ' where uuid = \'' + self.sensors.get(sensor_name)[0] + '\'' # x = client_obj.query('select data before now limit ' + str(limit) + ' where uuid = \'' + self.sensors.get(sensor_name)[0] + '\'') q = client_obj.query('select data before now limit ' + str(limit) + ' where uuid = \'' + sensorID + '\'') else: #print 'select data in (\'' + start_date + '\', \'' + end_date + '\') limit ' + str(limit) + ' where uuid = \'' + self.sensors.get(sensor_name)[0] + '\'' q = client_obj.query('select data in (\'' + start_date + '\', \'' + end_date + '\') limit ' + str(limit) + ' where uuid = \'' + sensorID + '\'') data_table = pd.DataFrame(q[0]['Readings'], columns=['Time', sensor_name]) data_table['Time'] = pd.to_datetime(data_table['Time'].tolist(), unit='ms').tz_localize('UTC').tz_convert('America/Los_Angeles') data_table.set_index('Time', inplace=True) data_table = data_table.groupby(pd.TimeGrouper(interpolation_time)).mean().interpolate(method='linear').dropna() return data_table
def queryData(address, where=None, fullQ=None):
    """Run a metadata query against the archiver at `address`.

    where: predicate appended to "select uuid, Path where ...".
    fullQ: a complete query string; takes precedence over `where`.
    Returns the archiver's result list.
    """
    client = SmapClient(address)
    query = fullQ if fullQ is not None else "select uuid, Path where " + where
    return client.query(query)
def setup(self, opts):
    """Set up what streams are to be subsampled.

    We'll only find new streams on a restart ATM.
    """
    # Default: every stream that is not itself a derived (subsampled) stream.
    restrict = opts.get(
        "Restrict",
        "has Path and (not has Metadata/Extra/SourceStream)")
    OperatorDriver.setup(self, opts, shelveoperators=False, raw=True,
                         inherit_metadata=False)
    client = SmapClient(smapconf.BACKEND)
    source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
    for new in source_ids:
        id = str(new['uuid'])
        # Some streams carry no unit; normalize to the empty string.
        if not 'Properties/UnitofMeasure' in new:
            new['Properties/UnitofMeasure'] = ''
        # Only register operators for streams we haven't seen before.
        if not id in self.operators:
            # One 5-minute and one 1-hour subsample operator per source stream.
            o1 = SubsampleOperator([new], 300)
            self.add_operator('/%s/%s' % (id, o1.name), o1)
            o2 = SubsampleOperator([new], 3600)
            self.add_operator('/%s/%s' % (id, o2.name), o2)
    log.msg("Done setting up subsample driver; " +
            str(len(source_ids)) + " ops")
def openBMS(nodeDic):
    """Print per-mote reading counts/duty cycles and return their average.

    nodeDic: dict with 'periodStr' (archiver time-range expression),
    'period' (minutes, as string/number) and 'id' (part-number pattern).
    """
    resList = []
    c = SmapClient('http://ar1.openbms.org:8079')
    #counts = c.query(("apply count to data in (%s) streamlimit 50 "
    counts = c.query(("apply count to data in (%s) limit -1 "
                      "where Metadata/Instrument/PartNumber = '%s'")
                     % (nodeDic['periodStr'], nodeDic['id']))
    for v in counts:
        #print "readings : %s" % v['Readings']
        r = np.array(v['Readings'])
        Bcnt = int(v['Metadata']['Instrument']['PartNumber'])
        if len(r):
            # Part-number ranges appear to identify the sensor class; the
            # divisor implies one expected sample per 5 s (THL/CO2) or
            # 10 s (PIR) -- TODO confirm sampling rates.
            if( 0 < (Bcnt-1600) and (Bcnt-1600) < 151 ):
                Pcnt = np.sum(r[:, 1]) * 100 / (int(nodeDic['period']) * 60 / 5)
                print "THL : %s (%s %s)" % \
                    (v['Metadata']['Instrument']['PartNumber'], Pcnt, "%")
                print "\t(cnt : %s) - %s" % (np.sum(r[:, 1]),
                                             v['Properties']['UnitofMeasure'])
            elif( 0 < (Bcnt-2000) and (Bcnt-2000) < 151 ):
                Pcnt = np.sum(r[:, 1]) * 100 / (int(nodeDic['period']) * 60 / 5)
                print "CO2 : %s (%s %s)" % \
                    (v['Metadata']['Instrument']['PartNumber'], Pcnt, "%")
                print "\t(cnt : %s) - %s" % (np.sum(r[:, 1]),
                                             v['Properties']['UnitofMeasure'])
            elif( 0 < (Bcnt-5000) and (Bcnt-5000) < 151 ):
                Pcnt = np.sum(r[:, 1]) * 100 / (int(nodeDic['period']) * 60 / 10)
                print "PIR : %s (%s %s)" % \
                    (v['Metadata']['Instrument']['PartNumber'], Pcnt, "%")
                print "\t(cnt : %s) - %s" % (np.sum(r[:, 1]),
                                             v['Properties']['UnitofMeasure'])
            # NOTE(review): if the part number matches none of the ranges
            # above, Pcnt is unbound here and this raises NameError --
            # presumably the query guarantees a match; confirm.
            resList.append(Pcnt)
    #print r[:, 0] #date
    #print r[:, 1] #data
    #prrs.append(np.sum(r[:, 1]) / (3600 * (HOURS) / rate))
    return average(resList)
def setup(self, opts):
    """Group matching streams by a metadata tag and attach one operator
    (self.operator_class) per group."""
    self.restrict = opts.get("Restrict")  # metadata query selecting input streams
    self.group = opts.get("Group")        # tag name the streams are grouped on
    self.opstr = opts.get("Operator")     # operator spec; not used below -- TODO confirm
    OperatorDriver.setup(self, opts, self.restrict, shelveoperators=False)
    # look up the streams, units, and group tags.
    client = SmapClient()
    streams = client.tags(self.restrict, '*')
    # 'uuid, Properties/UnitofMeasure, Metadata/SourceName, %s' %
    # self.group)
    #print streams
    groupitems = {}
    # find the groups
    for s in streams:
        if not s[self.group] in groupitems:
            groupitems[s[self.group]] = []
        groupitems[s[self.group]].append(s)
    # instantiate one operator per group with the appropriate inputs
    for group, tags in groupitems.iteritems():
        inputs = map(operator.itemgetter('uuid'), tags)  # NOTE(review): computed but unused
        op = self.operator_class(tags)
        path = '/' + util.str_path(group)
        self.add_operator(path, op)
def current(self):
    """Classify the latest reading of every stream matching self.select.

    Returns (latest, worst) where `latest` is the archiver's latest-reading
    list (each entry annotated with a 'level' dict when it had a reading)
    and `worst` is the maximum alarm level seen.
    """
    c = SmapClient()
    latest = c.latest(self.select, streamlimit=1000)
    test = self.get_test()
    levels = []
    for v in latest:
        if len(v['Readings']):
            # test() classifies the most recent value; take its level object.
            level = test(v['Readings'][0][1])[0]
            v['level'] = {
                "priority": level.priority,
                "description": level.description,
            }
            levels.append(level)
    # NOTE(review): max() raises ValueError if no stream had any readings.
    return latest, max(levels)
def setup(self,opts):
    """Scan source streams and construct republish clients for new uuids.

    NOTE(review): this looks unfinished -- the RepublishClient() is built
    with no arguments, its result is discarded, and no operator is ever
    registered for the new uuid.
    """
    restrict=opts.get('Restrict','')
    # client=SmapClient()
    client=SmapClient(HOST)
    source_ids=client.tags(restrict,'uuid, Properties/UnitofMeasure')
    # NOTE(review): loop variable `id` shadows the builtin.
    for id in source_ids:
        ""
        uuid=str(id['uuid'])
        # Normalize missing units to the empty string.
        if not 'Properties/UnitofMeasure' in id:
            id['Properties/UnitofMeasure'] = ''
        if not uuid in self.operators:
            ""
            # Constructed and immediately dropped -- see docstring note.
            RepublishClient()
def setup(self, opts):
    """Configure the data-gap alert driver: archiver client, gap/poll
    thresholds, SMTP connection, and notification target."""
    self.client = SmapClient(opts.get('url', 'http://new.openbms.org/backend'))
    # Thresholds, all in seconds.
    self.limit = float(opts.get('limit', 300))
    self.rate = float(opts.get('rate', 300))
    self.alert_interval = float(opts.get('alert_interval', 86400))
    # Outgoing mail over STARTTLS.
    self.email = opts.get('email_address')
    self.server = smtplib.SMTP(opts.get('smtp_server'), 587)
    self.server.starttls()
    self.server.login(self.email, opts.get('password'))
    # What to watch and whom to notify.
    self.restrict = opts.get('restrict')
    self.recipient = opts.get('recipient')
    self.carrier = opts.get('carrier')
def setup(self, opts, restrict=None, shelveoperators=False, cache=True, raw=False):
    """Common OperatorDriver setup: data source, restriction, and the
    operator table (optionally persisted to a shelve).

    restrict: metadata predicate selecting input streams.
    raw: when False, exclude operator-produced streams from the inputs.
    """
    # How much history to pull per load request.
    self.load_chunk_size = datetime.timedelta(hours=int(opts.get('ChunkSize', 24)))
    self.source_url = opts.get('SourceUrl', 'http://new.openbms.org/backend')
    # self.source_url = opts.get('SourceUrl', 'http://ar1.openbms.org:8079')
    if not raw and restrict:
        # Never feed an operator its own (or another operator's) output.
        self.restrict = '(' + restrict + ') and not has Metadata/Extra/Operator'
    else:
        self.restrict = restrict
    if shelveoperators:
        # Persist operator state across restarts.
        self.operators = shelve.open(opts.get('OperatorCache', '.operators'),
                                     protocol=2, writeback=True)
        # sync the operator state periodically and at exit
        util.periodicCallInThread(self.operators.sync).start(60)
        reactor.addSystemEventTrigger('after', 'shutdown',
                                      self.operators.close)
    else:
        self.operators = {}
    self.oplist = []
    self.arclient = SmapClient(self.source_url)
    self.cache = cache
    # create timeseries from cached operator state
    # NOTE(review): assumes each shelve value is a mapping whose values are
    # (path, op) pairs; with shelveoperators=False this loop is a no-op.
    for sid, oplist in self.operators.iteritems():
        for path, op in oplist.itervalues():
            self.add_operator(path, op)
def setup(self, opts): self.rate = float(opts.get('rate',10)) # Current state of the points self.heatSP=int(opts.get('defaultHeatSetpoint',68)) self.coolSP=int(opts.get('defaultCoolSetpoint',76)) self.therm_temp = 70 self.trim = int(opts.get('trim',0)) # dummy zoneCtrl action # create timeseries for zone controller actions heatSetPoint = self.add_timeseries('/heatSetpoint', 'F', data_type='double') coolSetPoint = self.add_timeseries('/coolSetpoint', 'F', data_type='double') # add actuators to them heatSetPoint.add_actuator(setpointActuator(controller=self, range=(40,90))) coolSetPoint.add_actuator(setpointActuator(controller=self, range=(40,90))) # get master set point stream paths self.archiver_url = opts.get('archiver_url','http://localhost:8079') self.heatSPwhere = opts.get('heatSPwhere', '') self.coolSPwhere = opts.get('coolSPwhere', '') self.thermwhere = opts.get('thermwhere', '') self.tempwhere = opts.get('tempwhere', '') print "ZoneController: heat sp where = ", self.heatSPwhere print "ZoneController: cool sp where = ", self.coolSPwhere print "ZoneController: thermostat where = ", self.thermwhere print "ZoneController: temp sensor where = ", self.tempwhere self.client = SmapClient(self.archiver_url) self.heatSPclient = RepublishClient(self.archiver_url, self.heatSPcb, restrict=self.heatSPwhere) self.coolSPclient = RepublishClient(self.archiver_url, self.coolSPcb, restrict=self.coolSPwhere) #self.tempclient = RepublishClient(self.archiver_url, self.tempcb, restrict=self.tempwhere) self.thermclient = RepublishClient(self.archiver_url, self.thermcb, restrict=self.thermwhere)
class DataClient(object): def __init__(self,login=None): """""" if login == None: login={} config=Config(Env.getpath('HOME')+'/common/weatherplotter.conf') login['host']=config['smap_server_host'] login['port']=config["smap_server_port"] print login self.login=login else: self.login=login print "http://%(host)s:%(port)s" % self.login self.c = SmapClient("http://%(host)s:%(port)s" % self.login) def get_data(self,uuid,start,end,limit=-1): # print start # print end startTime = dtutil.dt2ts(dtutil.strptime_tz(start, "%m/%d/%Y %H:%M:%S %p" )) endTime = dtutil.dt2ts(dtutil.strptime_tz(end, "%m/%d/%Y %H:%M:%S %p")) return self.c.data_uuid(uuid, startTime, endTime,True,limit)
def send_alert(self, to, alert, streams, level):
    """Render the alert template for the firing streams, log it, and email it.

    to: recipient address(es); alert: Alert model instance;
    streams: dict mapping uuid -> {'time': ms_timestamp, 'value': reading};
    level: severity object/string interpolated into the subject.
    """
    # look up the tags for these streams
    uuids = set(streams.keys())
    uuids = map(lambda u: "uuid = '%s'" % u, uuids)
    client = SmapClient()
    tags = client.tags(' or '.join(uuids), nest=True)
    tags = dict(((x['uuid'], x) for x in tags))

    def make_context(params):
        # Merge the alert-time reading into each stream's tag dict; streams
        # the archiver no longer knows about are silently dropped.
        rv = []
        for uid, state in params.iteritems():
            t, v = state['time'], state['value']
            if uid in tags:
                rv.append(tags[uid])
                rv[-1]['AlertTime'] = time.ctime(t / 1000)  # ms -> s
                rv[-1]['AlertValue'] = v
        return rv

    context = make_context(streams)
    # Save the log entry first so the rendered message can link to its id.
    logentry = Log(alert=alert, when=datetime.datetime.now())
    logentry.save()
    # generate the text to send, by building a context for our
    # template.
    template = Template(self.template)
    context = Context({
        'streams': context,
        'level': level,
        'permalink': settings.ROOT_NETLOC + '/admin/alert/log/' + str(logentry.id),
        'alarmpage': settings.ROOT_NETLOC + '/admin/alert/alert/' + str(alert.id),
        'timestamp': logentry.when,
        'alarm': alert.__unicode__(),
    })
    logentry.message = template.render(context)
    print logentry.message
    logentry.save()
    emaillib.send(to, '%s from %s' % (level, settings.ROOT_NETLOC),
                  logentry.message)
def __init__(self):
    """Static configuration for SDH AHU-2 analysis: component inventories,
    datapoints per component type, output units, archiver restrictions,
    and the archiver client itself."""
    # vav,chw,vfd naming
    # VAV boxes that have a reheat valve.
    self.vavs_rhv = [
        'S1-01', 'S1-02', 'S1-03', 'S1-04', 'S1-07', 'S1-08', 'S1-09',
        'S1-10', 'S1-13', 'S1-15', 'S1-16', 'S1-17', 'S1-18', 'S1-19',
        'S1-20', 'S2-01', 'S2-02', 'S2-03', 'S2-04', 'S2-05', 'S2-06',
        'S2-07', 'S2-10', 'S2-11', 'S2-12', 'S2-13', 'S2-14', 'S2-15',
        'S2-16', 'S2-17', 'S2-18', 'S2-19', 'S2-20', 'S2-21', 'S3-01',
        'S3-02', 'S3-03', 'S3-04', 'S3-05', 'S3-06', 'S3-07', 'S3-08',
        'S3-09', 'S3-10', 'S3-11', 'S3-12', 'S3-15', 'S3-16', 'S3-17',
        'S3-18', 'S3-19', 'S3-20', 'S3-21', 'S4-01', 'S4-02', 'S4-03',
        'S4-04', 'S4-05', 'S4-06', 'S4-07', 'S4-08', 'S4-09', 'S4-11',
        'S4-12', 'S4-13', 'S4-15', 'S4-16', 'S4-18', 'S4-19', 'S4-20',
        'S4-21', 'S5-01', 'S5-02', 'S5-03', 'S5-04', 'S5-05', 'S5-06',
        'S5-07', 'S5-08', 'S5-09', 'S5-10', 'S5-11', 'S5-12', 'S5-13',
        'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21', 'S6-01',
        'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06', 'S6-07', 'S6-08',
        'S6-10', 'S6-11', 'S6-12', 'S6-13', 'S6-15', 'S6-17', 'S6-18',
        'S6-19', 'S6-20', 'S7-01', 'S7-02', 'S7-03', 'S7-04', 'S7-05',
        'S7-06', 'S7-07', 'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14',
        'S7-15', 'S7-16'
    ]
    # VAV boxes without a reheat valve.
    self.vavs_no_rhv = [
        'S1-05', 'S1-06', 'S1-14', 'S2-08', 'S2-09', 'S3-13', 'S3-14',
        'S4-10', 'S4-14', 'S4-17', 'S5-15', 'S5-17', 'S6-09', 'S6-14',
        'S6-16', 'S7-11', 'S7-12'
    ]
    self.vavs = self.vavs_rhv + self.vavs_no_rhv
    self.chw_coils = ['AH2A', 'AH2B']
    self.vfds = ['AH2A', 'AH2B']
    # define component fields
    self.components_by_type = {
        'vfds': self.vfds,
        'chw_coils': self.chw_coils,
        'vavs_rhv': self.vavs_rhv,
        'vavs_no_rhv': self.vavs_no_rhv
    }
    # Datapoint (stream) names collected per component type.
    self.datapoints_by_type = {
        'vfds': ['SF_VFD:POWER'],
        'chw_coils': ['SAT', 'MAT', 'CCV', 'SF_CFM'],
        'vavs_no_rhv': ['AIR_VOLUME', 'CTL_FLOW_MIN', 'CTL_FLOW_MAX',
                        'CTL_STPT'],
        'vavs_rhv': ['AIR_VOLUME', 'CTL_FLOW_MIN', 'CTL_FLOW_MAX',
                     'CTL_STPT', 'VLV_POS', 'CLG_LOOPOUT', 'HTG_LOOPOUT',
                     'AI_3', 'rhv_closed_temp_change']
    }
    # Units for the derived/estimated output streams.
    self.unit_of_estimated_data = {
        'rhw_cost': '$/hr',
        'fan_cost': '$/hr',
        'chw_cost': '$/hr',
        'tot_cost': '$/hr',
        'chw_power': 'kW',
        'chw_power_AH2A': 'kW',
        'chw_power_AH2B': 'kW',
        'rhw_power': 'kW',
        'fan_power': 'kW',
        'fan_power_from_regression_curve': 'kW',
        'fan_power_AH2A': 'kW',
        'fan_power_AH2B': 'kW',
        'ahu_afr': 'cfm'
    }
    # define data restriction
    # Central-plant / AHU streams.
    self.restrict_central = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path ~ 'AH2' and " \
        + "(Path ~ 'SAT' or Path ~ 'MAT' or Path ~ 'SF_CFM' or Path ~ 'CCV' or Path ~ 'SF_VFD:POWER') and " \
        + "not (Path ~ 'STP' or Path ~ 'RAH')"
    # Zone-level VAV streams.
    self.restrict_local = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path ~ 'S[0-9]-[0-9][0-9]' and " \
        + "not (Path ~ 'act') and (Path ~ 'AI_3' or Path ~ 'VLV_POS' or Path ~ 'HTG_LOOPOUT' or " \
        + "Path ~ 'CLG_LOOPOUT' or Path ~ 'AIR_VOLUME' or Path ~ 'ROOM_TEMP' or " \
        + "Path ~ 'CTL_FLOW_MIN' or Path ~ 'CTL_FLOW_MAX' or Path ~ 'CTL_STPT')"
    # Previously-computed SAT-reset result streams.
    # NOTE(review): mixed and/or without parentheses -- verify the archiver's
    # precedence matches the intended grouping here.
    self.restrict_result = "Metadata/SourceName = 'Sutardja Dai Hall SAT Reset' and Path ~ 'SAT_Reset_debug_20160719' and " \
        + "Path ~ 'rhv_closed_temp_change' or Path ~ 'computed_sat' and " \
        + "not Path ~ 'SAT_Reset_debug_20160719_C'"
    self.restrict_new = "(%s) or (%s) or (%s)" % \
        (self.restrict_central, self.restrict_local, self.restrict_result)
    # Outside-air temperature point.
    self.restrict_oat = "Metadata/SourceName = 'Sutardja Dai Hall BACnet' and Path = '/Siemens/SDH.PXCM-08/SDH/OAT'"
    # create sMAP client object
    self.archiver_url = 'http://new.openbms.org/backend'
    self.smap_client = SmapClient(self.archiver_url)
    # parameters
    self.limit = 40
class DataClient(object):
    """Thin wrapper around SmapClient for fetching readings by uuid."""

    def __init__(self, login):
        """login: dict with 'host' and 'port' used to build the archiver URL."""
        self.c = SmapClient("http://%(host)s:%(port)s" % login)

    def get_data(self, uuid, start, end, limit=-1):
        """Return readings for `uuid` between `start` and `end`.

        start/end: local-time strings in "%m/%d/%Y %H:%M:%S %p" format.
        """
        fmt = "%m/%d/%Y %H:%M:%S %p"
        first = dtutil.dt2ts(dtutil.strptime_tz(start, fmt))
        last = dtutil.dt2ts(dtutil.strptime_tz(end, fmt))
        return self.c.data_uuid(uuid, first, last, True, limit)
def setup(self, opts):
    """Set up what streams are to be subsampled.

    We'll only find new streams on a restart ATM.
    """
    # Default: every stream that is not itself a derived (subsampled) stream.
    restrict = opts.get("Restrict",
                        "has Path and (not has Metadata/Extra/SourceStream)")
    OperatorDriver.setup(self, opts, shelveoperators=False, raw=True,
                         inherit_metadata=False)
    client = SmapClient(smapconf.BACKEND)
    source_ids = client.tags(restrict, 'uuid, Properties/UnitofMeasure')
    for new in source_ids:
        id = str(new['uuid'])
        # Some streams carry no unit; normalize to the empty string.
        if not 'Properties/UnitofMeasure' in new:
            new['Properties/UnitofMeasure'] = ''
        # Only register operators for streams we haven't seen before.
        if not id in self.operators:
            # One 5-minute and one 1-hour subsample operator per source stream.
            o1 = SubsampleOperator([new], 300)
            self.add_operator('/%s/%s' % (id, o1.name), o1)
            o2 = SubsampleOperator([new], 3600)
            self.add_operator('/%s/%s' % (id, o2.name), o2)
    log.msg("Done setting up subsample driver; " +
            str(len(source_ids)) + " ops")
def send_alert(self, to, alert, streams, level):
    """Render the alert template for the firing streams, log it, and email it.

    to: recipient address(es); alert: Alert model instance;
    streams: dict mapping uuid -> {'time': ms_timestamp, 'value': reading};
    level: severity object/string interpolated into the subject.
    """
    # look up the tags for these streams
    uuids = set(streams.keys())
    uuids = map(lambda u: "uuid = '%s'" % u, uuids)
    client = SmapClient()
    tags = client.tags(' or '.join(uuids), nest=True)
    tags = dict(((x['uuid'], x) for x in tags))

    def make_context(params):
        # Merge the alert-time reading into each stream's tag dict; streams
        # the archiver no longer knows about are silently dropped.
        rv = []
        for uid, state in params.iteritems():
            t, v = state['time'], state['value']
            if uid in tags:
                rv.append(tags[uid])
                rv[-1]['AlertTime'] = time.ctime(t/1000)  # ms -> s
                rv[-1]['AlertValue'] = v
        return rv

    context = make_context(streams)
    # Save the log entry first so the rendered message can link to its id.
    logentry = Log(alert=alert, when=datetime.datetime.now())
    logentry.save()
    # generate the text to send, by building a context for our
    # template.
    template = Template(self.template)
    context = Context({
        'streams' : context,
        'level' : level,
        'permalink' : settings.ROOT_NETLOC + '/admin/alert/log/' + str(logentry.id),
        'alarmpage' : settings.ROOT_NETLOC + '/admin/alert/alert/' + str(alert.id),
        'timestamp' : logentry.when,
        'alarm' : alert.__unicode__(),
    })
    logentry.message = template.render(context)
    print logentry.message
    logentry.save()
    emaillib.send(to, '%s from %s' % (level, settings.ROOT_NETLOC),
                  logentry.message)
def read_stream_data(self, num_days=1): self.points = {} c = SmapClient("http://new.openbms.org/backend") for point in self.input_variables: q = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \ ( self.rate, num_days, point ) print q result = c.query(q) readings = result[0]["Readings"] self.points[point] = [ r[1] for r in result[0]["Readings"] ] for point in self.state_variables: query = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \ ( self.rate, num_days, point ) result = c.query(query) readings = result[0]["Readings"] self.points[point] = [ r[1] for r in result[0]["Readings"] ] self.predictions = [] self.model_params = [] self.actual_outputs = []
def read_stream_data(self, num_days=1): self.points = {} c = SmapClient("http://new.openbms.org/backend") for point in self.input_variables: q = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \ ( self.rate, num_days, point ) print q result = c.query(q) readings = result[0]["Readings"] self.points[point] = [r[1] for r in result[0]["Readings"]] for point in self.state_variables: query = "apply window(mean, field='second',width='%d') to data in (\"03/01/2015\" -%ddays, \"03/07/2015\") where Metadata/Name='%s'" % \ ( self.rate, num_days, point ) result = c.query(query) readings = result[0]["Readings"] self.points[point] = [r[1] for r in result[0]["Readings"]] self.predictions = [] self.model_params = [] self.actual_outputs = []
def setup(self, opts):
    """Configure the data-gap alert driver: archiver, thresholds, SMTP."""
    url = opts.get('url', 'http://new.openbms.org/backend')
    self.client = SmapClient(url)
    self.limit = float(opts.get('limit', 300))  # Seconds
    self.rate = float(opts.get('rate', 300))
    self.alert_interval = float(opts.get('alert_interval', 86400))  # Minimum seconds between alerts
    smtp_server = opts.get('smtp_server')
    self.server = smtplib.SMTP(smtp_server, 587)
    self.server.starttls()  # upgrade to TLS before sending credentials
    self.email = opts.get('email_address')
    pw = opts.get('password')
    self.server.login(self.email, pw)
    self.restrict = opts.get('restrict')    # which streams to watch
    self.recipient = opts.get('recipient')
    self.carrier = opts.get('carrier')      # presumably an SMS gateway domain -- TODO confirm
def __init__(self,login=None): """""" if login == None: login={} config=Config(Env.getpath('HOME')+'/common/weatherplotter.conf') login['host']=config['smap_server_host'] login['port']=config["smap_server_port"] print login self.login=login else: self.login=login print "http://%(host)s:%(port)s" % self.login self.c = SmapClient("http://%(host)s:%(port)s" % self.login)
class Alert(SmapDriver): def setup(self, opts): url = opts.get('url', 'http://new.openbms.org/backend') self.client = SmapClient(url) self.limit = float(opts.get('limit', 300)) # Seconds self.rate = float(opts.get('rate', 300)) self.alert_interval = float(opts.get( 'alert_interval', 86400)) # Minimum seconds between alerts smtp_server = opts.get('smtp_server') self.server = smtplib.SMTP(smtp_server, 587) self.server.starttls() self.email = opts.get('email_address') pw = opts.get('password') self.server.login(self.email, pw) self.restrict = opts.get('restrict') self.recipient = opts.get('recipient') self.carrier = opts.get('carrier') def start(self): self.process = periodicSequentialCall(self.read) self.process.start(self.rate) def read(self): data = self.client.latest(self.restrict, streamlimit=-1) for d in data: uuid = d["uuid"] latest = d["Readings"][0][0] / 1000 now = time.time() gap = now - latest if gap > self.limit: self.alert(uuid) self.process.stop() reactor.callLater(self.alert_interval, self.start) break def alert(self, uuid): message = '\nGap of more than %s seconds in data for %s: uuid=%s' % ( self.limit, self.restrict, uuid) print message self.server.sendmail(self.email, "%s@%s" % (self.recipient, self.carrier), message) self.process.stop()
class Alert(SmapDriver):
    """Driver that emails/texts when a watched stream stops reporting."""

    def setup(self, opts):
        """Read configuration and open archiver and SMTP connections."""
        url = opts.get('url', 'http://new.openbms.org/backend')
        self.client = SmapClient(url)
        self.limit = float(opts.get('limit', 300))  # Seconds
        self.rate = float(opts.get('rate', 300))
        self.alert_interval = float(opts.get('alert_interval', 86400))  # Minimum seconds between alerts
        smtp_server = opts.get('smtp_server')
        self.server = smtplib.SMTP(smtp_server, 587)
        self.server.starttls()
        self.email = opts.get('email_address')
        pw = opts.get('password')
        self.server.login(self.email, pw)
        self.restrict = opts.get('restrict')
        self.recipient = opts.get('recipient')
        self.carrier = opts.get('carrier')  # presumably an SMS gateway domain -- TODO confirm

    def start(self):
        # Poll every self.rate seconds.
        self.process = periodicSequentialCall(self.read)
        self.process.start(self.rate)

    def read(self):
        """Alert (then back off) on the first stream whose newest reading
        is older than self.limit seconds."""
        data = self.client.latest(self.restrict, streamlimit=-1)
        for d in data:
            uuid = d["uuid"]
            latest = d["Readings"][0][0] / 1000  # archiver ms -> s
            now = time.time()
            gap = now - latest
            if gap > self.limit:
                self.alert(uuid)
                self.process.stop()
                # Resume polling after the quiet interval.
                reactor.callLater(self.alert_interval, self.start)
                break

    def alert(self, uuid):
        # Send the gap notification and stop polling.
        message = '\nGap of more than %s seconds in data for %s: uuid=%s' % (self.limit,
                                                                             self.restrict,
                                                                             uuid)
        print message
        self.server.sendmail(self.email,
                             "%s@%s" % (self.recipient, self.carrier),
                             message)
        self.process.stop()
from smap.archiver.client import SmapClient
import time
import datetime
import json
import pandas as pd
#pd.options.display.mpl_style = 'default'

# Archiver for the CIEE test building.
client = SmapClient('http://ciee.cal-sdb.org:8079')

# timestamps
end = int(time.time())
start = end - 60*60*24*30  # last month
print start, end


def get_demand():
    """Return kW demand for the window as a time-indexed DataFrame."""
    # get energy data for same timeframe
    res = client.query('select uuid where Metadata/System = "Monitoring" and Properties/UnitofMeasure = "kW"')
    uuids = [x['uuid'] for x in res]
    data = dict(zip(uuids, client.data_uuid(uuids, start, end, cache=False)))
    # create dataframe, use time as index
    # NOTE(review): only the first stream's readings are used (data.values()[0]).
    demand = pd.DataFrame(data.values()[0])
    demand[0] = pd.to_datetime(demand[0], unit='ms')
    demand.index = demand[0]
    del demand[0]
    return demand


def get_hvacstates():
    """Fetch all hvac_state series for the window.

    NOTE(review): truncated in this view -- no return statement visible.
    """
    # get all hvac_state timeseries
    res = client.query('select uuid where Metadata/System = "HVAC" and Path like "%hvac_state"')
    uuids = [x['uuid'] for x in res]
    data = dict(zip(uuids, client.data_uuid(uuids, start, end, cache=False)))
class ZoneController(driver.SmapDriver):
    """sMAP driver that publishes heat/cool setpoints for a zone, following
    master setpoint streams and a thermostat via archiver subscriptions."""

    def setup(self, opts):
        """Create output setpoint streams (with actuators) and the archiver
        subscriptions for the master setpoint/thermostat feeds."""
        self.rate = float(opts.get('rate', 10))
        # Current state of the points
        self.heatSP = int(opts.get('defaultHeatSetpoint', 68))
        self.coolSP = int(opts.get('defaultCoolSetpoint', 76))
        self.therm_temp = 70
        self.trim = int(opts.get('trim', 0))  # dummy zoneCtrl action
        # create timeseries for zone controller actions
        heatSetPoint = self.add_timeseries('/heatSetpoint', 'F', data_type='double')
        coolSetPoint = self.add_timeseries('/coolSetpoint', 'F', data_type='double')
        # add actuators to them
        heatSetPoint.add_actuator(setpointActuator(controller=self, range=(40, 90)))
        coolSetPoint.add_actuator(setpointActuator(controller=self, range=(40, 90)))
        # get master set point stream paths
        self.archiver_url = opts.get('archiver_url', 'http://localhost:8079')
        self.heatSPwhere = opts.get('heatSPwhere', '')
        self.coolSPwhere = opts.get('coolSPwhere', '')
        self.thermwhere = opts.get('thermwhere', '')
        self.tempwhere = opts.get('tempwhere', '')
        print "ZoneController: heat sp where = ", self.heatSPwhere
        print "ZoneController: cool sp where = ", self.coolSPwhere
        print "ZoneController: thermostat where = ", self.thermwhere
        print "ZoneController: temp sensor where = ", self.tempwhere
        self.client = SmapClient(self.archiver_url)
        self.heatSPclient = RepublishClient(self.archiver_url, self.heatSPcb,
                                            restrict=self.heatSPwhere)
        self.coolSPclient = RepublishClient(self.archiver_url, self.coolSPcb,
                                            restrict=self.coolSPwhere)
        #self.tempclient = RepublishClient(self.archiver_url, self.tempcb, restrict=self.tempwhere)
        self.thermclient = RepublishClient(self.archiver_url, self.thermcb,
                                           restrict=self.thermwhere)

    def start(self):
        """Connect all subscriptions and begin the periodic control loop."""
        print "zone controller start: ", self.rate
        self.heatSPclient.connect()  # activate subscription scheduler setpoints
        self.coolSPclient.connect()
        #self.tempclient.connect()
        self.thermclient.connect()
        periodicSequentialCall(self.read).start(self.rate)

    def read(self):
        """Poll latest zone temperatures and publish adjusted setpoints.

        NOTE(review): divides by len(all_readings) -- raises ZeroDivisionError
        if no streams match self.tempwhere.
        """
        all_readings = self.client.latest(self.tempwhere)
        for p in all_readings:
            print '-'*20
            md = self.client.tags('uuid = "' + p['uuid'] + '"')[0]
            print 'Room:', md['Metadata/Room']
            print 'Reading:', p['Readings'][0][1]
            ts = dtutil.ts2dt(p['Readings'][0][0] / 1000)  # ms -> s
            print 'Time:', dtutil.strftime_tz(ts, tzstr='America/Los_Angeles')
        avg_room_temp = sum([x['Readings'][0][1] for x in all_readings]) / float(len(all_readings))
        # get difference between avg room temperature and thermostat temperature
        new_diff = self.therm_temp - avg_room_temp
        # periodically update output streams. Here a bogus adjustment
        self.add('/heatSetpoint', self.heatSP + new_diff)
        self.add('/coolSetpoint', self.coolSP + new_diff)
        print "zone controller publish: ", self.heatSP, self.coolSP

    # Event handler for publication to heatSP stream
    def heatSPcb(self, _, data):
        # data: list of arrays of [time, val]; keep the newest value.
        print "ZoneController heatSPcb: ", data
        mostrecent = data[-1][-1]
        self.heatSP = mostrecent[1]

    def coolSPcb(self, _, data):
        # data: list of arrays of [time, val]; keep the newest value.
        print "ZoneController coolSPcb: ", data
        mostrecent = data[-1][-1]
        self.coolSP = mostrecent[1]

    def tempcb(self, _, data):
        # data: list of arrays of [time, val]; currently log-only.
        print "ZoneController tempcb: ", data

    def thermcb(self, _, data):
        # data: list of arrays of [time, val]; track the newest thermostat temp.
        print "ZoneController thermcb: ", data
        self.therm_temp = data[-1][-1][1]
from smap.util import periodicSequentialCall from smap.contrib import dtutil from smap.util import find from datetime import timedelta, date import numpy as np import pandas as pd import pdb import csv import shutil import time import pprint as pp import datetime c = SmapClient(base='http://new.openbms.org/backend',\ key=['WE4iJWG7k575AluJ9RJyAZs25UO72Xu0b4RA',\ 'SA2nYWuHrJxmPNK96pdLKhnSSYQSPdALkvnA']) t = time.time() source = 'Brower BACnet' path_list_and = ['Brower', 'Field_Bus1'] path_list_or=[ 'BrowerAHU2/DA-T', 'BrowerAHU2/OA-T', 'BrowerAHU2/SF-value', 'BrowerAHU2/SF-speed', 'Plant/Condenser.CWP7-speed', 'Plant/Condenser.CWP8-speed', 'Plant/Condenser.CWS-T', 'Plant/Condenser.CWR-T', 'Plant/Condenser.HXR-T', 'Plant/Condenser.HXS-T',
"""Example code plotting one day's worth of outside air time-series. @author Stephen Dawson-Haggerty <*****@*****.**> """ from smap.archiver.client import SmapClient from smap.contrib import dtutil from matplotlib import pyplot from matplotlib import dates # make a client c = SmapClient("http://www.openbms.org/backend") # start and end values are Unix timestamps start = dtutil.dt2ts(dtutil.strptime_tz("3-1-2013", "%m-%d-%Y")) end = dtutil.dt2ts(dtutil.strptime_tz("3-2-2013", "%m-%d-%Y")) # hard-code the UUIDs we want to download oat = [ "395005af-a42c-587f-9c46-860f3061ef0d", "9f091650-3973-5abd-b154-cee055714e59", "5d8f73d5-0596-5932-b92e-b80f030a3bf7", "ec2b82c2-aa68-50ad-8710-12ee8ca63ca7", "d64e8d73-f0e9-5927-bbeb-8d45ab927ca5" ] # perform the download data = c.data_uuid(oat, start, end) # plot all the data
from smap.archiver.client import SmapClient
from datetime import timedelta, date
import sys, os, pdb
import time
import pandas as pd
import numpy as np
import datetime
import math

# Archiver client (API key left commented out).
c = SmapClient(base='http://new.openbms.org/backend')
# key=['XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8'])

# Main building power meters (MSA/MSB 3-phase real power).
uuid_dict = {
    'uuid1': {
        'u': "b7051656-d8d5-53dd-9221-15de4ce84e43",
        'name': "MSA.MAIN.PWR_REAL_3_P"
    },
    'uuid2': {
        'u': "cc1dfe56-3abc-544e-add6-1bc88712fc90",
        'name': "MSB.MAIN.PWR_REAL_3_P"
    }
}
# Restrict to exactly those two meters from the SDH BACnet source.
restrict = 'Metadata/SourceName = "Sutardja Dai Hall BACnet" and (uuid ="%s" or uuid = "%s")'\
    % (str(uuid_dict['uuid1']['u']), str(uuid_dict['uuid2']['u']))
#pdb.set_trace()
#TODO: Change Date range here
startF = date(2015, 7, 17)
endF = date(2016, 7, 17)
#Create an empty dataframe to put the data in
# NOTE(review): mktime on a date's timetuple yields local-midnight epoch seconds.
dts_startF = time.mktime(startF.timetuple())
import os import sys from smap.archiver.client import SmapClient from VavDataReader import importVavData import pandas as pd qStr = 'select Path, uuid where Path like "%S_-%" and Metadata/SourceName = "Sutardja Dai Hall BACnet"' validVAVs = importVavData(server='http://www.openbms.org/backend', query=qStr) interpolation_time = '5T' c = SmapClient("http://new.openbms.org/backend") outputDir = "Data" if len(sys.argv) < 2: print "No output directory provided. Using default <Data>" else: outputDir = sys.argv[1].strip() if os.path.exists(outputDir): if not os.path.isdir(outputDir): print "File with the same name exists. Delete it first" exit() else: os.makedirs(outputDir) startDate = "6/08/2015" endDate = "6/09/2015" numRooms = len(validVAVs) count = 0 for room in validVAVs:
from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import pdb, time, datetime
import numpy as np
import pdb
import smtplib
import pprint as pp

c = SmapClient("http://new.openbms.org/backend")
t = time.time()

source = 'Sutardja Dai Hall TAV'
# Streams known to produce bad readings.
error_uuid_list = ['36335391-49a6-5b21-9f00-fcce66eb5a74']
N = len(error_uuid_list)
# Build: SourceName = ... and (uuid = '...' or uuid = '...').
where = "Metadata/SourceName = '%s' and (" %(source)\
    + ' or\n '.join(["uuid = '%s'"] * N) \
    %tuple(error_uuid_list) + ")"
tags = c.tags(where)
error_dict = {}
# Hard-coded example error events as [timestamp_ms, value] pairs.
error_list = [[1441431059000.0, 1.0], [1441430819000.0, 1.0]]
#error_list = [[1441605186000.0, 0.0]]
pp.pprint(error_list)
# Index the returned tags by point name (second path component).
for tag in tags:
    name = str(tag['Path'].split('/')[1])
    u = str(tag['uuid'])
    if name not in error_dict:
        error_dict[name] = {}
    # NOTE(review): membership test checks the uuid string as a dict key,
    # which is never set below ('uuid'/'Readings' are the keys) -- so this
    # condition is always true; confirm intent.
    if u not in error_dict[name]:
        error_dict[name]['uuid'] = u
        error_dict[name]['Readings'] = []
import os
import sys
from smap.archiver.client import SmapClient
from smap.contrib import dtutil
import re
import json

c = SmapClient("http://new.openbms.org/backend",
               key="NAXk19YY45TTiXlajiQGQ8KTp283oHfp2Uly")
# All rooms instrumented at the STA site.
rooms = c.query("select distinct Metadata/room-id where Metadata/site='STA'")
metadata = {}
count = 0
numRooms = len(rooms)
for room in rooms:
    count += 1
    print "Building Metadata for room : %s (%d/%d)" % (room, count, numRooms)
    metadata[room] = {}
    sensors = c.query("select * where Metadata/room-id='" + str(room) + "' and Metadata/site='STA'")
    for i in range(len(sensors)):
        # Skip streams that have no point name.
        if "Name" not in sensors[i]["Metadata"]:
            continue
        pointName = sensors[i]["Metadata"]["Name"]
        roomMetadata = sensors[i]["Metadata"]
        # Map sensor roles (metadata flags) to point names for this room.
        if "room_temp" in roomMetadata:
            metadata[room]["room_temp"] = pointName
        if "supply_air_velocity" in roomMetadata or "supply_air_volume" in roomMetadata:
            metadata[room]["supply_air_velocity"] = pointName
        if "reheat_valve_position" in roomMetadata:
            metadata[room]["reheat_valve_position"] = pointName
import os import sys from smap.archiver.client import SmapClient from smap.contrib import dtutil import re import json inf = open("room_metadata") roomMetadata = json.load(inf) c = SmapClient("http://new.openbms.org/backend") outputDir = "data" if len(sys.argv) < 2: print "No output directory provided. Using default <data>" else: outputDir = sys.argv[1].strip() if os.path.exists(outputDir): if not os.path.isdir(outputDir): print "File with the same name exists. Delete it first" exit() else: os.makedirs(outputDir) startDate = "05/27/2015" endDate = "07/04/2015" numRooms = len(roomMetadata) count = 0 for room in roomMetadata:
'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21', 'S6-01', 'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06', 'S6-07', 'S6-08', 'S6-10', 'S6-11', 'S6-12', 'S6-13', 'S6-15', 'S6-17', 'S6-18', 'S6-19', 'S6-20', 'S7-01', 'S7-02', 'S7-03', 'S7-04', 'S7-05', 'S7-06', 'S7-07', 'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14', 'S7-15', 'S7-16' ] rh_coils = dict(zip(rh_coils,[2.0]*len(rh_coils)*pq.F)) rh_stream_names = [] for name in rh_coils: rh_stream_names += ['coil_closed_temp_change_' + name] + \ ['hot_water_' + name] + \ ['instantaneous_zone_load_' + name] c = SmapClient(base='http://new.openbms.org/backend',\ key='XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8') # Query necessary tags for energy data source_energy = 'Sutardja Dai Hall Energy Data' all_points = pointnames + rh_stream_names + chw_stream_names where_energy = "Metadata/SourceName = '%s' and Path ~ '%s' and (" \ %(source_energy, p)\ + ' or '.join(["Path ~ '%s'"] * len(all_points))\ %tuple(all_points) + ")" tags_energy = c.tags(where_energy) # Query data for energy calcs as AHU level source = 'Sutardja Dai Hall BACnet' path_list = { 'AH2A_SF_VFD' : 'SDH/AH2A/SF_VFD:POWER', 'AH2B_SF_VFD' : 'SDH/AH2B/SF_VFD:POWER',
"""Example code plotting one day's worth of outside air time-series, locating the streams using a metadata query. @author Stephen Dawson-Haggerty <*****@*****.**> """ from smap.archiver.client import SmapClient from smap.contrib import dtutil from matplotlib import pyplot from matplotlib import dates # make a client c = SmapClient("http://www.openbms.org/backend") # start and end values are Unix timestamps start = dtutil.dt2ts(dtutil.strptime_tz("1-1-2013", "%m-%d-%Y")) end = dtutil.dt2ts(dtutil.strptime_tz("1-2-2013", "%m-%d-%Y")) # download the data and metadata tags = c.tags("Metadata/Extra/Type = 'oat'") uuids, data = c.data("Metadata/Extra/Type = 'oat'", start, end) # make a dict mapping uuids to data vectors data_map = dict(zip(uuids, data)) # plot all the data for timeseries in tags: d = data_map[timeseries['uuid']] # since we have the tags, we can add some metadata label = "%s (%s)" % (timeseries['Metadata/SourceName'],
""" sum up the feeds of energy consumption from a building and apply EMD to the sum @author: Dezhi """ import urllib2 import numpy as np import matplotlib.pyplot as plt import time import csv from smap.archiver.client import SmapClient from smap.contrib import dtutil from EMDpython import EMD from temp import get_temp # make a client c = SmapClient("http://new.openbms.org/backend") # get the components for query bldg_list = [i.strip('\n') for i in open('bldg_list.txt', 'r').readlines()] index = range(1,len(bldg_list)) bldg_dict = dict(zip(index, bldg_list)) print "================================================" print "\n".join(["%s-%s" %(k,v) for k,v in bldg_dict.items()]) num = raw_input("choose a # from above to query: ") bldg = bldg_dict[int(num)] # start = raw_input("start time (\"%m-%d-%Y %H:%M\" or "-d" for default): ") # end = raw_input("end time (\"%m-%d-%Y %H:%M\") or "-d" for default: ") start = "10-21-2013 00:00" end = "10-27-2013 23:59" # get the outside air temperature during the period specified
%(str(f).zfill(2), str(zone_name)) + '%s-%s_demo_temp.csv'%(start.strftime("%Y%m%d"), end.strftime("%Y%m%d"))) #pdb.set_trace() #Combine all zones (columns) in a dataframe #TODO: create a temp file with aggregated floors df_floor = reduce(lambda x,y: \ pd.merge(x, y, on=['timestamp','datetime']),floor_frames) #+ 'Floor%s_airflow_temp_load' %(str(f).zfill(2)) \ #TODO: change file name HERE floor_path = floor_dir \ + 'Floor%s_demo' %(str(f).zfill(2))\ + '%s-%s'%(startF.strftime("%Y%m%d"), endFp.strftime("%Y%m%d"))\ + '.csv' df_floor.to_csv(floor_path) c = SmapClient(base='http://new.openbms.org/backend',\ key=['XuETaff882hB6li0dP3XWdiGYJ9SSsFGj0N8']) source = 'Sutardja Dai Hall BACnet' source_tav = 'Sutardja Dai Hall TAV' source_energy = 'Sutardja Dai Hall Energy Data' path_and_tav = ['tav_whole_bldg/'] path_and_energy = ['energy_data/', 'variable_elec_cost/'] #TODO : include points we want #points = ['CTL_FLOW_MAX', 'CTL_FLOW_MIN', 'AIR_VOLUME'] #, 'ROOM_TEMP', 'CTL_STPT'] points = [ 'AIR_VOLUME', 'CTL_FLOW_MIN', 'CLG_LOOPOUT', 'HTG_LOOPOUT', 'DMPR_POS', 'DMPR_CMD' ] #points_tav = ['/cycle'] points_tav = [ 'average_airflow_in_cycle', 'average_airflow_in_hour', 'tav_active' ]
import os import sys from smap.archiver.client import SmapClient from smap.contrib import dtutil import re import json c = SmapClient("http://new.openbms.org/backend", key="NAXk19YY45TTiXlajiQGQ8KTp283oHfp2Uly") rooms = c.query("select distinct Metadata/room-id where Metadata/site='STA'") metadata = {} count = 0 numRooms = len(rooms) for room in rooms: count += 1 print "Building Metadata for room : %s (%d/%d)" % (room, count, numRooms ) metadata[room] = {} sensors = c.query("select * where Metadata/room-id='" + str(room) + "' and Metadata/site='STA'") for i in range(len(sensors)): if "Name" not in sensors[i]["Metadata"]: continue pointName = sensors[i]["Metadata"]["Name"] roomMetadata = sensors[i]["Metadata"] if "room_temp" in roomMetadata: metadata[room]["room_temp"] = pointName if "supply_air_velocity" in roomMetadata or "supply_air_volume" in roomMetadata: metadata[room]["supply_air_velocity"] = pointName if "reheat_valve_position" in roomMetadata: metadata[room]["reheat_valve_position"] = pointName
from smap.archiver.client import SmapClient from smap.contrib import dtutil from matplotlib import pyplot from matplotlib import dates import os import re import json import ast from datetime import datetime # make a client c = SmapClient("http://new.openbms.org/backend") # get tag list #tag = [i.strip('\n') for i in open('tagList.txt', 'r').readlines()] stnc = "select distinct Path" result = c.query(stnc) outFile = open("MetadataDump","w") finalStructure = {} count = 0 lastTime = datetime.now() print len(result) print "Experiment started at : ",lastTime for tag in result: queryString = "select * where Path='" + tag + "'" out = c.query(queryString) #finalStructure[tag] = out[0] count += 1 if count % 100 == 0:
@authour Shuyang Li <*****@*****.**> """ from smap.archiver.client import SmapClient from smap.contrib import dtutil import datetime, time, pdb, re import numpy as np import quantities as pq from matplotlib import pyplot from matplotlib import dates ##t = time.time() t = time.strptime("30 Nov 15 23 40 00", "%d %b %y %H %M %S") t = time.mktime(t) c = SmapClient("http://www.openbms.org/backend") source = "Metadata/SourceName = 'Sutardja Dai Hall BACnet'" where = source + " and Path ~ 'S[0-9]-[0-9][0-9]' and" +\ "(Path ~ 'CLG_LOOPOUT' or Path ~ 'HTG_LOOPOUT' or " +\ "Path ~ 'AIR_VOLUME' or Path ~ 'VLV_POS' or " +\ "Path ~ 'AI_3' or Path ~ '')" tags = c.tags(where) data = c.prev(where, t, streamlimit=1000, limit=1000) vavs_rhv = [] # vav with reheat valve vavs_rhv = ['S1-01', 'S1-02', 'S1-03', 'S1-04', 'S1-07', 'S1-08', 'S1-09', 'S1-10', 'S1-13', 'S1-15', 'S1-16', 'S1-17', 'S1-18', 'S1-19', 'S1-20', 'S2-01', 'S2-02', 'S2-03', 'S2-04', 'S2-05', 'S2-06', 'S2-07', 'S2-10', 'S2-11', 'S2-12', 'S2-13', 'S2-14', 'S2-15', 'S2-16', 'S2-17', 'S2-18', 'S2-19', 'S2-20', 'S2-21', 'S3-01', 'S3-02', 'S3-03', 'S3-04', 'S3-05', 'S3-06', 'S3-07', 'S3-08', 'S3-09', 'S3-10', 'S3-11', 'S3-12', 'S3-15', 'S3-16', 'S3-17', 'S3-18', 'S3-19', 'S3-20', 'S3-21', 'S4-01', 'S4-02', 'S4-03', 'S4-04', 'S4-05', 'S4-06', 'S4-07', 'S4-08', 'S4-09', 'S4-11', 'S4-12', 'S4-13', 'S4-15', 'S4-16', 'S4-18', 'S4-19', 'S4-20', 'S4-21', 'S5-01', 'S5-02', 'S5-03', 'S5-04', 'S5-05', 'S5-06', 'S5-07', 'S5-08', 'S5-09', 'S5-10', 'S5-11', 'S5-12', 'S5-13', 'S5-14', 'S5-16', 'S5-18', 'S5-19', 'S5-20', 'S5-21', 'S6-01', 'S6-02', 'S6-03', 'S6-04', 'S6-05', 'S6-06', 'S6-07', 'S6-08', 'S6-10', 'S6-11', 'S6-12', 'S6-13', 'S6-15', 'S6-17', 'S6-18', 'S6-19', 'S6-20', 'S7-01', 'S7-02', 'S7-03', 'S7-04', 'S7-05', 'S7-06', 'S7-07', 'S7-08', 'S7-09', 'S7-10', 'S7-13', 'S7-14', 'S7-15', 'S7-16'] for v in sorted(vavs_rhv): u_rhv = [tag['uuid'] for tag in tags if v in tag['Path'] and 'VLV_POS' in tag ['Path']] u_ht = [tag['uuid'] 
for tag in tags if v in tag['Path'] and 'HTG_LOOPOUT' in tag ['Path']] u_dat = [tag['uuid'] for tag in tags if v in tag['Path'] and 'AI_3' in tag ['Path']]
OF THE POSSIBILITY OF SUCH DAMAGE. """ """ Keti mote protocol implementation and sMAP driver. @author Stephen Dawson-Haggerty <*****@*****.**> """ import datetime from smap.archiver.client import SmapClient from smap.contrib import dtutil import numpy as np import matplotlib.pyplot as plt c = SmapClient('http://ar1.openbms.org:8079') HOURS = 5 RATES = [#("#", 10), ("ppm", 5), ("C", 5)] prrs = [] for unit, rate in RATES: counts = c.query(("apply count to data in now -%ih, now " "limit -1 streamlimit 1000 where " "Properties/UnitofMeasure = '%s' and " "Metadata/SourceName = 'KETI Motes'") % (HOURS, unit)) for v in counts: r = np.array(v['Readings']) if len(r):
def load_tags(self):
    """Fetch the tags matching ``self.restrict`` from ``self.source_url``.

    Meant to be executed in a worker thread, since the archiver query
    blocks on network I/O.
    """
    archiver = SmapClient(self.source_url)
    return archiver.tags(self.restrict)
return inputs


class ProcessOperator(operators.ParallelSimpleOperator):
    # sMAP stream operator that applies `thetaprobe` independently to each
    # input stream (ParallelSimpleOperator runs base_operator per stream).
    base_operator = staticmethod(thetaprobe)
    name = 'movingavg-'
    operator_name = 'movingavg'
    # Constructor signatures accepted by the operator framework:
    # no arguments, or a single int (the lag).
    operator_constructors = [(), (int,)]

    def __init__(self, inputs, lag=10):
        # Embed the lag in the instance name so differently-configured
        # operators are distinguishable in the archiver.
        self.name = 'movingavg-' + str(lag)
        operators.ParallelSimpleOperator.__init__(self, inputs, lag=lag)


### test ###
# NOTE(review): ad-hoc smoke test left at module level; SmapClient() is
# called with no base URL here — presumably relies on a default. Verify.
c = SmapClient()
inputs = c.data_uuid()
# hist=null
inputs = np.array(inputs, dtype=float)
data = np.vstack(inputs)
nd = thetaprobe(inputs)
RepublishClient()
def __init__(self,login):
    """Create a sMAP client from connection settings.

    ``login`` is a mapping providing ``host`` and ``port`` keys, used to
    build the archiver base URL, e.g. {'host': 'localhost', 'port': 8079}
    -> "http://localhost:8079".
    """
    self.c = SmapClient("http://%(host)s:%(port)s" % login)
from smap.archiver.client import SmapClient from smap.contrib import dtutil import numpy as np import pandas as pd import datetime import subprocess #Link to download the data c = SmapClient("http://iiitdarchiver.zenatix.com:9105") #Range of dates to which you want to download the data start = dtutil.dt2ts(dtutil.strptime_tz("01-10-2017", "%d-%m-%Y")) end = dtutil.dt2ts(dtutil.strptime_tz("01-10-2017", "%d-%m-%Y")) # hard-code the UUIDs we want to download oat = ["eec41258-f057-591e-9759-8cfdeb67b9af"] # Function to perform the download of the data data = c.data_uuid(oat, start, end) t = np.array(data) df = pd.DataFrame(t) # creating files after downloading for i, j in enumerate(t): name = str(i) + '.csv' with open(name, 'w') as f: for time, val in j: f.write( str(datetime.datetime.fromtimestamp(time / 1000.0)) + ' , ' +
from smap.contrib import dtutil from sklearn.tree import DecisionTreeClassifier as DT import numpy as np import os def get_feature(data): mean = np.mean(data) median = np.median(data) std = np.std(data) q1 = np.percentile(data,25) q3 = np.percentile(data,75) vector = [mean, median, std, q1, q3] return vector c = SmapClient("http://new.openbms.org/backend") lines = open('MetadataDump').readlines() sdh_path = [i.strip('\n') for i in open('sdh_path','r').readlines()] search_path = [] search_res = [] res = keywordSearch(lines, "sdh temp", 2000) water = keywordSearch(lines, "sdh hw temp", 1000) stp = keywordSearch(lines, "sdh temp stp", 1000) search = keywordSearch(lines, "sdh room temp", 2000) add = keywordSearch(lines, "sdh rm temp", 1000) for i in water: res.remove(i) for i in stp: res.remove(i) for r in res:
from smap.archiver.client import SmapClient from smap.contrib import dtutil from matplotlib import pyplot from matplotlib import dates import os # make a client c = SmapClient("http://new.openbms.org/backend") # start and end values are Unix timestamps t_start = "6-12-2013 8:00" t_end = "6-19-2013 8:00" start = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_start, "%m-%d-%Y %H:%M")) end = 1000*dtutil.dt2ts(dtutil.strptime_tz(t_end, "%m-%d-%Y %H:%M")) stnc = "select distinct Metadata/Location/RoomNumber where Metadata/SourceName='KETI Motes'" roomlist = c.query(stnc) #the result is a list #roomlist = roomlist[16:] #roomlist = ['621A','621B','621C','621D','621E'] for room in roomlist: print "==========Fetching streams in Room %s=========="%room stnc = "select Path where Metadata/Location/RoomNumber='%s' and not Path ~ '.*pir.*'" %room streams = c.query(stnc) if len(streams)>0: # print "----%d streams in Room %s----"%(len(streams), room) for s in streams: # fetch the metadata of path wanted tags = c.tags("Path='%s'"%s['Path'])
# Pull the last week of per-zone HVAC data from a sMAP archiver and expose a
# helper that converts one stream's readings to a JSON-style dict
# (Python 2 script).
from smap.archiver.client import SmapClient
import time
import datetime
import json
import pandas as pd
#pd.options.display.mpl_style = 'default'

client = SmapClient('http://ciee.cal-sdb.org:8079')

# timestamps: query window is the last week, in Unix seconds
end = int(time.time())
#start = end - 60*60*24*30 # last month
start = end - 60 * 60 * 24 * 7 # last week

# distinct HVAC zone names known to the archiver
zones = client.query('select distinct Metadata/HVACZone')


def getdataasjson(query, start, end):
    """Fetch readings matching ``query`` between ``start`` and ``end``
    (Unix seconds) and return them as a dict keyed by timestamp, or an
    empty dict when there is no data.

    NOTE(review): only the first matching stream (tmp_data[1][0]) is used;
    values more than 5 standard deviations from the mean are discarded.
    """
    tmp_data = client.data(query, start, end, cache=True, limit=500000)
    # tmp_data is (uuids, readings); no matching streams -> nothing to return
    if not len(tmp_data[0]):
        return {}
    # readings for the first stream only: column 0 = timestamp (ms),
    # column 1 = value
    tmp = pd.DataFrame(tmp_data[1][0])
    # NOTE(review): len(tmp.notnull()) equals len(tmp), so this is simply an
    # empty-frame guard for a stream that matched but has no readings.
    if len(tmp.notnull()) == 0:
        return {}
    # outlier filter: keep values within 5 standard deviations of the mean.
    # NOTE(review): pd.np was removed in pandas >= 1.0; this requires an
    # older pandas (consistent with the Python 2 era of this script).
    tmp = tmp[pd.np.abs(tmp[1] - tmp[1].mean()) <= 5 * tmp[1].std()]
    # index the frame by timestamp and drop the redundant column
    tmp[0] = pd.to_datetime(tmp[0], unit='ms')
    tmp.index = tmp[0]
    tmp = tmp.drop_duplicates()
    del tmp[0]
    # to_json keys the remaining value column by its name ("1")
    return json.loads(tmp.to_json())["1"]