Example #1
def load_ion_data_old():
    # Read DataService credentials and the local srcid -> bd srcid mapping,
    # then dump each point's raw 'PresentValue' time series to ./data/<srcid>.csv.
    with open('config/bd2_ion_secret.json', 'r') as fp:
        config = json.load(fp)
    ds = DataService(config['hostname'], config['apikey'], config['user'])
    with open('metadata/ebu3b_ion.json', 'r') as fp:
        ion_srcids = json.load(fp)
    for srcid, bd_srcid in ion_srcids.items():
        uuid = ds.list_sensors({'source_identifier':
                                bd_srcid})['sensors'][0]['uuid']
        raw_data = ds.get_timeseries_datapoints(uuid, 'PresentValue',
                                                begin_time, end_time)
        store_data(raw_data, './data/{0}.csv'.format(srcid))
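Both loaders rely on begin_time, end_time and store_data being defined at module level, which is not shown here. As a hedged sketch only, the query window could be built with pytz-localized datetimes, mirroring how BDWrapper (further below) localizes its timestamps; the dates are placeholders.

# Sketch only: assumed module-level query window for the loaders above.
# The date range is a placeholder; pytz/'US/Pacific' mirrors BDWrapper below.
from datetime import datetime
from pytz import timezone

pst = timezone('US/Pacific')
begin_time = pst.localize(datetime(2015, 1, 1, 0, 0, 0))
end_time = pst.localize(datetime(2015, 1, 8, 0, 0, 0))

# With a store_data() in scope (see the sketch after Example #2), the loader
# could then simply be called:
# load_ion_data_old()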
Example #2
def load_building_data():
    # Like load_ion_data_old(), but srcids are constructed from the BACnet
    # object metadata; srcids missing from the DataService are recorded in
    # nonexisting_srcids.json.

    with open('config/bd2_secret.json', 'r') as fp:
        config = json.load(fp)

    ds = DataService(config['hostname'], config['apikey'], config['user'])

    with open('metadata/ebu3b_bacnet.json', 'r') as fp:
        naes = json.load(fp)

    # Build the list of srcids to fetch: one '{nae}_{type}_{instance}' id per
    # BACnet object, keeping only analog/binary/multi-state input/output/value
    # object types (0-5, 13, 14, 19). nae['objs'][1:] skips the first entry,
    # presumably the device object itself.
    srcids = []
    for nae_num, nae in naes.items():
        objs = nae['objs'][1:]
        srcids += [
            '{0}_{1}_{2}'.format(nae_num, obj['props']['type'],
                                 obj['props']['instance']) for obj in objs
            if obj['props']['type'] in [0, 1, 2, 3, 4, 5, 13, 14, 19]
        ]

    # srcid = '506_0_3000485'  # leftover single-point test id, unused
    nonexisting_srcids = []
    for srcid in srcids:
        uuid = ds.list_sensors({'source_identifier':
                                srcid})['sensors'][0]['uuid']
        #end_time = arrow.get(arrow.get().datetime, 'US/Pacific').datetime

        try:
            raw_data = ds.get_timeseries_datapoints(uuid, 'PresentValue',
                                                    begin_time, end_time)
        except Exception:
            print('{0} is not found in ds.'.format(srcid))
            nonexisting_srcids.append(srcid)
            continue
        #data = reduce(updater, raw_data['timeseries'], {})
        #series = pd.Series(index=data.keys(), data=list(data.values()))
        #series.to_csv('./data/{0}.csv'.format(srcid))
        store_data(raw_data, './data/{0}.csv'.format(srcid))

    with open('nonexisting_srcids.json', 'w') as fp:
        json.dump(nonexisting_srcids, fp, indent=2)
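store_data() itself is not shown in either example. Below is a minimal sketch of what it presumably does, reconstructed from the commented-out lines above and from BDWrapper.rawts2pdseries further down (the response's 'timeseries' field is a list of single-entry {timestamp: value} dicts); treat it as an assumption, not the original implementation.

# Hypothetical store_data(): flatten the 'timeseries' list of single-entry
# {timestamp: value} dicts into a pandas Series and write it out as CSV,
# mirroring the commented-out reduce/pd.Series/to_csv lines above.
import pandas as pd

def store_data(raw_data, filename):
    points = {}
    for point in raw_data['timeseries']:   # e.g. {'2015-01-01T00:00:00+00:00': 72.0}
        points.update(point)
    series = pd.Series(data=list(points.values()), index=list(points.keys()))
    series.to_csv(filename)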
class BDWrapper:

	bdDS = None
	pst = timezone('US/Pacific')
	utc = timezone('UTC')
	bdStrFormat = '%Y-%m-%dT%H:%M:%S+00:00'

	def __init__(self):
		self.bdDS = DataService(authdata.srcUrlBase, authdata.bdApiKey, authdata.bdUserName)

	def get_sensor_uuids(self, context):
		# context (dict of query filters, e.g. {'source_identifier': srcid}) -> uuids (list of strings)
		try:
			resp = self.bdDS.list_sensors(context)
			uuids = list()
			for sensor in resp['sensors']:
				uuids.append(sensor['uuid'])
			return uuids
		except BDError as e:
			print(e)
			return []

	def get_sensor_names(self, context):
		# context (dict of query filters) -> source names (list of strings)
		try:
			resp = self.bdDS.list_sensors(context)
			names = list()
			for sensor in resp['sensors']:
				names.append(sensor['source_name'])
			return names
		except BDError as e:
			print(e)
			return []


	def get_sensor_ts(self, uuid, sensorType, beginTime, endTime):
		# uuid (string), sensorType (string), beginTime (datetime), endTime (datetime) -> timeseries (pd.Series)
		# Note: beginTime and endTime should be naive (not yet localized) datetimes; they are localized to US/Pacific here.
		isoBegin = self.pst.localize(beginTime)
		isoEnd = self.pst.localize(endTime)
		try:
			rawData = self.bdDS.get_timeseries_datapoints(uuid, sensorType, isoBegin, isoEnd)  # parsed JSON response (dict)
			pdseries = self.rawts2pdseries(rawData['timeseries'])
			#pdts = self.rawts2pdts(rawData['timeseries'])
			return pdseries
		except BDError as e:
			print(e)
			return None
		
	# TODO: Can I do this better? It is awkward to convert a list of dicts into a DataFrame.
	def rawts2pdts(self, rawData):
		# rawData: list of single-entry {timestamp: value} dicts -> DataFrame with 'timestamp' and 'value' columns
		rawData = OrderedDict([(key, d[key]) for d in rawData for key in d])
		sortedData = rawData
		#rawData = dict([(key,d[key]) for d in rawData for key in d])
		#sortedData = OrderedDict(sorted(rawData.items(), key=operator.itemgetter(0)))
		pdts = pd.DataFrame({'timestamp': list(sortedData.keys()), 'value': list(sortedData.values())})
		g = lambda tp:datetime.strptime(tp, self.bdStrFormat).replace(tzinfo=self.utc).astimezone(self.pst).replace(tzinfo=None)
		pdts['timestamp'] = pdts['timestamp'].apply(g)
		return pdts

	def rawts2pdseries(self, rawData):
		# rawData: list of single-entry {timestamp: value} dicts -> pd.Series indexed by naive US/Pacific datetimes
		rawData = OrderedDict([(key, d[key]) for d in rawData for key in d])
		for key in list(rawData.keys()):
			newKey = datetime.strptime(key, self.bdStrFormat).replace(tzinfo=self.utc).astimezone(self.pst).replace(tzinfo=None)
			rawData[newKey] = rawData.pop(key)
		pdseries = pd.Series(data=list(rawData.values()), index=list(rawData.keys()))
		return pdseries


	# TODO: Avoid making multiple writes (maybe remove this?)
	def get_zone_sensor_ts(self, zone, template, sensorType, beginTime, endTime):
		# zone (string), template (string), sensorType (string), beginTime (datetime), endTime (datetime) -> ts (pd.Series)
		# Note: convenience wrapper around get_sensor_ts for a single zone/template pair.
		context = {'room':zone, 'template':template}
		try:
			uuids = self.get_sensor_uuids(context)
			if len(uuids) > 1:
				print('ERROR: more than one sensor found ({0} match the context)'.format(len(uuids)))
				return None
			elif len(uuids) == 0:
				print('ERROR: no sensor found')
				return None
			ts = self.get_sensor_ts(uuids[0], sensorType, beginTime, endTime)
			return ts
		except BDError as e:
			print(e)
			#TODO or just return None?
			return pd.Series()

	def set_sensor(self, uuid, sensorType, tp, val):
		# uuid (string), sensorType (string), tp (datetime), val -> success (boolean)
		# Writes a single datapoint, e.g. tp=datetime(2014, 1, 1, 0, 0, 0), val=72.
		newts = list()
		newts.append({self.pst.localize(tp).isoformat():val})
		try:
			self.bdDS.put_timeseries_datapoints(uuid, sensorType, newts)
			return True
		except BDError as e:
			print(e)
			return False
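
Finally, a hedged usage sketch for BDWrapper: the zone and template strings are placeholders, while 'PresentValue' matches the sensor type used in the examples above; get_zone_sensor_ts returns a pd.Series (or None / an empty Series on error).

# Hypothetical driver for BDWrapper; zone/template values are placeholders.
from datetime import datetime

if __name__ == '__main__':
	bdw = BDWrapper()
	begin = datetime(2015, 1, 1, 0, 0, 0)   # naive datetimes; BDWrapper localizes them to US/Pacific
	end = datetime(2015, 1, 2, 0, 0, 0)
	ts = bdw.get_zone_sensor_ts('RM-2150', 'Zone Temperature', 'PresentValue', begin, end)
	if ts is not None and not ts.empty:
		print(ts.head())
	# Writing a single datapoint back (uuid and value are placeholders):
	# bdw.set_sensor(some_uuid, 'PresentValue', datetime(2015, 1, 2, 0, 0, 0), 72)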