Code Example #1
def load_uuid_data():
    import json
    from building_depot import DataService, BDError

    # load config
    with open('config/data-params.json') as fp:  # '../data-params.json'
        config = json.load(fp)
    cse_dataservice_url = config["cse_dataservice_url"]
    bd_username = config["bd_username"]
    bd_api_key = config["bd_api_key"]
    remote_sensors = config["remote_sensors"]

    with open('config/etl-params.json') as fp:  # '../data-params.json'
        etl_config = json.load(fp)
    source_repo = etl_config["source_repo"]
    repo_name = etl_config["repo_name"]
    api_fp = etl_config["api_fp"]
    data_fp = etl_config["data_fp"]

    # Connect to BuildingDepot
    ds = DataService(cse_dataservice_url, bd_api_key, bd_username)

    data = {}
    for room in list(config["target_rooms_setpoint_values"].keys()) + list(
            config["uncontrolled_rooms"].keys()):
        query = {'room': room}
        resp = ds.list_sensors(query)
        uuids = {
            sensor['template']: sensor['uuid']
            for sensor in resp['sensors']
            if sensor['template'] in remote_sensors
        }
        data[room] = uuids

    print(data)
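
For reference, config/data-params.json is expected to provide at least the keys read above. The values below are placeholders (the URL and sensor names are borrowed from the later examples), not real credentials or rooms:

{
    "cse_dataservice_url": "https://bd-datas1.ucsd.edu",
    "bd_username": "******",
    "bd_api_key": "<api key>",
    "remote_sensors": ["Actual Supply Flow", "Actual Sup Flow SP"],
    "target_rooms_setpoint_values": {"rm-2150": 400},
    "uncontrolled_rooms": {"rm-2154": null}
}
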
Code Example #2
def load_ion_data_old():
    # json, DataService, begin_time, end_time, and store_data are assumed to be
    # defined at module level in the original file.
    with open('config/bd2_ion_secret.json', 'r') as fp:
        config = json.load(fp)
    ds = DataService(config['hostname'], config['apikey'], config['user'])
    with open('metadata/ebu3b_ion.json', 'r') as fp:
        ion_srcids = json.load(fp)
    for srcid, bd_srcid in ion_srcids.items():
        uuid = ds.list_sensors({'source_identifier':
                                bd_srcid})['sensors'][0]['uuid']
        raw_data = ds.get_timeseries_datapoints(uuid, 'PresentValue',
                                                begin_time, end_time)
        store_data(raw_data, './data/{0}.csv'.format(srcid))
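
store_data, begin_time, and end_time are not shown in this snippet or the next one; they are presumably defined elsewhere in the original module. A minimal sketch of what store_data might look like, assuming raw_data['timeseries'] is a list of single-entry {timestamp: value} dicts (the shape consumed by rawts2pdseries in Code Example #6):

import csv

def store_data(raw_data, path):
    # Hypothetical helper: flatten the timeseries payload and write it out as CSV rows.
    with open(path, 'w') as f:
        writer = csv.writer(f)
        for point in raw_data['timeseries']:
            for timestamp, value in point.items():
                writer.writerow([timestamp, value])
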
Code Example #3
def load_building_data():
    # json, DataService, begin_time, end_time, and store_data are assumed to be
    # defined at module level in the original file.

    with open('config/bd2_secret.json', 'r') as fp:
        config = json.load(fp)

    ds = DataService(config['hostname'], config['apikey'], config['user'])

    with open('metadata/ebu3b_bacnet.json', 'r') as fp:
        naes = json.load(fp)

    srcids = []
    for nae_num, nae in naes.items():
        objs = nae['objs'][1:]
        # Keep only standard BACnet point types:
        # 0=AI, 1=AO, 2=AV, 3=BI, 4=BO, 5=BV, 13=MSI, 14=MSO, 19=MSV
        srcids += [
            '{0}_{1}_{2}'.format(nae_num, obj['props']['type'],
                                 obj['props']['instance']) for obj in objs
            if obj['props']['type'] in [0, 1, 2, 3, 4, 5, 13, 14, 19]
        ]

    srcid = '506_0_3000485'  # example srcid; immediately overwritten by the loop below
    nonexisting_srcids = []
    for srcid in srcids:
        uuid = ds.list_sensors({'source_identifier':
                                srcid})['sensors'][0]['uuid']
        #end_time = arrow.get(arrow.get().datetime, 'US/Pacific').datetime

        try:
            raw_data = ds.get_timeseries_datapoints(uuid, 'PresentValue',
                                                    begin_time, end_time)
        except Exception:
            print('{0} is not found in ds.'.format(srcid))
            nonexisting_srcids.append(srcid)
            continue
        #data = reduce(updater, raw_data['timeseries'], {})
        #series = pd.Series(index=data.keys(), data=list(data.values()))
        #series.to_csv('./data/{0}.csv'.format(srcid))
        store_data(raw_data, './data/{0}.csv'.format(srcid))

    with open('nonexisting_srcids.json', 'w') as fp:
        json.dump(nonexisting_srcids, fp, indent=2)
Code Example #4
from datetime import timedelta
from building_depot import DataService
from pprint import pprint

TRYLIMIT = 2  # number of times to attempt actuation on room
ACTUATE_SENSOR = 'Actual Sup Flow SP'  # sensor to actuate on
OBSERVE_SENSOR = 'Actual Supply Flow'  # sensor to check for correct response to change in ACTUATE_SENSOR
MARGIN_OF_ERROR = 0.95  # minimum (actual/setpoint) value that is considered successful
WAIT_TIME = 5  # maximum time (in minutes) between the actuation and observation used to decide whether a room succeeded or failed

PT = 'US/Pacific'

bd_username = "******"
bd_api_key = "313b0b78-7981-45ee-81d4-0fbe87963110"
dataservice_url = "https://bd-datas1.ucsd.edu"
ds = DataService(dataservice_url, bd_api_key, bd_username)

uuids = {}  # hold sensor uuids for each room
time_actuated = {}  # records when each room was actuated

# types of sensors being observed
templates = {
    OBSERVE_SENSOR,
    ACTUATE_SENSOR,
}

time_format = 'YYYY-MM-DD HH:mm:ss'


# Experiment preparation: collect the sensor uuid list for each room
def obtain_uuid_list(room_list):
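    # The original example is truncated at this point. The body below is a hedged
    # sketch that reuses the ds.list_sensors pattern from Code Example #1; it is
    # not the original code.
    for room in room_list:
        resp = ds.list_sensors({'room': room})
        uuids[room] = {
            sensor['template']: sensor['uuid']
            for sensor in resp['sensors']
            if sensor['template'] in templates
        }
    return uuids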
Code Example #5
	def __init__(self):
		self.bdDS = DataService(authdata.srcUrlBase, authdata.bdApiKey, authdata.bdUserName)
Code Example #6
# Imports assumed by this snippet (not shown in the original excerpt)
from collections import OrderedDict
from datetime import datetime

import pandas as pd
from pytz import timezone

from building_depot import DataService, BDError

import authdata


class BDWrapper:

	bdDS = None
	pst = timezone('US/Pacific')
	utc = timezone('UTC')
	bdStrFormat = '%Y-%m-%dT%H:%M:%S+00:00'

	def __init__(self):
		self.bdDS = DataService(authdata.srcUrlBase, authdata.bdApiKey, authdata.bdUserName)

	def get_sensor_uuids(self, context):
		# context (dict of metadata filters, e.g. {'room': ..., 'template': ...}) -> list of uuid strings
		try:
			resp = self.bdDS.list_sensors(context)
			uuids = list()
			for sensor in resp['sensors']:
				uuids.append(sensor['uuid'])
			return uuids
		except BDError as e:
			print e
			return []

	def get_sensor_names(self, context):
		try:
			resp = self.bdDS.list_sensors(context)
			uuids = list()
			for sensor in resp['sensors']:
				uuids.append(sensor['source_name'])
			return uuids
		except BDError as e:
			print e
			return []


	def get_sensor_ts(self, uuid, sensorType, beginTime, endTime):
		# uuid (string), sensorType (string), beginTime (datetime), endTime (datetime) -> timeseries (pd.Series)
		# Note: beginTime and endTime must be naive datetimes; they are localized to US/Pacific here.
		isoBegin = self.pst.localize(beginTime)
		isoEnd = self.pst.localize(endTime)
		try:
			rawData = self.bdDS.get_timeseries_datapoints(uuid, sensorType, isoBegin, isoEnd) #return json
			pdseries = self.rawts2pdseries(rawData['timeseries'])
			#pdts = self.rawts2pdts(rawData['timeseries'])
			return pdseries
		except BDError as e:
			print e
			return None
		
	# TODO: Can this be done better? Converting a list of dicts into a DataFrame is awkward.
	def rawts2pdts(self, rawData):
		rawData = OrderedDict([(key,d[key]) for d in rawData for key in d])
		sortedData = rawData
		#rawData = dict([(key,d[key]) for d in rawData for key in d])
		#sortedData = OrderedDict(sorted(rawData.items(), key=operator.itemgetter(0)))
		pdts = pd.DataFrame({'timestamp':sortedData.keys(), 'value':sortedData.values()})
		g = lambda tp:datetime.strptime(tp, self.bdStrFormat).replace(tzinfo=self.utc).astimezone(self.pst).replace(tzinfo=None)
		pdts['timestamp'] = pdts['timestamp'].apply(g)
		return pdts

	def rawts2pdseries(self, rawData):
		# Flatten the list of single-entry dicts, convert the UTC timestamp strings
		# to naive US/Pacific datetimes, and return the result as a pd.Series.
		rawData = OrderedDict([(key, d[key]) for d in rawData for key in d])
		for key in rawData.keys():
			newKey = datetime.strptime(key, self.bdStrFormat).replace(tzinfo=self.utc).astimezone(self.pst).replace(tzinfo=None)
			rawData[newKey] = rawData.pop(key)
		pdseries = pd.Series(data=rawData.values(),index=rawData.keys())
		return pdseries


	# TODO: Do not make multiple writes (maybe remove this?)
	def get_zone_sensor_ts(self, zone, template, sensorType, beginTime, endTime):
		# zone (string), template (string), sensorType (string), beginTime (datetime), endTime (datetime) -> ts (pd.Series)
		# Note: convenience wrapper around get_sensor_ts for a single zone/template pair.
		context = {'room':zone, 'template':template}
		try:
			uuids = self.get_sensor_uuids(context)
			if len(uuids) > 1:
				print "ERROR: more than one sensor found: " + str(len(uuids)) + " sensors returned"
				return None
			elif len(uuids) == 0:
				print "ERROR: no sensor found"
				return None
			ts = self.get_sensor_ts(uuids[0], sensorType, beginTime, endTime)
			return ts
		except BDError as e:
			print e
			#TODO or just return None?
			return pd.Series()

	def set_sensor(self, uuid, sensorType, tp, val):
		# uuid (string), sensorType (string), tp (datetime), val (number) -> success (boolean)
		# The timeseries written is a single-entry list, e.g. [{datetime(2014, 1, 1, 0, 0, 0): 72}]
		newts = list()
		newts.append({self.pst.localize(tp).isoformat():val})
		try:
			self.bdDS.put_timeseries_datapoints(uuid, sensorType, newts)
			return True
		except BDError as e:
			print e
			return False
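
A short usage sketch of BDWrapper for reference; the room name, template, and dates are placeholders borrowed from the other examples on this page, not values confirmed for this project (assumes the imports listed at the top of the snippet):

bdw = BDWrapper()
uuids = bdw.get_sensor_uuids({'room': 'rm-2150', 'template': 'Actual Supply Flow'})
if uuids:
	series = bdw.get_sensor_ts(uuids[0], 'PresentValue', datetime(2015, 3, 1), datetime(2015, 3, 2))
	print series.head()
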
Code Example #7
File: sample_crawl.py  Project: jbkoh/bd_aggregator
from building_depot import DataService, BDError
import json
import authdata

bdDS = DataService(authdata.srcUrlBase, authdata.bdApiKey, authdata.bdUserName)

sensors = bdDS.list_sensors(query_context={"room":"rm-2150"})['sensors']

sensorDict = dict()
batchQ = dict()
beginTimeStr = '2015-03-01T00:00:00'
endTimeStr = '2015-06-01T00:00:00'

for sensor in sensors:
	uuid = sensor['uuid']
	sensorpoints = bdDS.list_sensorpoints(uuid, offset=0, limit=2)
	pointsDict = dict()
	sensorpoints = sensorpoints['sensorpoints']
	for sensorpoint in sensorpoints:
		pntName = sensorpoint['description']
		pointsDict[pntName] = {"start":beginTimeStr, "stop":endTimeStr}
	batchQ[uuid] = pointsDict

#data = bdm.get_batch_json(batchQ)
data = bdDS.get_timeseries_datapoints_batch(batchQ, timeout=1000)
#print data.content

with open("RM-2150.json", 'wb') as fp:
	json.dump(data, fp)
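
For reference, the batch query built by the loop above ends up shaped roughly as follows; the uuid and point description are placeholders:

batchQ = {
	'<sensor uuid>': {
		'<sensorpoint description>': {'start': '2015-03-01T00:00:00', 'stop': '2015-06-01T00:00:00'},
	},
}
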
Code Example #8
File: collect_data.py  Project: davio606/Plaster
# Imports assumed by this snippet (not shown in the original excerpt)
import json
import logging

from building_depot import DataService

# Logger configuration
logger = logging.getLogger("data_collection_log")
logger.setLevel(logging.INFO)
log_handler = logging.FileHandler('log/data_collection.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)

# BD2-1 Initialization
with open("config/bd2-1config.json", "r") as fp:
    hostname = json.load(fp)['hostname']
with open("config/bd2-1secrets.json", "r") as fp:
    secrets = json.load(fp)
    username = secrets['username']
    apikey = secrets['apikey']
bd2_1ds = DataService(hostname, apikey, username)

# BD2-2 Initialization
with open("config/bd2-2config.json", "r") as fp:
    hostname = json.load(fp)['hostname']
with open("config/bd2-2secrets.json", "r") as fp:
    secrets = json.load(fp)
    username = secrets['username']
    apikey = secrets['apikey']
bd2_2ds = DataService(hostname, apikey, username)

# BD3 Initialization
with open("config/bd3config.json", "r") as fp:
    hostname = json.load(fp)['hostname']
with open("config/bd3secrets.json", "r") as fp:
    secrets = json.load(fp)
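    # The excerpt ends here; following the BD2-1 and BD2-2 blocks above, the BD3
    # initialization presumably continues along these lines (bd3ds is an assumed name):
    username = secrets['username']
    apikey = secrets['apikey']
bd3ds = DataService(hostname, apikey, username)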