def configure_binding(binder):
    """Wire the Brick service singletons into the dependency injector.

    Builds one shared instance each of the timeseries backend, the lock
    manager, and the SPARQL endpoint from the global ``configs`` dict,
    then registers them on ``binder``.
    """
    from brick_data.timeseries import BrickTimeseries
    from brick_data.sparql import BrickSparql
    from brick_server.extensions.lockmanager import LockManager

    ts_cfg = configs['timeseries']
    brick_ts = BrickTimeseries(
        ts_cfg['dbname'],
        ts_cfg['user'],
        ts_cfg['password'],
        ts_cfg['host'],
        ts_cfg['port'],
    )

    lock_cfg = configs['lockmanager']
    lock_manager = LockManager(
        lock_cfg['host'],
        lock_cfg['port'],
        lock_cfg['dbname'],
        lock_cfg['user'],
        lock_cfg['password'],
    )

    brick_cfg = configs['brick']
    # When enabled, derive the entity namespace from this server's own
    # hostname plus the API prefixes instead of the statically configured
    # namespace, so minted entity URIs resolve back to this server.
    if configs['server']['use_hostname_as_ns']:
        base_ns = 'http://{hostname}{api_prefix}{entity_api_prefix}/'.format(
            hostname=configs['server']['hostname'],
            api_prefix=API_V1_PREFIX,
            entity_api_prefix=entity_api.path,
        )
    else:
        base_ns = brick_cfg['base_ns']

    brick_sparql = BrickSparql(
        brick_cfg['host'],
        brick_cfg['brick_version'],
        base_ns=base_ns,
        load_schema=True,
    )

    binder.bind(BrickTimeseries, to=brick_ts)
    binder.bind(BrickSparql, to=brick_sparql)
    binder.bind(LockManager, to=lock_manager)
lockmanager_configs['port'], lockmanager_configs['dbname'], lockmanager_configs['user'], lockmanager_configs['password'], ) actuation_iface = DummyActuation() brick_configs = configs['brick'] brick_sparql = BrickSparqlAsync(brick_configs['host'], brick_configs['brick_version'], graph=brick_configs['base_graph'], base_ns=brick_configs['base_ns']) brick_sparql_sync = BrickSparql(brick_configs['host'], brick_configs['brick_version'], graph=brick_configs['base_graph'], base_ns=brick_configs['base_ns']) asyncio.ensure_future(brick_sparql.load_schema()) brick_ts_configs = configs['timeseries'] ts_db = AsyncpgTimeseries( brick_ts_configs['dbname'], brick_ts_configs['user'], brick_ts_configs['password'], brick_ts_configs['host'], brick_ts_configs['port'], ) try: asyncio.ensure_future(ts_db.init()) except asyncpg.exceptions.DuplicateTableError:
from brick_data.sparql import BrickSparql

# One-off loader: push the EBU3B building's Brick model into the local
# Virtuoso SPARQL endpoint.
brick_endpoint = BrickSparql('http://localhost:8890/sparql', '1.0.2')
brick_endpoint.load_rdffile('ebu3b_brick.ttl')
#insert_data
import asyncio
import pdb
import random
import sys

import arrow
import numpy as np

# Make the local brick_data package importable when running from the repo
# root; must happen before the brick_data imports below.
sys.path.append('./')
from brick_data.timeseries import AsyncpgTimeseries
from brick_data.sparql import BrickSparql

# SPARQL endpoint holding the example graph (demo credentials redacted).
brick_db = BrickSparql(
    sparql_url='http://localhost:8890/sparql',
    brick_version='1.0.3',
    graph='http://example.com',
    base_ns='http://example.com#',
    username='******',
    password='******',
)

# Postgres-backed timeseries store for the generated samples.
ts_db = AsyncpgTimeseries(
    dbname='brick',
    user='******',
    pw='brick-demo',
    host='localhost',
    port=5432,
)


def gen_random_data(point_type, begin_time, end_time, srcid):
    """Generate synthetic timeseries samples for ``srcid``.

    NOTE(review): the rest of the body is truncated in this copy of the
    file; only the latency constants are visible here — confirm against
    the full source.
    """
    latency_base = 300  # seconds
    latency_noise_factor = 30  # seconds
#insert_data import numpy as np import arrow import pdb import random from brick_data.timeseries import * from brick_data.sparql import BrickSparql brick_db = BrickSparql('http://localhost:8890/sparql', '1.0.3') ts_db = SqlalchemyTimeseries( dbname = 'brick', user = '******', pw = 'brick-demo', host = 'localhost', port = 6001 ) def gen_random_data(point_type, begin_time, end_time, srcid): latency_base = 300 # seconds latency_noise_factor = 30 # seconds if point_type == 'Zone_Temperature_Sensor': max_val = 80 min_val = 60 day_interval = 24 * 60 * 60 noise_size = max_val * 0.01 # ratio noiser = np.vectorize( lambda x: x + random.random() * noise_size - noise_size / 2) data = [] # Add data with linear interpolation + noise t = begin_time
from io import StringIO
import pdb

from brick_data.sparql import BrickSparql

# Upload the example building model to the testbed SPARQL endpoint,
# scoped to the example graph/namespace.
brick_endpoint = BrickSparql(
    'http://bd-testbed.ucsd.edu:8890/sparql',
    '1.0.3',
    graph='http://example.com',
    base_ns='http://example.com#',
)

# load_rdffile is fed an in-memory text buffer rather than the file path.
with open('examples/bldg.ttl', 'r') as fp:
    ttl_io = StringIO(fp.read())
brick_endpoint.load_rdffile(ttl_io)