class Exporter:

    def __init__(self):
        self.client = Client(API_KEY, API_KEY, API_SECRET)
        self.series_filename = os.path.join(EXPORT_DIR, "series_info")
        if not os.path.exists(EXPORT_DIR):
            print "Making export directory"
            os.makedirs(EXPORT_DIR)

    def export_metadata(self):
        if os.path.isfile(self.series_filename):
            print "series_info exists, skipping series discovery"
            return
        print "Exporting series metadata"
        all_series = self.client.list_series()
        with open(self.series_filename, 'w') as outfile:
            for series in all_series.data:
                line = self.series_to_string(series) + "\n"
                outfile.write(line.encode("utf-8"))

    def series_to_string(self, series):
        local_id = uuid.uuid4()
        j = {"uuid": str(local_id)}  # Generate series UUID since keys
        for p in series.properties:  # could be inconvenient filenames
            j[p] = getattr(series, p)
        return json.dumps(j, ensure_ascii=False)

    def export_all_series(self):
        if not os.path.isfile(self.series_filename):
            print "ERROR: No series_info file found, can't export series data"
            return
        with open(self.series_filename, 'r') as series_list:
            for text in series_list:
                series = json.loads(text)
                self.export_single_series(series.get('key'), series.get('uuid'))

    def export_single_series(self, key, uuid):
        filename = os.path.join(EXPORT_DIR, uuid + ".csv")
        if os.path.isfile(filename):
            print "Data file exists for series " + key + ", skipping"
            return
        print "Exporting series " + key + " to " + filename
        response = self.client.read_data(key=key.encode('utf-8'),
                                         start=START_DATE, end=END_DATE)
        with open(filename, 'w') as outfile:
            for dp in response.data:
                line = dp.t.isoformat() + "," + str(dp.v) + "\n"
                outfile.write(line)
class Exporter:

    def __init__(self):
        self.client = Client(API_KEY, API_KEY, API_SECRET)
        self.series_filename = os.path.join(EXPORT_DIR, "series_info")
        if not os.path.exists(EXPORT_DIR):
            print "Making export directory"
            os.makedirs(EXPORT_DIR)

    def export_metadata(self):
        if os.path.isfile(self.series_filename):
            print "series_info exists, skipping series discovery"
            return
        print "Exporting series metadata"
        all_series = self.client.list_series()
        with open(self.series_filename, 'w') as outfile:
            for series in all_series:
                line = self.series_to_string(series) + "\n"
                outfile.write(line.encode("utf-8"))

    def series_to_string(self, series):
        local_id = uuid.uuid4()
        j = {"uuid": str(local_id)}  # Generate series UUID since keys
        for p in series.properties:  # could be inconvenient filenames
            j[p] = getattr(series, p)
        return json.dumps(j, ensure_ascii=False)

    def export_all_series(self):
        if not os.path.isfile(self.series_filename):
            print "ERROR: No series_info file found, can't export series data"
            return
        with open(self.series_filename, 'r') as series_list:
            for text in series_list:
                series = json.loads(text)
                self.export_single_series(series.get('key'), series.get('uuid'))

    def export_single_series(self, key, uuid):
        filename = os.path.join(EXPORT_DIR, uuid + ".csv")
        if os.path.isfile(filename):
            print "Data file exists for series " + key + ", skipping"
            return
        print "Exporting series " + key + " to " + filename
        response = self.client.read_data(key=key.encode('utf-8'),
                                         start=START_DATE, end=END_DATE)
        with open(filename, 'w') as outfile:
            for dp in response:
                line = dp.t.isoformat() + "," + str(dp.v) + "\n"
                outfile.write(line)
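# A minimal sketch of how the Exporter above might be driven. The constants and
# imports shown here are assumptions: EXPORT_DIR, START_DATE, END_DATE, and the
# credentials are referenced by the class but never defined in the snippet, and
# the date range is assumed to be datetime objects.
import os
import json
import uuid
import datetime
from tempodb.client import Client

API_KEY = 'your-api-key'        # hypothetical credentials
API_SECRET = 'your-api-secret'
EXPORT_DIR = 'tempodb_export'   # hypothetical output directory
START_DATE = datetime.datetime(2012, 1, 1)
END_DATE = datetime.datetime(2014, 12, 31)

if __name__ == '__main__':
    exporter = Exporter()
    exporter.export_metadata()    # discover series and write series_info
    exporter.export_all_series()  # dump each series to <uuid>.csv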
class TempoDBWriter(object):

    DATABASE_ID = "clock"

    def __init__(self, base_key, columns):
        try:
            api_key = os.environ['TEMPODB_API_KEY']
            api_sec = os.environ['TEMPODB_API_SECRET']
        except KeyError:
            raise RuntimeError("You must define environment variables "
                               "TEMPODB_API_KEY and TEMPODB_API_SECRET")
        self.base_key = base_key
        self.columns = columns
        self.client = Client(self.DATABASE_ID, api_key, api_sec)

    def write(self, data):
        t = data['time']
        logger.debug("Data: %s", data)
        points = [DataPoint.from_data(t, float(data[k]),
                                      key='%s.%s' % (self.base_key, k))
                  for k in self.columns if k != 'time']
        resp = self.client.write_multi(points)
        if resp.status != 200:
            raise Exception("TempoDB error [%d] %s" % (resp.status, resp.error))
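# A minimal sketch of using the TempoDBWriter above. The imports are the ones
# the class relies on but that the snippet does not show; the key prefix,
# column names, and sample row are hypothetical. TEMPODB_API_KEY and
# TEMPODB_API_SECRET must be set in the environment.
import os
import logging
import datetime
from tempodb.client import Client
from tempodb.protocol import DataPoint

logger = logging.getLogger(__name__)

writer = TempoDBWriter(base_key='clock.sensor1',              # hypothetical key prefix
                       columns=['time', 'temp', 'humidity'])  # hypothetical columns
writer.write({'time': datetime.datetime(2014, 6, 12, 0, 0),
              'temp': '21.4',
              'humidity': '40.1'})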
def import_channel_to_tempodb(tdms_channel, series_key=None, chunk_size=2000):
    """
    :param tdms_channel: TDMS channel
    :param series_key: If None, it will try to use the name found in the TDMS object
    :return:
    """
    if series_key is None:
        series_key = tdms_channel.path
    print "\n", series_key

    tc_data = tdms_channel.data
    tc_time = tdms_channel.time_track()
    wf_start_time = tdms_channel.property('wf_start_time')

    data_size = len(tc_data)
    time_size = len(tc_time)
    if data_size != time_size:
        raise ValueError("Length of channel data and time are not equal (%i != %i)"
                         % (data_size, time_size))

    client = Client(DATABASE_ID, API_KEY, API_SECRET)
    write_channel_attributes(tdms_channel, series_key, client)

    tempo_data = []
    start_time = datetime.now()
    i = 0
    for item_t, item_d in itertools.izip(tc_time, tc_data):
        # TODO: see if DataPoint.from_data can be any faster ... possibly create a CSV and then import the CSV
        # TODO: determine if item_d could lose some precision by casting to float
        # TODO: use proper units (e.g. look for h for hour or s for seconds)
        tempo_data.append(DataPoint.from_data(
            convert_offset_to_iso8601(item_t, wf_start_time), float(item_d)))
        if i % chunk_size == 0 and i > 0:
            write_to_tempo_db(client, i, series_key, tempo_data)
            tempo_data = []
        i += 1
    if len(tempo_data) > 0:
        write_to_tempo_db(client, i, series_key, tempo_data)
    del tempo_data

    end_time = datetime.now()
    duration = end_time - start_time
    print start_time, end_time, duration
    print "Data size: %i" % data_size
    print "Points/sec: %.2f" % (data_size / duration.total_seconds())
    return
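# write_to_tempo_db and write_channel_attributes are called above but not defined
# in this snippet. A rough sketch of what write_to_tempo_db might look like,
# using the client.write_data(key, points) call shown in the other examples;
# the reporting details here are assumptions.
def write_to_tempo_db(client, count, series_key, tempo_data):
    resp = client.write_data(series_key, tempo_data)
    if resp.status != 200:
        print "Error writing chunk ending at point %i: [%d] %s" % (
            count, resp.status, resp.error)
    else:
        print "Wrote %i points (through index %i) to series %s" % (
            len(tempo_data), count, series_key)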
class Migrator:

    def __init__(self, scheme, create_devices=True, write_data=True,
                 start_date="2000-01-01T00:00:00Z",
                 end_date="2014-12-31T00:00:00Z", pool_size=3):
        self.scheme = scheme
        self.create_devices = create_devices
        self.should_write_data = write_data
        self.start_date = start_date
        self.end_date = end_date

        self.tdb = TDBClient(scheme.db_key, scheme.db_key, scheme.db_secret,
                             base_url=scheme.db_baseurl)

        iq_endpoint = HTTPEndpoint(scheme.iq_baseurl, scheme.iq_key,
                                   scheme.iq_secret)
        self.tiq = TIQClient(iq_endpoint)
        self.queue = JoinableQueue()
        self.lock = Lock()
        self.dp_count = 0
        self.req_count = 0
        self.dp_reset = time.time()
        for i in range(pool_size):
            gevent.spawn(self.worker)

    def worker(self):
        while True:
            series = self.queue.get()
            try:
                self.migrate_series(series)
            finally:
                self.queue.task_done()

    def migrate_all_series(self, start_key="", limit=None):
        start_time = time.time()
        (keys, tags, attrs) = self.scheme.identity_series_filter()
        series_set = self.tdb.list_series(keys, tags, attrs)

        # Keep our own state of whether we passed the resume point, so we don't
        # need to assume client and server sort strings the same.
        found_first_series = False

        series_count = 0
        for series in series_set:
            if not found_first_series and series.key < start_key:
                continue
            else:
                found_first_series = True

            if limit and series_count >= limit:
                print("Reached limit of %d devices, stopping." % (limit))
                break

            if self.scheme.identity_series_client_filter(series):
                # If the series looks like an identity series,
                # queue it to be processed by the threadpool
                self.queue.put(series)
                series_count += 1

        self.queue.join()
        end_time = time.time()
        print("Exporting {} devices took {} seconds".format(series_count, end_time - start_time))

    def migrate_series(self, series):
        print(" Beginning to migrate series: %s" % (series.key))
        error = False
        try:
            if self.create_devices:
                error = self.create_device(series)

            if self.should_write_data and not error:
                error = self.write_data(series)
        except Exception, e:
            logging.exception(e)
            error = True

        if not error:
            print("COMPLETED migrating for series %s" % (series.key))
        else:
            print("ERROR migrating series %s" % (series.key))
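# A minimal sketch of kicking off the Migrator above. The scheme object is
# assumed to provide the credentials, base URLs, and filter methods the class
# expects (db_key, db_secret, db_baseurl, iq_*, identity_series_filter, ...);
# MigrationScheme is a hypothetical implementation, and the TDBClient/TIQClient
# and gevent imports the class relies on are not shown in the snippet.
if __name__ == '__main__':
    scheme = MigrationScheme()  # hypothetical scheme implementation
    migrator = Migrator(scheme, create_devices=True, write_data=True, pool_size=3)
    # Optionally resume from a known series key and cap the number of devices.
    migrator.migrate_all_series(start_key="", limit=None)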
__author__ = 'paulmestemaker'

import datetime
import random
from tempodb.client import Client
from tempodb.protocol import DataPoint
from secrets import API_KEY, API_SECRET, DATABASE_ID
# Modify these with your credentials found at: http://tempo-db.com/manage/

SERIES_KEYS = ['paul-multi-1-1', 'paul-multi-1-2', 'paul-multi-1-3']

client = Client(DATABASE_ID, API_KEY, API_SECRET)

date = datetime.datetime(2012, 1, 1)
for day in range(1, 10):
    # print out the current day we are sending data for
    print date
    data = []

    # 1440 minutes in one day
    for min in range(1, 1441):
        for series in SERIES_KEYS:
            data.append(DataPoint.from_data(date, random.random() * 50.0, key=series))
        date = date + datetime.timedelta(minutes=1)

    resp = client.write_multi(data)
    print 'Response code:', resp.status
    if resp.status != 200:
        print 'Error reason:', resp.error
__author__ = 'paulmestemaker'

import datetime
import random
from tempodb.client import Client
from tempodb.protocol import DataPoint
from secrets import API_KEY, API_SECRET, DATABASE_ID
# Modify these with your credentials found at: http://tempo-db.com/manage/
# DATABASE_ID = 'my-id'
# API_KEY = DATABASE_ID
# API_SECRET = 'my-secret'

SERIES_KEY = 'temp-1'

client = Client(DATABASE_ID, API_KEY, API_SECRET)

date = datetime.datetime(2012, 1, 1)
for day in range(1, 10):
    # print out the current day we are sending data for
    print date
    data = []

    # 1440 minutes in one day
    for min in range(1, 1441):
        data.append(DataPoint.from_data(date, random.random() * 50.0))
        date = date + datetime.timedelta(minutes=1)

    resp = client.write_data(SERIES_KEY, data)
    print 'Response code:', resp.status
    if resp.status != 200:
        print 'Error reason:', resp.error
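# A short follow-up sketch: reading back a slice of the data written above with
# client.read_data, the same call used in the Exporter snippet. The response is
# assumed to expose .data with .t / .v on each point, as in that snippet; the
# date range below is arbitrary.
start = datetime.datetime(2012, 1, 1)
end = datetime.datetime(2012, 1, 2)
response = client.read_data(key=SERIES_KEY, start=start, end=end)
for dp in response.data:
    print dp.t.isoformat(), dp.v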
from tempodb.client import Client
import tempodb.response

# API_KEY = 'a68ffbe8f6fe4fb3bbda2782002680f0'
# API_SECRET = '3fe37f49b1bb4ae481dec13932c9bb92'
# SERIES_KEY = 'paul-python-1'

DATABASE_ID = 'fisi'
# API_KEY = DATABASE_ID  # Currently API_KEY is the same as DATABASE_ID
API_KEY = 'a68ffbe8f6fe4fb3bbda2782002680f0'
API_SECRET = '3fe37f49b1bb4ae481dec13932c9bb92'

client = Client(DATABASE_ID, API_KEY, API_SECRET)

try:
    client.create_series('paul-python-2014-06-12')
except tempodb.response.ResponseException as e:
    print "There was an error"
    print e

response = client.get_series('paul-python-2014-06-12')
series1 = response.data
series1.name = 'foobar'
series1.tags = ['baz', 'abc']
series1.attributes = {'foo': 'bar'}
client.update_series(series1)

import datetime
import random
from __future__ import print_function
import sys, os
from tempodb.client import Client
from flask import Flask, request, Response

app = Flask(__name__)

for k in ['TEMPODB_DATABASE_ID', 'TEMPODB_API_KEY', 'TEMPODB_API_SECRET']:
    if k not in os.environ:
        print("Missing environment variable: {}".format(k), file=sys.stderr)
        sys.exit(1)

client = Client(os.environ['TEMPODB_DATABASE_ID'],
                os.environ['TEMPODB_API_KEY'],
                os.environ['TEMPODB_API_SECRET'])

# From http://gear11.com/2013/12/python-proxy-server/
CHUNK_SIZE = 1024


def convert_response(r):
    headers = dict(r.headers)

    def generate():
        for chunk in r.iter_content(CHUNK_SIZE):
            yield chunk

    headers['Access-Control-Allow-Origin'] = '*'
    return Response(generate(), status=r.status_code, headers=headers)


@app.route('/tempodb/<path:path>')
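# The proxy route handler is cut off above. A sketch of how it might forward
# requests, assuming the upstream is reached with the requests library
# (convert_response already expects a requests-style response with
# iter_content/status_code/headers) and that the TempoDB API accepts HTTP basic
# auth with the key and secret. The base URL is an assumption.
def proxy(path):
    import requests  # kept inside the function so the sketch stays self-contained under the decorator above
    base_url = 'https://api.tempo-db.com/v1/'  # assumed upstream base URL
    r = requests.get(base_url + path,
                     params=request.args,
                     auth=(os.environ['TEMPODB_API_KEY'],
                           os.environ['TEMPODB_API_SECRET']),
                     stream=True)
    return convert_response(r)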