def main(host='localhost', port=8086, nb_day=15):
    """Generate ``nb_day`` days of fake CPU-idle samples, write them to
    InfluxDB, run an aggregate query, then drop the database.

    :param host: InfluxDB host name.
    :param port: InfluxDB HTTP API port.
    :param nb_day: number of days of time series to generate.

    Relies on module-level USER, PASSWORD and DBNAME constants.
    """
    # BUG FIX: the original re-assigned ``nb_day = 15`` here, silently
    # discarding any caller-supplied value and making the parameter dead.
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    cpu_series = [{
        'name': "server_data.cpu_idle",
        'columns': ["time", "value", "hostName"],
        'points': []
    }]
    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        # NOTE(review): strftime('%s') (epoch seconds) is a glibc extension,
        # not portable to all platforms — confirm the target OS.
        pointValues = [int(past_date.strftime('%s')), value, hostName]
        cpu_series[0]['points'].append(pointValues)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    client.create_database(DBNAME)

    print("Write points #: {0}".format(total_records))
    client.write_points(cpu_series)

    query = 'SELECT MEAN(value) FROM server_data.cpu_idle GROUP BY time(30m) WHERE time > now() - 1d;'
    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    print("Delete database: " + DBNAME)
    client.delete_database(DBNAME)
def main(host='localhost', port=8086, nb_day=15):
    """Generate ``nb_day`` days of fake CPU-idle samples, write them to
    InfluxDB, run an aggregate query, then drop the database.

    :param host: InfluxDB host name.
    :param port: InfluxDB HTTP API port.
    :param nb_day: number of days of time series to generate.

    Relies on module-level USER, PASSWORD and DBNAME constants.
    """
    # BUG FIX: the original re-assigned ``nb_day = 15`` here, silently
    # discarding any caller-supplied value and making the parameter dead.
    timeinterval_min = 5  # create an event every x minutes
    total_minutes = 1440 * nb_day
    total_records = int(total_minutes / timeinterval_min)
    now = datetime.datetime.today()
    cpu_series = [{
        'name': "server_data.cpu_idle",
        'columns': ["time", "value", "hostName"],
        'points': []
    }]
    for i in range(0, total_records):
        past_date = now - datetime.timedelta(minutes=i * timeinterval_min)
        value = random.randint(0, 200)
        hostName = "server-%d" % random.randint(1, 5)
        # NOTE(review): strftime('%s') (epoch seconds) is a glibc extension,
        # not portable to all platforms — confirm the target OS.
        pointValues = [int(past_date.strftime('%s')), value, hostName]
        cpu_series[0]['points'].append(pointValues)

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    client.create_database(DBNAME)

    print("Write points #: {0}".format(total_records))
    client.write_points(cpu_series)

    query = 'SELECT MEAN(value) FROM server_data.cpu_idle GROUP BY time(30m) WHERE time > now() - 1d;'
    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    print("Delete database: " + DBNAME)
    client.delete_database(DBNAME)
def main(host='localhost', port=8086):
    """Walk the InfluxDB (0.8-era API) user-management workflow: create a
    database, add a user, toggle admin rights, switch users, write and query
    points, then drop the database.

    NOTE(review): this block was corrupted by credential scrubbing — runs of
    ``"…"******"…"`` replaced whole statements, leaving invalid syntax. The
    user-management calls below are reconstructed from the standard
    influxdb-python tutorial example; verify against the original source.
    """
    user = '******'      # scrubbed: fill in the admin user name
    password = '******'  # scrubbed: fill in the admin password
    dbname = 'example'
    dbuser = '******'            # scrubbed: fill in the database user name
    dbuser_password = '******'   # scrubbed: fill in the database user password
    query = 'select column_one from foo;'
    json_body = [{
        "points": [
            ["1", 1, 1.0],
            ["2", 2, 2.0]
        ],
        "name": "foo",
        "columns": ["column_one", "column_two", "column_three"]
    }]

    client = InfluxDBClient(host, port, user, password, dbname)

    print("Create database: " + dbname)
    client.create_database(dbname)

    dbusers = client.get_database_users()
    print("Get list of database users: {0}".format(dbusers))

    print("Add database user: " + dbuser)
    client.add_database_user(dbuser, dbuser_password)

    print("Make user a database admin")
    client.set_database_admin(dbuser)

    print("Remove admin privilege from user")
    client.unset_database_admin(dbuser)

    dbusers = client.get_database_users()
    print("Get list of database users again: {0}".format(dbusers))

    print("Switch user: " + dbuser)
    client.switch_user(dbuser, dbuser_password)

    print("Write points: {0}".format(json_body))
    client.write_points(json_body)

    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    print("Switch user: " + user)
    client.switch_user(user, password)

    print("Delete database: " + dbname)
    client.delete_database(dbname)
def main(host='localhost', port=8086):
    """Fetch airline fare data and load it into a fresh 'Airfare' database."""
    db_user = '******'      # scrubbed credential placeholder
    db_password = '******'  # scrubbed credential placeholder
    dbname = 'Airfare'

    payload = get_airline_data()
    client = InfluxDBClient(host, port, db_user, db_password, dbname)

    # Start from a clean slate: drop any previous copy of the database.
    client.delete_database(dbname)
    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Writing data")
    client.write_points(payload)
def main(host='localhost', port=8086):
    """Fetch airline fare data and load it into a fresh 'Airfare' database."""
    db_user = '******'      # scrubbed credential placeholder
    db_password = '******'  # scrubbed credential placeholder
    dbname = 'Airfare'

    payload = get_airline_data()
    client = InfluxDBClient(host, port, db_user, db_password, dbname)

    # Start from a clean slate: drop any previous copy of the database.
    client.delete_database(dbname)
    print("Create database: " + dbname)
    client.create_database(dbname)

    print("Writing data")
    client.write_points(payload)
class TestResponseParser(unittest.TestCase):
    """Integration tests for the query/response wrapper.

    Requires a live InfluxDB at 127.0.0.1:8086 (root/root); each test runs
    against a scratch database created in setUp and dropped in tearDown.
    """

    def setUp(self):
        # Uniquely named database per run, seeded with 4 hours of points
        # spaced 10 seconds apart. geneate_points (sic — spelling matches
        # its external definition) presumably returns [time, key] rows.
        self.client = InfluxDBClient('127.0.0.1', 8086, 'root', 'root')
        self.database_name = 'test_%s' % (time.time())
        self.client.create_database(self.database_name)
        self.client.switch_db(self.database_name)
        now = datetime.utcnow()
        then = now - timedelta(hours=4)
        self.points = geneate_points(then, now, timedelta(seconds=10), 10)
        test_data = [{
            "name": "test",
            "columns": ["time", "key"],
            "points": self.points
        }]
        self.client.write_points(data=test_data, batch_size=2000)

    def tearDown(self):
        # Drop the scratch database created in setUp.
        self.client.delete_database(self.database_name)

    def test_simple(self):
        # An unlimited query must return every written point.
        query = InfluxQuery.for_series('test').limit(None)
        client = INDBClient(conn=self.client)
        resp = client.result_for_query(query)
        series = resp.get('test')
        self.assertEqual(len(series), len(self.points))
        # LIMIT is honoured.
        query = query.limit(10)
        resp = client.result_for_query(query)
        series = resp.get('test')
        assert len(series) == 10
        # COUNT aggregate matches the number of points written.
        query = InfluxQuery.for_series('test').columns(InfluxQuery.count('key')).limit(None)
        resp = client.result_for_query(query)
        series = resp.get('test')
        self.assertEqual(series[0].count, len(self.points))

    def test_groups(self):
        # GROUP BY time(1h): the per-bucket counts must sum to the total.
        q = InfluxQuery.for_series('test').columns(InfluxQuery.count('key'))
        q = q.limit(None)
        q = q.group_by(InfluxQuery.time('1h'))
        client = INDBClient(conn=self.client)
        resp = client.result_for_query(q)
        series = resp.get('test')
        assert sum(map(lambda x: x.count, series)) == len(self.points)
def main(host='localhost', port=8086):
    """Write one period of a sine wave into InfluxDB and query it back.

    Relies on module-level USER, PASSWORD and DBNAME constants.
    """
    # One point per degree: a sine oscillating between 0 and 20, timestamped
    # one second apart starting "now" (strftime('%s') = epoch seconds).
    base_time = int(datetime.datetime.today().strftime('%s'))
    series = [{'name': "foobar", 'columns': ["time", "value"], 'points': []}]
    for angle in range(360):
        series[0]['points'].append(
            [base_time + angle, 10 + math.sin(math.radians(angle)) * 10])

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    client.create_database(DBNAME)

    # Write points
    client.write_points(series)

    query = 'SELECT time, value FROM foobar GROUP BY value, time(1s)'
    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    # You might want to comment out the delete below and plot the result in
    # the InfluxDB interface at http://127.0.0.1:8083/ — select the database
    # "tutorial", Explore Data, then run the same SELECT as above.
    print("Delete database: " + DBNAME)
    client.delete_database(DBNAME)
class InfluxDBClientTest(unittest.TestCase):
    """Integration-test fixture against a local InfluxDB (root/root)."""

    def setUp(self):
        # Fresh, uniquely named database per test to avoid collisions.
        self.client = InfluxDBClient('localhost', 8086, 'root', 'root')
        self.database_name = 'test_%s' % (time.time())
        self.client.create_database(self.database_name)
        self.client.switch_db(self.database_name)

    def tearDown(self):
        # Drop the scratch database so repeated runs stay clean.
        self.client.delete_database(self.database_name)

    def generateData(self):
        """Seed the scratch database with 2 hours of sample page views."""
        end = datetime.utcnow()
        start = end - timedelta(hours=2)
        self.points = geneate_points(start, end, timedelta(seconds=10), 5)
        series = [{
            "name": "page_views",
            "columns": ["time", "author_id", "category_id", "name"],
            "points": self.points,
        }]
        self.client.write_points(data=series, batch_size=500)
def main(host='localhost', port=8086):
    """Write one period of a sine wave into InfluxDB and query it back.

    Relies on module-level USER, PASSWORD and DBNAME constants.
    """
    # One point per degree: a sine oscillating between 0 and 20, timestamped
    # one second apart starting "now" (strftime('%s') = epoch seconds).
    base_time = int(datetime.datetime.today().strftime('%s'))
    series = [{'name': "foobar", 'columns': ["time", "value"], 'points': []}]
    for angle in range(360):
        series[0]['points'].append(
            [base_time + angle, 10 + math.sin(math.radians(angle)) * 10])

    client = InfluxDBClient(host, port, USER, PASSWORD, DBNAME)

    print("Create database: " + DBNAME)
    client.create_database(DBNAME)

    # Write points
    client.write_points(series)

    query = 'SELECT time, value FROM foobar GROUP BY value, time(1s)'
    print("Queying data: " + query)
    result = client.query(query)
    print("Result: {0}".format(result))

    # You might want to comment out the delete below and plot the result in
    # the InfluxDB interface at http://127.0.0.1:8083/ — select the database
    # "tutorial", Explore Data, then run the same SELECT as above.
    print("Delete database: " + DBNAME)
    client.delete_database(DBNAME)
def test_delete_database_fails(self):
    """Exercise delete_database when requests.delete returns HTTP 401.

    NOTE(review): the failure expectation (raised exception) is presumably
    declared outside this block, e.g. by a decorator — confirm in context.
    """
    with patch.object(requests, 'delete') as mocked_delete:
        mocked_delete.return_value = _build_response_object(status_code=401)
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        client.delete_database('old_db')
class Broker:
    """Coordinates load-test runs: owns the AWS EC2 instance pool, the
    SQL database of runs/strategies, an InfluxDB client for metrics, and
    the per-run helper utilities handed to RunManager.
    """

    def __init__(self, io_loop, sqluri, ssh_key, ssh_username, heka_options,
                 influx_options, aws_port=None,
                 aws_owner_id="595879546273", aws_use_filters=True,
                 aws_access_key=None, aws_secret_key=None):
        self.loop = io_loop
        self._base_env = BASE_ENV.copy()

        # Optional user-data file content for EC2 instances; read eagerly
        # so the pool gets the file *content*, not the path.
        user_data = _DEFAULTS["user_data"]
        if user_data is not None and os.path.exists(user_data):
            with open(user_data) as f:
                user_data = f.read()

        # Metrics backend ("loads" database); SSL only when configured.
        influx_args = {
            "host": influx_options.host,
            "port": influx_options.port,
            "username": influx_options.user,
            "password": influx_options.password,
            "database": "loads"
        }
        if influx_options.secure:
            influx_args["ssl"] = True
            influx_args["verify_ssl"] = True
        self.influx = InfluxDBClient(**influx_args)

        self.pool = aws.EC2Pool("1234", user_data=user_data,
                                io_loop=self.loop, port=aws_port,
                                owner_id=aws_owner_id,
                                use_filters=aws_use_filters,
                                access_key=aws_access_key,
                                secret_key=aws_secret_key)

        # Utilities used by RunManager
        ssh = SSH(ssh_keyfile=ssh_key)
        self.run_helpers = run_helpers = RunHelpers()
        run_helpers.cadvisor = CAdvisor(influx_options)
        run_helpers.ping = Ping(self.loop)
        run_helpers.docker = Docker(ssh)
        run_helpers.dns = DNSMasq(run_helpers.docker)
        run_helpers.heka = Heka(ssh=ssh, options=heka_options)

        self.db = Database(sqluri, echo=True)
        self.sqluri = sqluri
        self.ssh_key = ssh_key
        self.ssh_username = ssh_username
        self._local_docker = DockerDaemon(host="tcp://0.0.0.0:2375")

        # Run managers keyed by uuid
        self._runs = {}

    def shutdown(self):
        """Shut down the EC2 pool and kick off the status-logging loop."""
        self.pool.shutdown()
        self._print_status()

    @gen.coroutine
    def _print_status(self):
        """Log the state of every active run, forever, every ~10 seconds."""
        while True:
            if not len(self._runs):
                logger.debug("Status: No runs in progress.")
            for uuid, mgr in self._runs.items():
                run = mgr.run
                logger.debug("Run state for %s: %s - %s",
                             run.uuid, status_to_text(mgr.state),
                             mgr.state_description)
            yield gen.Task(self.loop.add_timeout, time.time() + 10)

    def get_runs(self, fields=None):
        """Return all runs from the database as JSON-able dicts."""
        # XXX filters, batching
        log_threadid("Getting runs")
        runs = self.db.session().query(Run).all()
        return [run.json(fields) for run in runs]

    def _get_run(self, run_id):
        """Look up a run by uuid; returns (run-or-None, session)."""
        session = self.db.session()
        try:
            run = session.query(Run).filter(Run.uuid == run_id).one()
        except NoResultFound:
            run = None
        return run, session

    @gen.coroutine
    def _test(self, session, mgr, future):
        """Await a run's completion future, logging its outcome.

        NOTE(review): bare ``except:`` also swallows SystemExit and
        KeyboardInterrupt; ``except Exception:`` would be safer.
        """
        try:
            response = yield future
            logger.debug("Got response of: %s", response)
        except:
            logger.error("Got an exception", exc_info=True)
        # logger.debug("Reaping the pool")
        # yield self.pool.reap_instances()
        # logger.debug("Finished terminating.")

    def run_test(self, **options):
        """Create (if needed) a strategy and start a new run; returns its uuid."""
        session = self.db.session()

        # loading all options
        curl = ""
        image_url = options.get('image_url', curl)
        instance_count = options.get('nodes', 1)
        image_name = options.get('image_name',
                                 "kitcambridge/pushtest:latest")
        strategy_name = options.get('strategy_name', 'strategic!')
        cset_name = options.get('cset_name', 'MyContainerSet')

        strategy = session.query(Strategy).filter_by(
            name=strategy_name).first()
        # Hard-coded push-test endpoints injected into the container env.
        environ = {
            "PUSH_TEST_MAX_CONNS": 10000,
            "PUSH_TEST_ADDR": "ws://ec2-54-69-50-64.us-west-2.compute.amazonaws.com:8080",
            "PUSH_TEST_STATS_ADDR": "ec2-54-69-254-24.us-west-2.compute.amazonaws.com:8125"}
        if not strategy:
            # the first thing to do is to create a container set and a strategy
            cs = ContainerSet(name=cset_name,
                              instance_count=instance_count,
                              run_max_time=10,
                              container_name=image_name,
                              container_url=image_url,
                              environment_data=dict2str(environ))
            strategy = Strategy(name=strategy_name, container_sets=[cs])
            session.add(strategy)
            session.commit()

        log_threadid("Run_test")

        # now we can start a new run
        mgr, future = RunManager.new_run(self.run_helpers, session,
                                         self.pool, self.loop, strategy_name)
        callback = partial(self._test, session, mgr)
        future.add_done_callback(callback)
        self._runs[mgr.run.uuid] = mgr

        # create an Influx Database
        # self._create_dbs(mgr.run.uuid)

        # and start a Grafana container for our run
        # self._start_grafana(mgr.run.uuid)

        return mgr.run.uuid

    def _create_dbs(self, run_id):
        """Create the run's InfluxDB databases (run + cadvisor) in parallel."""
        names = [run_id, "%s-cadvisor" % run_id]

        def create_database(name):
            return self.influx.create_database(name)

        with concurrent.futures.ThreadPoolExecutor(len(names)) as e:
            results = e.map(create_database, names)
        return all(results)

    @gen.coroutine
    def _start_grafana(self, run_id):
        """Launch a Grafana container bound to the run's Influx database."""
        environment = {'HTTP_USER': '******',
                       'HTTP_PASS': '******',
                       'INFLUXDB_HOST': 'localhost',
                       'INFLUXDB_NAME': run_id}
        ports = [80]

        # XXX we want one port per grafana and let the broker
        # hold a mapping {run_id: grafana port}
        # so we can display the dashboard link
        port_bindings = {80: 8088}

        result = self._executer.submit(self._local_docker.run_container,
                                       'tutum/grafana',
                                       environment=environment,
                                       ports=ports)
        container = result["Id"]
        self._local_docker.start(container, port_bindings=port_bindings)
        yield container

    def delete_run(self, run_id):
        """Delete a run's record and its InfluxDB database."""
        run, session = self._get_run(run_id)
        self.influx.delete_database(run_id)
        session.delete(run)
        session.commit()
import pypyodbc
import pandas as pd
import datetime
import time

# NOTE(review): credentials are hard-coded in this DSN (uid/pwd); consider
# moving them to environment variables or a config file.
cnxn = pypyodbc.connect("Driver={SQL Server};"
                        "Server=LLSANDBOX-PC\SQLEXPRESS;"
                        "Database=HBT.ECD.Cognipoint;"
                        "uid=SQLadmin;pwd=LL22@@")

### InfluxDB info ####
from influxdb import InfluxDBClient

# Target database name; placeholders below must be filled in before running.
influx_db_name = ""
influxClient = InfluxDBClient("<INFLUX_HOST>", "<INFLUX_PORT>")
# Recreate the target database from scratch on every run.
influxClient.delete_database(influx_db_name)
influxClient.create_database(influx_db_name)

# dictates how columns will be mapped to key/fields in InfluxDB
schema = {
    "time_column": "",  # the column that will be used as the time stamp in influx
    "columns_to_fields": ["", ...],  # columns that will map to fields
    "columns_to_tags": ["", ...],  # columns that will map to tags
    "table_name_to_measurement": "",  # table name that will be mapped to measurement
}

'''
Generates a collection of influxdb points from the given SQL records
'''
def test_delete_database(self):
    """A 204 response makes delete_database report success (True)."""
    with _mocked_session('delete', 204) as mocked:
        client = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        result = client.delete_database('old_db')
        assert result is True
def test_delete_database_fails(self):
    """Exercise delete_database against a session mocked to return 401."""
    with _mocked_session('delete', 401) as mocked:
        conn = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        conn.delete_database('old_db')
def test_delete_database_fails(self):
    """Exercise delete_database when the session's delete returns 401."""
    with patch.object(session, 'delete') as mock_delete:
        mock_delete.return_value = _build_response_object(status_code=401)
        conn = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        conn.delete_database('old_db')
def test_delete_database_fails(self):
    """Exercise delete_database when requests.delete yields a 401."""
    with patch.object(requests, "delete") as mock_delete:
        mock_delete.return_value = _build_response_object(status_code=401)
        conn = InfluxDBClient("host", 8086, "username", "password", "db")
        conn.delete_database("old_db")
def test_delete_database_fails(self):
    """Exercise delete_database against a session mocked to return 401."""
    with _mocked_session("delete", 401) as mocked:
        conn = InfluxDBClient("host", 8086, "username", "password", "db")
        conn.delete_database("old_db")
def test_delete_database(self):
    """A 204 response makes delete_database report success (True)."""
    with _mocked_session("delete", 204) as mocked:
        conn = InfluxDBClient("host", 8086, "username", "password", "db")
        result = conn.delete_database("old_db")
        assert result is True
def test_delete_database_fails(self):
    """Exercise delete_database against a session mocked to return 401."""
    with _mocked_session('delete', 401) as mocked:
        connection = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        connection.delete_database('old_db')
def test_delete_database(self):
    """A 204 response makes delete_database report success (True)."""
    with _mocked_session('delete', 204) as mocked:
        connection = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        outcome = connection.delete_database('old_db')
        assert outcome is True
class InfluxdbHandler(object):
    """Thin wrapper around InfluxDBClient for a single series (Python 2).

    Buffers points in a 0.8-style write payload (name/columns/points) and
    flushes the whole batch on commit().
    """

    def __init__(self, host, port, user, password, dbname, serie_name):
        """ Constructor """
        self._host, self._port, self._user, self._password, self._dbname = host, port, user, password, dbname
        self.added_points = 0  # points buffered since the last commit
        self.client = None
        self.columns = []
        self.serie_name = serie_name
        self._prepare_write()
        self._prepare_connect()

    def _prepare_write(self):
        # 0.8-era JSON payload: a single series dict.
        self.write_series = [{
            'name': self.serie_name,
            'columns': self.columns,
            'points': []
        }]

    def _prepare_connect(self):
        self._connect_client()

    def _connect_client(self):
        """ Configure the client to InfluxDB API """
        self.client = InfluxDBClient(self._host, self._port, self._user,
                                     self._password, self._dbname)

    def set_columns(self, new_columns):
        """ Set the columns to new value new_columns """
        self.columns = new_columns
        self.write_series[0]['columns'] = new_columns

    def add_points(self, values):
        """ Add points to the serie """
        self.added_points += 1
        self.write_series[0]['points'].append(values)

    def commit(self):
        """ write the serie to the InfluxDB client """
        print self.added_points
        print self.write_series
        if self.added_points > 0:
            self.added_points = 0
            self.client.write_points(self.write_series)
        # print self.write_series

    def delete_series(self):
        """ delete the serie to the InfluxDB client """
        self.client.delete_series(self.serie_name)

    def empty_points(self):
        """ Reset the json document used to write series """
        self.added_points = 0
        self.write_series = [{
            'name': self.serie_name,
            'columns': self.columns,
            'points': []
        }]

    def query(self, query):
        """ Query Database Return first element """
        print query
        return self.client.query(query)[0]

    def create_database(self):
        """ Create Database """
        self.client.create_database(self._dbname)

    def delete_database(self):
        """ Delete Database """
        self.client.delete_database(self._dbname)

    def query_column_aggr_time_group(self, column='country_id',
                                     time_bucket='1h', past='15d',
                                     aggr='COUNT'):
        """ Aggregate a column grouped by its value and a time bucket. """
        # NOTE(review): WHERE after GROUP BY matches old (0.8-era) InfluxQL;
        # confirm the target server version before reusing this query shape.
        query = ("SELECT {0}({1}) FROM {2} GROUP BY {3}, time({4}) fill(0) "
                 "WHERE time > now() - {5}").format(aggr, column,
                                                    self.serie_name, column,
                                                    time_bucket, past)
        result = self.query(query)
        # print("Result: {0}".format(result))
        return result

    def query_column_aggr_time(self, column='duration', time_bucket='1h',
                               past='15d', aggr='MEAN'):
        """ SELECT MEAN(duration) FROM cdr GROUP BY time(30m) fill(0)
        WHERE time > now() - 10h """
        query = ("SELECT {0}({1}) FROM {2} GROUP BY time({3}) fill(0) "
                 "WHERE time > now() - {4}").format(aggr, column,
                                                    self.serie_name,
                                                    time_bucket, past)
        result = self.query(query)
        # print("Result: {0}".format(result))
        return result
def test_delete_database(self):
    """A mocked 204 from requests.delete → delete_database returns True."""
    with patch.object(requests, 'delete') as mock_delete:
        mock_delete.return_value = _build_response_object(status_code=204)
        conn = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        result = conn.delete_database('old_db')
        assert result is True
class InfluxdbHandler(object):
    """Thin wrapper around InfluxDBClient for a single series (Python 2).

    Buffers points in a 0.8-style write payload (name/columns/points) and
    flushes the whole batch on commit().
    """

    def __init__(self, host, port, user, password, dbname, serie_name):
        """ Constructor """
        self._host, self._port, self._user, self._password, self._dbname = host, port, user, password, dbname
        self.added_points = 0  # points buffered since the last commit
        self.client = None
        self.columns = []
        self.serie_name = serie_name
        self._prepare_write()
        self._prepare_connect()

    def _prepare_write(self):
        # 0.8-era JSON payload: a single series dict.
        self.write_series = [{
            'name': self.serie_name,
            'columns': self.columns,
            'points': []
        }]

    def _prepare_connect(self):
        self._connect_client()

    def _connect_client(self):
        """ Configure the client to InfluxDB API """
        self.client = InfluxDBClient(self._host, self._port, self._user,
                                     self._password, self._dbname)

    def set_columns(self, new_columns):
        """ Set the columns to new value new_columns """
        self.columns = new_columns
        self.write_series[0]['columns'] = new_columns

    def add_points(self, values):
        """ Add points to the serie """
        self.added_points += 1
        self.write_series[0]['points'].append(values)

    def commit(self):
        """ write the serie to the InfluxDB client """
        print self.added_points
        print self.write_series
        if self.added_points > 0:
            self.added_points = 0
            self.client.write_points(self.write_series)
        # print self.write_series

    def delete_series(self):
        """ delete the serie to the InfluxDB client """
        self.client.delete_series(self.serie_name)

    def empty_points(self):
        """ Reset the json document used to write series """
        self.added_points = 0
        self.write_series = [{
            'name': self.serie_name,
            'columns': self.columns,
            'points': []
        }]

    def query(self, query):
        """ Query Database Return first element """
        print query
        return self.client.query(query)[0]

    def create_database(self):
        """ Create Database """
        self.client.create_database(self._dbname)

    def delete_database(self):
        """ Delete Database """
        self.client.delete_database(self._dbname)

    def query_column_aggr_time_group(self, column='country_id',
                                     time_bucket='1h', past='15d',
                                     aggr='COUNT'):
        """ Aggregate a column grouped by its value and a time bucket. """
        # NOTE(review): WHERE after GROUP BY matches old (0.8-era) InfluxQL;
        # confirm the target server version before reusing this query shape.
        query = ("SELECT {0}({1}) FROM {2} GROUP BY {3}, time({4}) fill(0) "
                 "WHERE time > now() - {5}").format(aggr, column,
                                                    self.serie_name, column,
                                                    time_bucket, past)
        result = self.query(query)
        # print("Result: {0}".format(result))
        return result

    def query_column_aggr_time(self, column='duration', time_bucket='1h',
                               past='15d', aggr='MEAN'):
        """ SELECT MEAN(duration) FROM cdr GROUP BY time(30m) fill(0)
        WHERE time > now() - 10h """
        query = ("SELECT {0}({1}) FROM {2} GROUP BY time({3}) fill(0) "
                 "WHERE time > now() - {4}").format(aggr, column,
                                                    self.serie_name,
                                                    time_bucket, past)
        result = self.query(query)
        # print("Result: {0}".format(result))
        return result