def test_scheme(self):
    """Test database scheme for TestInfluxDBClient object."""
    # Default transport is plain HTTP.
    cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
    self.assertEqual(cli._baseurl, 'http://host:8086')

    # ssl=True must switch the base URL scheme to HTTPS.
    cli = InfluxDBClient(
        'host', 8086, 'username', 'password', 'database', ssl=True
    )
    self.assertEqual(cli._baseurl, 'https://host:8086')
def test_scheme(self):
    """Test database scheme for TestInfluxDBClient object."""
    # Plain client: the base URL uses http.
    plain = InfluxDBClient('host', 8086, 'username', 'password', 'database')
    self.assertEqual(plain._baseurl, 'http://host:8086')

    # With ssl=True the base URL switches to https.
    secure = InfluxDBClient(
        'host', 8086, 'username', 'password', 'database', ssl=True
    )
    self.assertEqual(secure._baseurl, 'https://host:8086')
def test_write_points_bad_precision(self):
    """Test that an unsupported time precision raises a clear error."""
    cli = InfluxDBClient()
    # Raw string: the escaped parentheses are regex syntax, not string
    # escapes -- avoids the invalid-escape-sequence DeprecationWarning.
    with self.assertRaisesRegexp(
            Exception,
            r"Invalid time precision is given\. "
            r"\(use 's', 'm', 'ms' or 'u'\)"):
        cli.write_points(self.dummy_points, time_precision='g')
def test_write(self):
    """Test that write() posts the payload to the /write endpoint."""

    def make_body():
        # Fresh dict each call so the sent and expected payloads are
        # independent objects with identical content.
        return {"database": "mydb",
                "retentionPolicy": "mypolicy",
                "points": [{"name": "cpu_load_short",
                            "tags": {"host": "server01",
                                     "region": "us-west"},
                            "timestamp": "2009-11-10T23:00:00Z",
                            "values": {"value": 0.64}}]}

    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.POST,
            "http://localhost:8086/write"
        )
        cli = InfluxDBClient(database='db')
        cli.write(make_body())
        self.assertEqual(json.loads(m.last_request.body), make_body())
def __gen_client():
    # Build a client from the environment-derived module constants.
    return InfluxDBClient(
        host=INFLUXDB_PORT_8086_TCP_ADDR,
        port=INFLUXDB_PORT_8086_TCP_PORT,
        username=INFLUXDB_USERNAME,
        password=INFLUXDB_PASSWORD,
        database=database,
        timeout=INFLUXDB_TIMEOUT,
    )
def test_query_chunked_unicode(self):
    """Test unicode chunked query for TestInfluxDBClient object."""
    cli = InfluxDBClient(database='db')
    chunk = {
        'name': 'foo',
        'columns': [
            'time', 'sequence_number', 'val'
        ],
        'points': [
            [1415206212980, 10001, u('unicode-\xcf\x89')],
            [1415197271586, 10001, u('more-unicode-\xcf\x90')]
        ],
    }
    # A chunked response is two JSON documents back to back.
    body = json.dumps(chunk) * 2
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=body
        )
        self.assertListEqual(
            cli.query('select * from foo', chunked=True),
            [chunk, chunk]
        )
def test_request_retry(self, mock_request):
    """Test that two connection errors will be handled."""

    class FlakyRequest(object):
        """Fail the first two calls, then return HTTP 200."""

        def __init__(self):
            self.calls = 0

        def connection_error(self, *args, **kwargs):
            """Raise ConnectionError twice, then succeed."""
            self.calls += 1
            if self.calls >= 3:
                response = requests.Response()
                response.status_code = 200
                return response
            raise requests.exceptions.ConnectionError

    mock_request.side_effect = FlakyRequest().connection_error
    cli = InfluxDBClient(database='db')
    # Two failures are within the retry budget; this must not raise.
    cli.write_points(self.dummy_points)
def get(self, request, app_name):
    """Return aggregated monitoring metrics for one of the user's apps.

    Query params ``start``/``end`` are epoch seconds; ``end`` defaults
    to now and ``start`` to one hour before ``end``.
    """
    try:
        app = request.user.webapp_set.get(name=app_name)
        now = int(datetime.now().timestamp())  # epoch seconds
        end = int(request.GET.get("end", now))
        # FIX: comment said "1 day" but 3600 s is a 1-hour window.
        start = request.GET.get("start", end - 3600)  # 1 hour
        influxdb_client = InfluxDBClient(settings.INFLUXDB["HOST"],
                                         settings.INFLUXDB["PORT"],
                                         settings.INFLUXDB["USERNAME"],
                                         settings.INFLUXDB["PASSWORD"],
                                         settings.INFLUXDB["DBNAME"])
        mesos_app_id = "app-" + app.uuid
        query = "SELECT COUNT(DISTINCT(mesos_task_id)) as instances, MEAN(cpu_usage) as mean_cpu, MEAN(mem_usage) as mean_mem "
        query += "FROM monitoring "
        query += "WHERE app_uuid = '{}' and time > {}s and time < {}s ".format(
            mesos_app_id, start, end)
        query += "GROUP BY time(10s)"
        metrics = influxdb_client.query(query)
        if metrics:
            # FIX: removed the unreachable second return that followed
            # this one in the original code.
            return JsonResponse({"data": metrics[0]["points"]})
        else:
            # NOTE(review): echoing the raw query back looks like debug
            # leftovers and leaks schema details -- confirm before release.
            return JsonResponse({"data": query})
    except Exception as e:
        traceback.print_exc()
        return JsonResponse({"data": "11"})
def test_query_bad_precision(self):
    """Test that querying with an unknown precision raises an error."""
    cli = InfluxDBClient()
    # Raw string: the escaped parentheses are regex syntax, not string
    # escapes -- avoids the invalid-escape-sequence DeprecationWarning.
    with self.assertRaisesRegexp(
            Exception,
            r"Invalid time precision is given\. "
            r"\(use 's', 'm', 'ms' or 'u'\)"):
        cli.query('select column_one from foo', time_precision='g')
def test_get_continuous_queries(self):
    """Test that continuous queries are extracted from the response."""
    cli = InfluxDBClient(database='db')

    # Tip: put this in a json linter!
    example_response = '[ { "name": "continuous queries", "columns"' \
                       ': [ "time", "id", "query" ], "points": [ [ ' \
                       '0, 1, "select foo(bar,95) from \\"foo_bar' \
                       's\\" group by time(5m) into response_times.' \
                       'percentiles.5m.95" ], [ 0, 2, "select perce' \
                       'ntile(value,95) from \\"response_times\\" g' \
                       'roup by time(5m) into response_times.percen' \
                       'tiles.5m.95" ] ] } ]'

    expected = [
        'select foo(bar,95) from "foo_bars" group '
        'by time(5m) into response_times.percentiles.5m.95',
        'select percentile(value,95) from "response_times" group '
        'by time(5m) into response_times.percentiles.5m.95'
    ]

    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.GET,
                       "http://localhost:8086/db/db/series",
                       text=example_response)
        self.assertListEqual(cli.get_list_continuous_queries(), expected)
def test_query_chunked(self):
    """Test that a chunked query response is parsed into a list."""
    cli = InfluxDBClient(database='db')
    chunk = {
        'name': 'foo',
        'columns': [
            'time', 'sequence_number', 'val'
        ],
        'points': [
            [1415206250119, 40001, 667],
            [1415206244555, 30001, 7],
            [1415206228241, 20001, 788],
            [1415206212980, 10001, 555],
            [1415197271586, 10001, 23]
        ],
    }
    # Two JSON documents concatenated, as the chunked transport sends them.
    body = json.dumps(chunk) * 2
    with requests_mock.Mocker() as m:
        m.register_uri(
            requests_mock.GET,
            "http://localhost:8086/db/db/series",
            text=body
        )
        self.assertListEqual(
            cli.query('select * from foo', chunked=True),
            [chunk, chunk]
        )
def test_write_points_batch(self):
    """Test that a batched write_points call reports success."""
    with _mocked_session('post', 200, self.dummy_points):
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        # unittest assertion instead of a bare ``assert``: survives
        # ``python -O`` and reports the actual value on failure.
        self.assertTrue(
            cli.write_points(data=self.dummy_points, batch_size=2)
        )
def test_write_points_batch_multiple_series(self):
    """Test that batching splits writes correctly across series."""
    dummy_points = [
        {
            "name": "foo",
            "columns": ["val1", "val2", "val3"],
            "points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                       ["4", 4, 4.0], ["5", 5, 5.0]],
        },
        {
            "name": "bar",
            "columns": ["val1", "val2", "val3"],
            "points": [["1", 1, 1.0], ["2", 2, 2.0], ["3", 3, 3.0],
                       ["4", 4, 4.0], ["5", 5, 5.0], ["6", 6, 6.0],
                       ["7", 7, 7.0], ["8", 8, 8.0]],
        },
    ]
    # 5 + 8 points at batch_size=3 -> 2 + 3 = 5 POSTs; the last POST
    # carries the final two points of "bar".
    expected_last_body = [{
        'name': 'bar',
        'columns': ['val1', 'val2', 'val3'],
        'points': [['7', 7, 7.0], ['8', 8, 8.0]],
    }]
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        cli = InfluxDBClient('localhost', 8086, 'username',
                             'password', 'db')
        cli.write_points(data=dummy_points, batch_size=3)
        self.assertEqual(m.call_count, 5)
        self.assertEqual(expected_last_body, m.request_history[4].json())
def check_heal(nodes_to_monitor, depl_id):
    """Check recent CPU samples for each monitored node instance and
    start a 'heal' workflow for instances with no data.

    Skips everything (and exits) while the cooldown marker is active.
    """
    if cooldown():
        print('Exiting...\n')
        exit(0)

    # FIX: the original created a second, redundant CloudifyClient.
    c = CloudifyClient('localhost')
    c_influx = InfluxDBClient(host='localhost', port=8086,
                              database='cloudify')

    # FIX: context manager guarantees the log file is closed/flushed.
    with open('/home/ubuntu/logfile', 'w') as f:
        f.write('in check heal\n')
        # compare influx data (monitoring) to cloudify desired state
        for node_name in nodes_to_monitor:
            instances = c.node_instances.list(depl_id, node_name)
            for instance in instances:
                # FIX: '\\.' spells a literal backslash-dot for the
                # regex; the old '\.' relied on an invalid escape.
                q_string = ('SELECT MEAN(value) FROM /' + depl_id +
                            '\\.' + node_name + '\\.' + instance.id +
                            '\\.cpu_total_system/ GROUP BY time(10s) '
                            'WHERE time > now() - 40s')
                f.write('query string is{0}\n'.format(q_string))
                try:
                    result = c_influx.query(q_string)
                    f.write('result is {0} \n'.format(result))
                    if not result:
                        # Touch the cooldown marker, then trigger heal.
                        open('/home/ubuntu/cooldown', 'a').close()
                        utime('/home/ubuntu/cooldown', None)
                        c.executions.start(depl_id, 'heal',
                                           {'node_id': instance.id})
                except InfluxDBClientError as ee:
                    f.write('DBClienterror {0}\n'.format(str(ee)))
                    f.write('instance id is {0}\n'.format(instance))
                except Exception as e:
                    # Best-effort logging; deliberately non-fatal.
                    f.write(str(e))
def cli():
    """Command-line entry point: dump Raptor alert data as JSON.

    Usage: <host> <username> <password> [APP1] [APP2] ...
    With no app arguments, all known Raptor apps are processed.
    """
    if len(sys.argv) < 4:
        # FIX: print() function form -- the old Python-2 print
        # statement is a SyntaxError on Python 3; this works on both.
        print("USAGE: %s <host> <username> <password> [APP1] [APP2] ..." %
              sys.argv[0])
        sys.exit(1)

    (host, username, password) = sys.argv[1:4]
    apps_to_process = sys.argv[4:]
    all_raptor_apps = [r[0].lower() for r in RAPTOR_APPS]
    if not apps_to_process:
        apps_to_process = all_raptor_apps

    client = InfluxDBClient(host, 8086, username, password, 'raptor')
    resultdict = {}
    for app_to_process in apps_to_process:
        if app_to_process not in all_raptor_apps:
            print("ERROR: App %s does not exist?!" % app_to_process)
            sys.exit(1)
        for (appname, context) in RAPTOR_APPS:
            if appname.lower() == app_to_process:
                resultdict[app_to_process] = get_alerts(
                    client, appname, context)
    print(json.dumps(resultdict))
def test_request_retry_raises(self, mock_request):
    """Test that three connection errors will not be handled."""

    class FlakyRequest(object):
        """Fail the first three calls, then return HTTP 200."""

        def __init__(self):
            """Initialize the call counter."""
            self.count = 0

        def connection_error(self, *args, **kwargs):
            """Raise ConnectionError three times before succeeding."""
            self.count += 1
            if self.count >= 4:
                response = requests.Response()
                response.status_code = 200
                return response
            raise requests.exceptions.ConnectionError

    mock_request.side_effect = FlakyRequest().connection_error
    cli = InfluxDBClient(database='db')

    # The retry budget covers only two failures, so the third
    # connection error must propagate to the caller.
    with self.assertRaises(requests.exceptions.ConnectionError):
        cli.write_points(self.dummy_points)
def test_get_database_list_deprecated(self):
    """Test deprecated get database list for TestInfluxDBClient."""
    databases = [{"name": "a_db"}]
    with _mocked_session('get', 200, databases):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        # One database is reported, named as in the mocked payload.
        self.assertEqual(len(cli.get_database_list()), 1)
        self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')
def main():
    """Run the autoscaler for the app whose uuid is given in argv[1]."""
    logging.basicConfig(stream=sys.stderr, level=getattr(logging, 'INFO'))
    logging.getLogger(
        "requests.packages.urllib3.connectionpool").setLevel("ERROR")
    logging.getLogger("marathon").setLevel("ERROR")
    logger = logging.getLogger("autoscaling")

    # MySQL session for looking up the app record.
    engine = create_engine(
        "mysql://{}:{}@{}:{}/{}".format(
            MYSQLDB["USERNAME"], MYSQLDB["PASSWORD"], MYSQLDB["HOST"],
            MYSQLDB["PORT"], MYSQLDB["DBNAME"]),
        encoding='utf-8', echo=False)
    session_factory = sessionmaker(bind=engine)
    mysql_client = session_factory()

    marathon_client = MarathonClient(
        'http://' + MARATHON['HOST'] + ':' + MARATHON['PORT'])
    influxdb_client = InfluxDBClient(
        INFLUXDB["HOST"], INFLUXDB["PORT"], INFLUXDB["USERNAME"],
        INFLUXDB["PASSWORD"], INFLUXDB["DBNAME"])

    app_uuid = sys.argv[1]
    app = mysql_client.query(WebApp).filter_by(uuid=app_uuid).first()
    if app:
        decider = BaseRuleDecider(app, influxdb_client, marathon_client)
        logger.info("Start autoscaling: " + app_uuid)
        autoscaling = AutoScaling(decider, TIME_INTERVAL)
        autoscaling.run()
    else:
        logger.error("App uuid not found: " + app_uuid)
def test_write_points_batch_invalid_size(self):
    """Test that a non-positive batch size falls back to one request."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        cli = InfluxDBClient('localhost', 8086, 'username',
                             'password', 'db')
        cli.write_points(data=self.dummy_points, batch_size=-2)
        # All points go out in a single POST.
        self.assertEqual(1, m.call_count)
def test_get_database_list_deprecated(self):
    """Test deprecated get database list for TestInfluxDBClient."""
    data = [
        {"name": "a_db"}
    ]
    with _mocked_session('get', 200, data):
        cli = InfluxDBClient('host', 8086, 'username', 'password')
        # unittest assertions instead of bare ``assert``: they survive
        # ``python -O`` and report the values on failure.
        self.assertEqual(len(cli.get_database_list()), 1)
        self.assertEqual(cli.get_database_list()[0]['name'], 'a_db')
class Meta:
    """Define metadata AutoCommitTest object."""

    # Series name template -- presumably '{server_name}' is filled in
    # per data point; verify against the SeriesHelper consumer.
    series_name = 'events.stats.{server_name}'
    # Field names recorded for each point.
    fields = ['time', 'server_name']
    # NOTE(review): looks like the buffer size that triggers a commit
    # when autocommit is on -- confirm against the helper class.
    bulk_size = 5
    # Client used to push committed points.
    client = InfluxDBClient()
    # Enable automatic commits (as opposed to explicit .commit() calls).
    autocommit = True
def test_case_of_n_clients(num_of_clients):
    """Benchmark InfluxDB batch writes using ``num_of_clients`` threads."""
    global influx_client

    # Get Client
    influx_client = InfluxDBClient(HOST, PORT, USER, PASSWORD, DBNAME)

    # Create the database; if it already exists, drop and recreate it.
    print("Create database: " + DBNAME)
    try:
        influx_client.create_database(DBNAME)
    except InfluxDBClientError:
        influx_client.delete_database(DBNAME)
        #influx_client.drop_database(DBNAME)
        influx_client.create_database(DBNAME)

    # Add retention policy
    print("Create a retention policy")
    #influx_client.create_retention_policy(retention_policy_name, '3d', 3, default=True)

    # Init benchmark_helper
    STARTED_TIMESTAMP = int(time.time())
    influx_benchmark_helper.init(STARTED_TIMESTAMP, RUNNING_SECONDS,
                                 STATS_INTERVAL, BATCH_SIZE)

    print("Generating clients.. ")
    clients = generate_clients(num_of_clients)
    print("Done!\n")

    print("Run multiple clients and do batch writes ")
    # BUG FIX: map() is lazy on Python 3, so the old
    # ``map(lambda t: t.start(), clients)`` never started any thread.
    for client_thread in clients:
        client_thread.start()
    print("Done!\n")

    # BUG FIX: pass the callable and its argument separately; the old
    # ``Thread(target=print_stats_worker(num_of_clients))`` invoked the
    # worker immediately and handed its return value to Thread.
    stats_thread = Thread(target=print_stats_worker,
                          args=(num_of_clients,))
    stats_thread.daemon = True
    stats_thread.start()

    # Join all client threads (not the daemonized stats thread);
    # join() blocks until each thread has finished.
    for client_thread in clients:
        client_thread.join()

    # Record final results into report.txt file
    is_final_result = True
    print_stats(num_of_clients, is_final_result)
    time.sleep(2)

    # Drop Database
    print("Drop database: " + DBNAME)
    influx_client.delete_database(DBNAME)
def test_delete_database_user(self):
    """Test that deleting a database user sends a body-less DELETE."""
    user_url = "http://localhost:8086/db/db/users/paul"
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.DELETE, user_url)
        cli = InfluxDBClient(database='db')
        cli.delete_database_user(username='******')
        # DELETE requests carry no payload.
        self.assertIsNone(m.last_request.body)
def test_add_database_user_bad_permissions(self):
    """Test that malformed permissions raise a descriptive error."""
    cli = InfluxDBClient()
    # Raw string: the escaped parentheses are regex syntax, not string
    # escapes -- avoids the invalid-escape-sequence DeprecationWarning.
    with self.assertRaisesRegexp(
            Exception,
            r"'permissions' must be \(readFrom, writeTo\) tuple"):
        cli.add_database_user(new_password='******',
                              new_username='******',
                              permissions=('hello', 'hello', 'hello'))
def test_alter_database_user_password(self):
    """Test that altering a user's password posts only that password."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/users/paul")
        cli = InfluxDBClient(database='db')
        cli.alter_database_user(username='******', password='******')
        sent = json.loads(m.last_request.body)
        self.assertDictEqual(sent, {'password': '******'})
def test_alter_database_admin(self):
    """Test that revoking admin rights posts only the admin flag."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/users/paul")
        cli = InfluxDBClient(database='db')
        cli.alter_database_admin(username='******', is_admin=False)
        sent = json.loads(m.last_request.body)
        self.assertDictEqual(sent, {'admin': False})
def test_delete_points(self):
    """Test that deleting a series issues one authenticated DELETE."""
    with _mocked_session('delete', 204) as mocked:
        cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
        # unittest assertions instead of bare ``assert``: they survive
        # ``python -O`` and report the values on failure.
        self.assertTrue(cli.delete_points("foo"))

        self.assertEqual(len(mocked.call_args_list), 1)
        args, kwds = mocked.call_args_list[0]
        self.assertEqual(kwds['params'],
                         {'u': 'username', 'p': 'password'})
        self.assertEqual(kwds['url'], 'http://host:8086/db/db/series/foo')
def test_write_points_string(self):
    """Test that a JSON string payload is accepted by write_points."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/db/db/series")
        cli = InfluxDBClient(database='db')
        payload = str(json.dumps(self.dummy_points))
        cli.write_points(payload)
        # The posted body round-trips back to the original points.
        self.assertListEqual(json.loads(m.last_request.body),
                             self.dummy_points)
def test_update_cluster_admin_password(self):
    """Test that updating a cluster admin posts the new password."""
    with requests_mock.Mocker() as m:
        m.register_uri(requests_mock.POST,
                       "http://localhost:8086/cluster_admins/paul")
        cli = InfluxDBClient(database='db')
        cli.update_cluster_admin_password(username='******',
                                          new_password='******')
        body = json.loads(m.last_request.body)
        self.assertDictEqual(body, {'password': '******'})
def __init__(self, url, username=None, password=None):
    """Create an InfluxDB client wrapper from a connection URL.

    Explicit ``username``/``password`` arguments take precedence over
    credentials embedded in the URL's userinfo component.
    """
    self.url = url
    parsed = urlparse.urlparse(url)
    self.username = username or parsed.username
    self.password = password or parsed.password
    self.hostname = parsed.hostname
    self.port = parsed.port
    # BUG FIX: pass the *resolved* credentials; the old code forwarded
    # the raw arguments, silently discarding URL-embedded credentials.
    self.idb = InfluxDBClient(host=self.hostname,
                              port=self.port,
                              username=self.username,
                              password=self.password)