def __init__(self):
    self.token = os.environ['INFLUX_DB__TOKEN']
    self.org = os.environ['INFLUX_DB__ORG']
    self.bucket = os.environ['INFLUX_DB__BUCKET']
    self.client_url = os.environ['INFLUX_DB__CLIENT_URL']
    self.client = InfluxDBClient(url=self.client_url, token=self.token, org=self.org)
    self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
def influxDBConnection(db_url, db_token, db_org):
    try:
        client = InfluxDBClient(url=db_url, token=db_token, org=db_org)
        write_api = client.write_api(write_options=SYNCHRONOUS)
    except Exception:
        print('ERROR: Database unavailable, check if -u, -t and -o options are correct')
        # Return early: client/write_api are unbound if the constructor raised.
        return None, None
    return client, write_api
def test_certificate_file(self):
    self._start_http_server()

    self.client = InfluxDBClient(
        f"https://localhost:{self.httpd.server_address[1]}",
        token="my-token",
        verify_ssl=True,
        ssl_ca_cert=f'{os.path.dirname(__file__)}/server.pem')

    ping = self.client.ping()
    self.assertTrue(ping)
def send_stats_to_influxdb(parsedStats, timestamp):
    client = InfluxDBClient(url="http://" + influx_ip + ":" + str(influx_port),
                            token=influx_token,
                            org=influx_org,
                            verify_ssl=False)
    write_api = client.write_api(write_options=SYNCHRONOUS)
    try:
        write_api.write(bucket=influx_bucket, record=get_points(parsedStats, timestamp))
    except Exception as error:
        print(error)
class HelpersTest(BaseTest):

    def test_is_id(self):
        self.assertTrue(_is_id("ffffffffffffffff"))
        self.assertTrue(_is_id("020f755c3c082000"))
        self.assertTrue(_is_id("ca55e77eca55e77e"))
        self.assertTrue(_is_id("02def021097c6000"))
        self.assertFalse(_is_id("gggggggggggggggg"))
        self.assertFalse(_is_id("abc"))
        self.assertFalse(_is_id("abcdabcdabcdabcd0"))
        self.assertFalse(_is_id("020f75"))
        self.assertFalse(_is_id("020f755c3c082000aaa"))
        self.assertFalse(_is_id(None))

    def test_organization_as_query_param(self):
        organization = Organization(id="org-id", name="org-name")
        org = get_org_query_param(organization, self.client)
        self.assertEqual("org-id", org)

    def test_required_id(self):
        org = get_org_query_param(None, self.client, required_id=True)
        self.assertEqual(self.my_organization.id, org)

    def test_required_id_not_exist(self):
        with pytest.raises(InfluxDBError) as e:
            get_org_query_param("not_exist_name", self.client, required_id=True)
        assert "The client cannot find organization with name: 'not_exist_name' to determine their ID." in f"{e.value} "

    def test_both_none(self):
        self.client.close()
        self.client = InfluxDBClient(url=self.client.url, token="my-token")
        org = get_org_query_param(None, self.client)
        self.assertIsNone(org)

    def test_not_permission_to_read_org(self):
        # Create token without permission to read Organizations
        resource = PermissionResource(type="buckets", org_id=self.find_my_org().id)
        authorization = self.client \
            .authorizations_api() \
            .create_authorization(org_id=self.find_my_org().id,
                                  permissions=[Permission(resource=resource, action="read"),
                                               Permission(resource=resource, action="write")])
        self.client.close()

        # Initialize client without permission to read Organizations
        self.client = InfluxDBClient(url=self.client.url, token=authorization.token)

        with pytest.raises(InfluxDBError) as e:
            get_org_query_param("my-org", self.client, required_id=True)
        assert "The client cannot find organization with name: 'my-org' to determine their ID. Are you using token " \
               "with sufficient permission?" in f"{e.value} "
def get_flux_session():
    client = FluxClient(
        url="http://localhost:8086",
        token="h0zgWrmVbUIqjIyI4A53B0sAY7l5S2YzvmCPskQKFIUcZjNUQxSfA0FO8ejx8wLnpqf8gAtPBJrDNmxXbQay7A==",
        org="sundaya",
        debug=True)
    try:
        yield client
    finally:
        client.__del__()
def __init__(self, config):
    self.logger = getLogger(self.__class__.__name__)
    self.org = config["influx"]["org"]
    self.bucket = config["influx"]["bucket"]
    client = InfluxDBClient(url=config["influx"]["url"], token=os.environ["INFLUX_TOKEN"])
    write_options = WriteOptions(
        batch_size=config["influx"]["batch_size"],
        flush_interval=config["influx"]["interval"])
    self.write_api = client.write_api(write_options=write_options)
def __init__(self, queue):
    multiprocessing.Process.__init__(self)
    self.queue = queue
    self.client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False)
    self.write_api = self.client.write_api(
        write_options=WriteOptions(write_type=WriteType.batching,
                                   batch_size=50_000,
                                   flush_interval=10_000))
class InfluxDBLogger:

    def __init__(
        self,
        bucket_name=BUCKET,
        batch_size=LOG_BATCH_SIZE,
        data_retention=3600,
    ):
        self.organization = ORGANIZATION
        self.client = InfluxDBClient(url=INFLUXDB_URL,
                                     token=INFLUXDB_TOKEN,
                                     org=self.organization)
        self.batch_size = batch_size
        self.bucket_name = bucket_name
        self.write_api = self.client.write_api(write_options=WriteOptions(
            batch_size=self.batch_size))
        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()

        bucket = self.buckets_api.find_bucket_by_name(self.bucket_name)
        if bucket is None:
            logger.warning(f"Bucket {self.bucket_name!r} not found. "
                           f"Creating a bucket {self.bucket_name!r}.")
            retention_rules = None
            if data_retention is not None:
                retention_rules = BucketRetentionRules(
                    type="expire", every_seconds=data_retention)
            self.buckets_api.create_bucket(
                bucket_name=self.bucket_name,
                retention_rules=retention_rules,
                org=self.organization,
            )

    def send_event(self, record_type, message):
        point = Point(record_type)
        for key, value in message.items():
            point = point.field(key, value)
        self.write_api.write(bucket=self.bucket_name, record=point)

    def get_events(self, record_type):
        query = '''
            from(bucket: currentBucket)
                |> range(start: -5m, stop: now())
                |> filter(fn: (r) => r._measurement == recordType)
                |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")
        '''
        params = {"currentBucket": self.bucket_name, "recordType": record_type}
        tables = self.query_api.query(query=query, params=params)
        if len(tables) > 0:
            table, *_ = tables
            events = table.records
        else:
            events = []
        return events
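# Hypothetical usage sketch (not part of the original source) for the InfluxDBLogger
# class above, assuming BUCKET, ORGANIZATION, INFLUXDB_URL and INFLUXDB_TOKEN are
# defined at module level as the class expects.
if __name__ == "__main__":
    influx_logger = InfluxDBLogger(data_retention=3600)
    # Each key/value pair in the message becomes a field on one point of the
    # "sensor_reading" measurement (written via the batching write_api).
    influx_logger.send_event("sensor_reading", {"temperature": 21.5, "humidity": 48.0})
    # get_events() pivots the last 5 minutes of points back into one record per timestamp.
    for event in influx_logger.get_events("sensor_reading"):
        print(event.values)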
def start(self):
    try:
        self.bucket = self.get_vars.get_var("Bucket")
        self.org = self.get_vars.get_var("Org")
        self.client = InfluxDBClient(
            url=self.get_vars.get_var("InfluxDB_URL"), token=self.token)
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
        self.connection = True
    except Exception as ex:
        logging.error(ex)
        raise Exception(ex)
def test_check_write_permission_by_empty_data(self):
    client = InfluxDBClient(url="http://localhost:8086", token="my-token-wrong", org="my-org")
    write_api = client.write_api(write_options=SYNCHRONOUS)

    with self.assertRaises(ApiException) as cm:
        write_api.write("my-bucket", self.org, b'')
    exception = cm.exception

    self.assertEqual(401, exception.status)
    self.assertEqual("Unauthorized", exception.reason)

    client.close()
def load_data_connection():
    url = "http://octopi.local/api/connection"
    headers = {"X-Api-Key": X_API_KEY, "Content-Type": "application/json"}
    res_connection = requests.get(url, headers=headers).json()
    print(res_connection["current"])

    dbClient = InfluxDBClient(url=INFLUXDB_CLOUD_HOST, token=INFLUXDB_TOKEN)
    write_api = dbClient.write_api()
    for data_point in data_convert_connection(res_connection):
        write_api.write(INFLUXDB_BUCKETID, INFLUXDB_ORGID, data_point)
def start(self):
    """ Start the submission thread """
    self._running = True
    self._influx = InfluxDBClient(url=self._config['url'],
                                  org=self._config['org'],
                                  token=self._config['token'],
                                  enable_gzip=True)
    opts = WriteOptions(batch_size=self._batch_size,
                        flush_interval=self._poll_interval * 1000,
                        jitter_interval=5000)
    self._iwrite = self._influx.write_api(write_options=opts)
    self._car.register_data(self.data_callback)
def __init__(self, url, token, org, bucket_id='sun2000'):
    self.url = url
    self.token = token
    self.org = org
    self.bucket_id = bucket_id
    self.client = InfluxDBClient(url=self.url, token=self.token, org=self.org)
    self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
    atexit.register(self.on_exit, self.client, self.write_api)
def setup_influxdb():
    client = InfluxDBClient(url=settings.influxdb.url, token=settings.influxdb.token)
    # TODO decide on Batching or Asynchronous:
    # if high sample rate, go with batching;
    # if low, probs go async as it should be able to handle it
    write_api = client.write_api(write_options=WriteOptions(
        write_type=WriteType.batching,
        batch_size=100,
    ))
    return (client, write_api)
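# Illustrative sketch (not from the original source) of the asynchronous option the
# TODO above mentions, assuming the same `settings` object; ASYNCHRONOUS is exported
# from influxdb_client.client.write_api alongside SYNCHRONOUS.
def setup_influxdb_async():
    from influxdb_client import InfluxDBClient
    from influxdb_client.client.write_api import ASYNCHRONOUS

    client = InfluxDBClient(url=settings.influxdb.url, token=settings.influxdb.token)
    # With ASYNCHRONOUS write options, write() returns immediately with a
    # future-like result that can be resolved later via .get().
    write_api = client.write_api(write_options=ASYNCHRONOUS)
    return (client, write_api)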
def import_data(config, data):
    points = []
    for d in data:
        point = create_point(d)
        points.append(point)

    client = InfluxDBClient(url=config['influxdb_url'],
                            token=config['influxdb_token'],
                            org=config['influxdb_org'],
                            debug=False)
    write_api = client.write_api(write_options=WriteOptions(batch_size=100, flush_interval=50))
    write_api.write(bucket=config['influxdb_bucket'], record=points)
    write_api.close()
    print("Imported {} records".format(len(points)))
def test_timeout_as_float(self):
    self.client = InfluxDBClient(url="http://localhost:8088", token="my-token", org="my-org",
                                 timeout=1000.5)
    self.assertEqual(1000.5, self.client.api_client.configuration.timeout)
    with pytest.raises(HTTPError) as e:
        write_api = self.client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket="my-bucket", org="my-org", record="mem,tag=a value=1")
    self.assertIn("Failed to establish a new connection", str(e.value))
def test_TrailingSlashInUrl(self):
    self.client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
    self.assertEqual('http://localhost:8086', self.client.api_client.configuration.host)

    self.client = InfluxDBClient(url="http://localhost:8086/", token="my-token", org="my-org")
    self.assertEqual('http://localhost:8086', self.client.api_client.configuration.host)
def test_ConnectToSelfSignedServer(self):
    self._start_http_server()

    self.client = InfluxDBClient(
        f"https://localhost:{self.httpd.server_address[1]}",
        token="my-token",
        verify_ssl=False)

    health = self.client.health()
    self.assertEqual(health.message, 'ready for queries and writes')
    self.assertEqual(health.status, "pass")
    self.assertEqual(health.name, "influxdb")
def __init__(self, container, connection, topmax, **kwargs):
    self.container = container
    try:
        self.client = InfluxDBClient(url=connection['url'],
                                     token=connection['token'],
                                     org=connection["org"])
        self.query_api = self.client.query_api()
        self._topmax = topmax
        for es in self.container.EntitySet:
            self.bind_entity_set(es)
    except Exception as e:
        logger.info("Failed to initialize Influx OData container")
        logger.exception(str(e))
def OccupancyReport(LocationInventory, startTime):
    # Connects to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Prepare to create a table with maximum occupancy per room
    TABELA_LOCATION = "LocationInventory"
    write_api = client.write_api(write_options=SYNCHRONOUS)

    # Build a record per location
    record = []
    # location_dict = json.loads(LocationInventory)
    for key, value in LocationInventory["rooms"].items():
        _point = Point(TABELA_LOCATION).tag("location", key).field("occupancy", value)
        record.append(_point)

    # Write to temporary table
    write_api.write(bucket=INFLUXDB_DBNAME, record=record)

    # Build max occupancy query
    query = QUERY_MAX_OCCUPANCY % (startTime, TABELA_TOTAL, TABELA_LOCATION)
    tables = client.query_api().query(query)
    client.close()

    # The query result is a list of all tables created by the query, each of type FluxTable; see
    # https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info.
    # Build the result as a list of all records found in the history table.
    result = {"PerHourHistory": [], "PerShiftHistory": []}
    for table in tables:
        for record in table.records:
            if record.values['result'] == 'historyHour':
                subdict = {
                    x: record.values[x]
                    for x in ['location', '_value', 'max', 'year', 'month', 'day', 'hour']
                }
                result["PerHourHistory"].append(subdict)
            elif record.values['result'] == 'historyShift':
                subdict = {
                    x: record.values[x]
                    for x in ['location', '_value', 'max', 'year', 'month', 'day', 'shift']
                }
                result["PerShiftHistory"].append(subdict)
    return result
def main():
    opts = parse_args()

    logging.basicConfig(format="%(levelname)s: %(message)s")

    gstate = dish_common.GlobalState()
    gstate.points = []
    gstate.deferred_points = []
    gstate.timebase_synced = opts.skip_query
    gstate.start_timestamp = None
    gstate.start_counter = None

    if "verify_ssl" in opts.icargs and not opts.icargs["verify_ssl"]:
        # user has explicitly said be insecure, so don't warn about it
        warnings.filterwarnings("ignore", message="Unverified HTTPS request")

    signal.signal(signal.SIGTERM, handle_sigterm)

    if 'token' in opts.icargs:
        from influxdb_client import InfluxDBClient
        from influxdb_client.client.write_api import SYNCHRONOUS
    else:
        from influxdb import InfluxDBClient

    try:
        # attempt to hack around breakage between influxdb-python client and 2.0 server:
        gstate.influx_client = InfluxDBClient(**opts.icargs, headers={"Accept": "application/json"})
    except TypeError:
        # ...unless influxdb-python package version is too old
        gstate.influx_client = InfluxDBClient(**opts.icargs)

    try:
        next_loop = time.monotonic()
        while True:
            rc = loop_body(opts, gstate)
            if opts.loop_interval > 0.0:
                now = time.monotonic()
                next_loop = max(next_loop + opts.loop_interval, now)
                time.sleep(next_loop - now)
            else:
                break
    except Terminated:
        pass
    finally:
        if gstate.points:
            rc = flush_points(opts, gstate)
        gstate.influx_client.close()
        gstate.shutdown()

    sys.exit(rc)
def test_certificate_file(self):
    self._start_http_server()

    self.client = InfluxDBClient(
        f"https://localhost:{self.httpd.server_address[1]}",
        token="my-token",
        verify_ssl=True,
        ssl_ca_cert=f'{os.path.dirname(__file__)}/server.pem')

    health = self.client.health()
    self.assertEqual(health.message, 'ready for queries and writes')
    self.assertEqual(health.status, "pass")
    self.assertEqual(health.name, "influxdb")
def connect_to_database(self):
    """Open connection to our InfluxDB database"""
    self.bucket = "qmet"
    self.org = "NBI"
    token = "6qiWt5JUxv4fwhqjUZKfG7cyMqtaSSevlLC_hsVqG_w09xJH5oUwJPLBLdAyeC5Qe7LgOEmAhaIoLsXLvl8hhg=="
    self.dbclient = InfluxDBClient(url='http://localhost:9999', token=token, org=self.org)
    self.writer = self.dbclient.write_api()
    print('Got access to the InfluxDB database')
    print('*' * 50)
def __init__(self, influx_url, token, org_id, influx_bucket, res, debug=False, verbose=True):
    from influxdb_client import InfluxDBClient

    self.influx_url = influx_url
    self.token = token
    self.org_id = org_id
    self.influx_bucket = influx_bucket
    self.debug = debug
    self.verbose = verbose
    self.res = res
    self.client = InfluxDBClient(url=self.influx_url,
                                 token=self.token,
                                 org=self.org_id,
                                 debug=False)
    self.test = self.test_influx()
    return
def main(health_check_path: str, once: bool, delay_seconds: int):
    """Get the metrics, put them in the database, done."""
    postal_code = _get_secret("POSTAL_CODE", "")
    nest_access_token = _get_secret("NEST_ACCESS_TOKEN", "")
    openweathermap_api_key = _get_secret("OPENWEATHERMAP_API_KEY", "")
    influx_token = _get_secret("INFLUX_TOKEN", "")
    influx_url = _get_secret("INFLUX_URL", "http://localhost:8086")
    influx_bucket = _get_secret("INFLUX_BUCKET", "nest_temperature_forwarder")
    influx_org = _get_secret("INFLUX_ORG", "nest_temperature_forwarder")

    if influx_token:
        client = InfluxDBClient(
            url=influx_url,
            token=influx_token,
            org=influx_org,
        )
        write_api = client.write_api(write_options=SYNCHRONOUS)
    else:
        write_api = None

    _log(
        delay_seconds=delay_seconds,
        postal_code=postal_code,
        use_influx=truth(write_api),
        use_nest=truth(nest_access_token),
        use_weather=truth(openweathermap_api_key),
        level=logging.DEBUG,
    )

    s = scheduler(time.time, time.sleep)

    def do():
        add_data_points(
            write_api,
            influx_bucket,
            health_check_path,
            postal_code,
            nest_access_token,
            openweathermap_api_key,
        )
        s.enter(delay_seconds, 1, do)

    # Get data points; do() schedules itself to run again after delay_seconds
    do()

    # If not running once, start the scheduler
    if not once:
        try:
            s.run()
        except KeyboardInterrupt:
            _log(shutting_down="true")
def compute_mrt_for_simulation_ss(simulation_id: int,
                                  autocorrelation_plot=True) -> MeanResponseTime:
    client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="BBM SpA")
    query_api = client.query_api()
    requests_duration = query_api.query_data_frame(
        'import "experimental"'
        'from(bucket:"k6") '
        '|> range(start: -1y)'
        f'|> filter(fn: (r) => r._measurement == "http_req_duration" and r._field == "value" and r.status == "200" and r.simulation == "{simulation_id}")'
        '|> map(fn:(r) => ({r with _time: experimental.subDuration(d: duration(v: int(v: r._value*1000000.0)), from: r._time)}))'
        '|> sort(columns: ["_time"], desc: false)')
    values = requests_duration['_value']

    if autocorrelation_plot:
        # Plot sample ACF
        plot_sample_autocorrelation(values, simulation_id)

    durations = values.to_numpy()
    print(f'Initial number of samples: {len(durations)}.')

    modulo = len(durations) % NUM_BATCH
    # If the samples are not a multiple of the number of batches,
    # remove a number of initial samples corresponding to the modulo.
    if modulo != 0:
        durations = durations[modulo:]
        print(
            f'Removing {modulo} samples to get an equal number of samples in each batch ({len(durations) / NUM_BATCH}).'
        )

    batches = np.split(durations, NUM_BATCH)
    batches_mean = [np.mean(b) for b in batches]
    grand_batches_mean = np.mean(batches_mean)
    batches_mean_est = sum([(b - grand_batches_mean)**2 for b in batches_mean]) / (NUM_BATCH - 1)

    t_quantile = t.ppf(1 - CI_LEVEL, df=NUM_BATCH - 1)
    ci_interval = t_quantile * math.sqrt(batches_mean_est / NUM_BATCH)
    ci_min = grand_batches_mean - ci_interval
    ci_max = grand_batches_mean + ci_interval

    mrt = MeanResponseTime(grand_batches_mean, ci_min, ci_max, ci_interval)
    print(mrt)
    return mrt
def refreshLatencyGraphs(secondsToRun):
    startTime = datetime.now()
    with open('statsByParentNode.json', 'r') as j:
        parentNodes = json.loads(j.read())

    with open('statsByDevice.json', 'r') as j:
        devices = json.loads(j.read())

    print("Retrieving device statistics")
    devices = getLatencies(devices, secondsToRun)

    print("Computing parent node statistics")
    parentNodes = getParentNodeStats(parentNodes, devices)

    print("Writing data to InfluxDB")
    bucket = influxDBBucket
    org = influxDBOrg
    token = influxDBtoken
    url = "http://localhost:8086"
    client = InfluxDBClient(
        url=url,
        token=token,
        org=org
    )
    write_api = client.write_api(write_options=SYNCHRONOUS)

    queriesToSend = []
    for device in devices:
        if device['tcpLatency'] != None:
            p = Point('Latency').tag("Device", device['hostname']).tag("ParentNode", device['ParentNode']).tag("Type", "Device").field("TCP Latency", device['tcpLatency'])
            queriesToSend.append(p)

    for parentNode in parentNodes:
        if parentNode['tcpLatency'] != None:
            p = Point('Latency').tag("Device", parentNode['parentNodeName']).tag("ParentNode", parentNode['parentNodeName']).tag("Type", "Parent Node").field("TCP Latency", parentNode['tcpLatency'])
            queriesToSend.append(p)

    write_api.write(bucket=bucket, record=queriesToSend)
    print("Added " + str(len(queriesToSend)) + " points to InfluxDB.")
    client.close()

    #with open('statsByParentNode.json', 'w') as infile:
    #    json.dump(parentNodes, infile)

    #with open('statsByDevice.json', 'w') as infile:
    #    json.dump(devices, infile)

    endTime = datetime.now()
    durationSeconds = round((endTime - startTime).total_seconds())
    print("Graphs updated within " + str(durationSeconds) + " seconds.")
def setUp(self) -> None:
    super(TasksApiTest, self).setUp()

    self.organization = self.find_my_org()
    self.authorization = self.add_tasks_authorization(self.organization)
    self.client.close()

    self.client = InfluxDBClient(self.host, self.authorization.token, debug=self.conf.debug)
    self.tasks_api = self.client.tasks_api()

    tasks = self.tasks_api.find_tasks()
    for task in tasks:
        if task.name.endswith("-IT"):
            self.tasks_api.delete_task(task.id)
def initialize_client() -> Optional[InfluxDBClient]:
    influxdb_settings = InfluxdbIntegrationSettings.get_solo()

    if not influxdb_settings.enabled:
        logger.debug('INFLUXDB: Integration disabled in settings (or due to an error previously)')
        return None

    use_secure_connection = influxdb_settings.secure in (
        InfluxdbIntegrationSettings.SECURE_CERT_NONE,
        InfluxdbIntegrationSettings.SECURE_CERT_REQUIRED,
    )

    if use_secure_connection:
        server_base_url = 'https://{}:{}'.format(influxdb_settings.hostname, influxdb_settings.port)
    else:
        server_base_url = 'http://{}:{}'.format(influxdb_settings.hostname, influxdb_settings.port)

    logger.debug('INFLUXDB: Initializing InfluxDB client for "%s"', server_base_url)
    influxdb_client = InfluxDBClient(
        url=server_base_url,
        token=influxdb_settings.api_token,
        verify_ssl=influxdb_settings.secure == InfluxdbIntegrationSettings.SECURE_CERT_REQUIRED,
        timeout=settings.DSMRREADER_CLIENT_TIMEOUT * 1000,  # Ms!
    )
    # logger.debug('INFLUXDB: InfluxDB client/server status: "%s"', influxdb_client.ready().status)

    if influxdb_client.buckets_api().find_bucket_by_name(influxdb_settings.bucket) is None:  # pragma: nocover
        logger.debug('INFLUXDB: Creating InfluxDB bucket "%s"', influxdb_settings.bucket)

        try:
            influxdb_client.buckets_api().create_bucket(
                bucket_name=influxdb_settings.bucket, org=influxdb_settings.organization)
        except Exception as e:
            InfluxdbIntegrationSettings.objects.update(enabled=False)
            logger.error('Failed to instantiate InfluxDB connection, disabling InfluxDB integration')
            raise e

    return influxdb_client