class InfluxDBWriter(multiprocessing.Process):
    """Writer that writes data in batches of 50,000 items."""

    def __init__(self, queue):
        multiprocessing.Process.__init__(self)
        self.queue = queue
        self.client = InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org", debug=False)
        self.write_api = self.client.write_api(
            write_options=WriteOptions(write_type=WriteType.batching, batch_size=50_000, flush_interval=10_000))

    def run(self):
        while True:
            next_task = self.queue.get()
            if next_task is None:
                # Poison pill means terminate
                self.terminate()
                self.queue.task_done()
                break
            self.write_api.write(bucket="my-bucket", record=next_task)
            self.queue.task_done()

    def terminate(self) -> None:
        proc_name = self.name
        print()
        print('Writer: flushing data...')
        self.write_api.__del__()
        self.client.__del__()
        print('Writer {}: closed'.format(proc_name))
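# Illustrative usage sketch (not from the original source): drive InfluxDBWriter
# with a multiprocessing.JoinableQueue -- the run() loop above relies on
# task_done()/join() -- and stop it with a `None` poison pill. The bucket and
# line-protocol records are hypothetical.
import multiprocessing

if __name__ == '__main__':
    queue_ = multiprocessing.JoinableQueue()
    writer = InfluxDBWriter(queue_)
    writer.start()

    for i in range(10):
        queue_.put(f"mem,tag=a value={i}i {i}")  # line-protocol record

    queue_.put(None)  # poison pill: the writer flushes its batches and terminates
    queue_.join()
    writer.join()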
def BestDayReport(startTime):
    # Connect to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Build best-day query
    query = QUERY_BEST_DAY % (startTime, TABELA_TOTAL)
    tables = client.query_api().query(query)
    csv = client.query_api().query_csv(query)
    client.__del__()

    # Query result is a list of all tables created by the query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build result as a list of all found records
    result = []
    for table in tables:
        for record in table.records:
            subdict = {x: record.values[x] for x in ['_value', 'day', 'hour', 'location']}
            result.append(subdict)

    return result
class TestHealthMock(unittest.TestCase):
    def setUp(self) -> None:
        httpretty.enable()
        httpretty.reset()
        self.influxdb_client = InfluxDBClient(url="http://localhost", token="my-token")

    def tearDown(self) -> None:
        self.influxdb_client.__del__()
        httpretty.disable()

    def test_without_retry(self):
        httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=429,
                               adding_headers={'Retry-After': '5', 'Content-Type': 'application/json'},
                               body="{\"message\":\"Health is not working\"}")

        check = self.influxdb_client.health()
        self.assertTrue("Health is not working" in check.message, msg=check.message)
        self.assertEqual(check.status, "fail")
        self.assertEqual(check.name, "influxdb")

        self.assertEqual(1, len(httpretty.httpretty.latest_requests))

    def test_with_retry(self):
        self.influxdb_client.__del__()
        self.influxdb_client = InfluxDBClient(url="http://localhost", token="my-token", retries=Retry())

        httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=200,
                               adding_headers={'Content-Type': 'application/json'},
                               body="{\"message\":\"ready for queries and writes\", \"name\":\"influxdb\", \"status\":\"pass\"}")
        httpretty.register_uri(httpretty.GET, uri="http://localhost/health", status=429,
                               adding_headers={'Retry-After': '1', 'Content-Type': 'application/json'},
                               body="{\"message\":\"Health is not working\"}")

        health = self.influxdb_client.health()
        self.assertEqual(health.message, 'ready for queries and writes')
        self.assertEqual(health.status, "pass")
        self.assertEqual(health.name, "influxdb")

        self.assertEqual(2, len(httpretty.httpretty.latest_requests))
def main():
    parse_row.progress = 0

    url = "https://github.com/influxdata/influxdb-client-python/wiki/data/stock-prices-example.csv"
    response = requests.get(url, stream=True)
    data = rx \
        .from_iterable(DictReader(response.iter_lines(decode_unicode=True))) \
        .pipe(ops.map(lambda row: parse_row(row)))

    client = InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org", debug=False)
    write_api = client.write_api(write_options=WriteOptions(batch_size=50_000, flush_interval=10_000))

    write_api.write(bucket="my-bucket", record=data)
    write_api.__del__()

    query = '''
    from(bucket:"my-bucket")
        |> range(start: 0, stop: now())
        |> filter(fn: (r) => r._measurement == "financial-analysis")
        |> filter(fn: (r) => r.symbol == "AAPL")
        |> filter(fn: (r) => r._field == "close")
        |> drop(columns: ["_start", "_stop", "table", "_field", "_measurement"])
    '''

    result = client.query_api().query_data_frame(query=query)
    print(result.head(100))

    """
    Close client
    """
    client.__del__()
def write_data_to_influx(data_to_write):
    influx_client = InfluxDBClient(url=InfluxDB.url, token=InfluxDB.token, org=InfluxDB.org)
    logging.info("Writing to InfluxDB")
    write_client = influx_client.write_api()
    write_client.write(bucket=InfluxDB.bucket, record=data_to_write)
    write_client.__del__()
    influx_client.__del__()
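# The `InfluxDB` settings object referenced above is not shown in this snippet;
# a minimal stand-in could look like this (values are placeholders and an
# assumption -- the real project may load them from a config file or env vars):
class InfluxDB:
    url = "http://localhost:8086"
    token = "my-token"
    org = "my-org"
    bucket = "my-bucket"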
def get_flux_session():
    client = FluxClient(
        url="http://localhost:8086",
        token="h0zgWrmVbUIqjIyI4A53B0sAY7l5S2YzvmCPskQKFIUcZjNUQxSfA0FO8ejx8wLnpqf8gAtPBJrDNmxXbQay7A==",
        org="sundaya",
        debug=True)
    try:
        yield client
    finally:
        client.__del__()
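# Illustrative consumer of get_flux_session (an assumption, not from the
# original source): the try/yield/finally shape matches a FastAPI dependency,
# where the `finally` block disposes the client after each request. This
# assumes FluxClient is InfluxDBClient (or an alias exposing the same API).
from fastapi import Depends, FastAPI

app = FastAPI()

@app.get("/buckets")
def list_buckets(client=Depends(get_flux_session)):
    # `client` is the open instance yielded by get_flux_session; it is
    # disposed automatically once this handler returns.
    return [bucket.name for bucket in client.buckets_api().find_buckets().buckets]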
def OccupancyReport(LocationInventory, startTime):
    # Connect to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Prepare to create a table with maximum occupancy per room
    TABELA_LOCATION = "LocationInventory"
    write_api = client.write_api(write_options=SYNCHRONOUS)

    # Build a record per location
    record = []
    # location_dict = json.loads(LocationInventory)
    for key, value in LocationInventory["rooms"].items():
        _point = Point(TABELA_LOCATION).tag("location", key).field("occupancy", value)
        record.append(_point)

    # Write to temporary table
    write_api.write(bucket=INFLUXDB_DBNAME, record=record)

    # Build max occupancy query
    query = QUERY_MAX_OCCUPANCY % (startTime, TABELA_TOTAL, TABELA_LOCATION)
    tables = client.query_api().query(query)
    client.__del__()

    # Query result is a list of all tables created by the query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build result as a list of all found records in the history table
    result = {"PerHourHistory": [], "PerShiftHistory": []}
    for table in tables:
        for record in table.records:
            if record.values['result'] == 'historyHour':
                subdict = {x: record.values[x]
                           for x in ['location', '_value', 'max', 'year', 'month', 'day', 'hour']}
                result["PerHourHistory"].append(subdict)
            elif record.values['result'] == 'historyShift':
                subdict = {x: record.values[x]
                           for x in ['location', '_value', 'max', 'year', 'month', 'day', 'shift']}
                result["PerShiftHistory"].append(subdict)

    return result
def test_check_write_permission_by_empty_data(self):
    client = InfluxDBClient(url="http://localhost:9999", token="my-token-wrong", org="my-org")
    write_api = client.write_api(write_options=SYNCHRONOUS)

    with self.assertRaises(ApiException) as cm:
        write_api.write("my-bucket", self.org, b'')
    exception = cm.exception

    self.assertEqual(401, exception.status)
    self.assertEqual("Unauthorized", exception.reason)

    client.__del__()
def send(options, influxdata):
    client = InfluxDBClient(url=options["server"],
                            token=options["token"],
                            org=options["organization"],
                            timeout=6000)
    logger.debug("connection established")
    write_api = client.write_api(write_options=ASYNCHRONOUS)
    write_api.write(options["bucket"], options["organization"], influxdata)
    logger.debug("writing to bucket %s", options["bucket"])
    logger.debug("writing to organization %s", options["organization"])
    logger.debug("writing data %s", influxdata)
    write_api.__del__()
    client.__del__()
    logger.debug("closing connection")
def main():
    # By default the arguments are:
    my_token = 'rbuCuV6gRHPJRPIRrLB3kOp874S5mUywVUGXJIUe_o1bf2HpxSqy7E6VB9ZUHKzMK4vGNqo6g6TZipJ2PIEXog=='
    my_org = "UNIPI"
    bucket = "Bucket"
    interface = "en0"

    if len(sys.argv) == 5:
        my_token = sys.argv[1]
        my_org = sys.argv[2]
        bucket = sys.argv[3]
        interface = sys.argv[4]
    elif len(sys.argv) != 1:
        print("Error: wrong number of arguments")
        exit(1)

    # Query for received bytes
    query1 = 'from(bucket: "' + bucket + '") |> range(start:-1h, stop: now()) ' \
             '|> filter(fn: (r) => r._measurement == "net" )' \
             '|> filter(fn: (r) => r._field == "bytes_recv" )' \
             '|> filter(fn: (r) => r.interface == "' + interface + '" )' \
             '|> derivative(unit: 1s, nonNegative: true, columns: ["_value"])'

    # Query for sent bytes
    query2 = 'from(bucket: "' + bucket + '") |> range(start:-1h, stop: now()) ' \
             '|> filter(fn: (r) => r._measurement == "net" ) ' \
             '|> filter(fn: (r) => r._field == "bytes_sent" ) ' \
             '|> filter(fn: (r) => r.interface == "' + interface + '" ) ' \
             '|> derivative(unit: 1s, nonNegative: true, columns: ["_value"] )'

    client = InfluxDBClient(url="http://localhost:9999", token=my_token, org=my_org)
    query_api = client.query_api()

    data1 = query_api.query_data_frame(query=query1)
    data2 = query_api.query_data_frame(query=query2)

    try:
        plt.plot(data1['_time'], data1['_value'], label='Bytes Received')
        plt.plot(data2['_time'], data2['_value'], label='Bytes Sent')
    except Exception:
        print("Error: check the arguments")
        exit(1)

    plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05), fancybox=True, shadow=True, ncol=5)
    plt.show()

    # Close client
    client.__del__()
    print("close client")
def test(options):
    answer = False
    client = None
    try:
        client = InfluxDBClient(url=options["server"],
                                token=options["token"],
                                org=options["organization"],
                                timeout=10000)
        query_api = client.query_api()
        query_api.query_stream('from(bucket:"ups") |> range(start: -10m)')
        answer = True
    except Exception as e:
        logger.error('could not connect to InfluxDB server: %s.', str(e))
    finally:
        # Guard against the constructor itself raising, which would leave
        # `client` unbound in the original version.
        if client:
            client.__del__()
    return answer
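# Illustrative `options` mapping consumed by send() and test() above
# (placeholder values -- the real project presumably builds this from its
# configuration):
options = {
    "server": "http://localhost:8086",
    "token": "my-token",
    "organization": "my-org",
    "bucket": "ups",  # test() queries the "ups" bucket
}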
class WriteApiTestMock(BaseTest):
    def setUp(self) -> None:
        httpretty.enable()
        httpretty.reset()

        conf = influxdb_client.configuration.Configuration()
        conf.host = "http://localhost"
        conf.debug = False

        self.influxdb_client = InfluxDBClient(url=conf.host, token="my-token")

    def tearDown(self) -> None:
        self.influxdb_client.__del__()
        httpretty.disable()

    def test_writes_synchronous_without_retry(self):
        httpretty.register_uri(httpretty.POST, uri="http://localhost/api/v2/write", status=503)

        self.write_client = self.influxdb_client.write_api(write_options=SYNCHRONOUS)
        with self.assertRaises(ApiException) as cm:
            self.write_client.write("my-bucket", "my-org",
                                    "h2o_feet,location=coyote_creek water_level=1 1")
        exception = cm.exception

        self.assertEqual("Service Unavailable", exception.reason)
        self.assertEqual(1, len(httpretty.httpretty.latest_requests))

    def test_writes_asynchronous_without_retry(self):
        httpretty.register_uri(httpretty.POST, uri="http://localhost/api/v2/write", status=503)

        self.write_client = self.influxdb_client.write_api(write_options=ASYNCHRONOUS)
        with self.assertRaises(ApiException) as cm:
            self.write_client.write("my-bucket", "my-org",
                                    "h2o_feet,location=coyote_creek water_level=1 1").get()
        exception = cm.exception

        self.assertEqual("Service Unavailable", exception.reason)
        self.assertEqual(1, len(httpretty.httpretty.latest_requests))
class InfluxDBWriterToPickle:

    def __init__(self):
        self.client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org", debug=False)
        self.write_api = self.client.write_api(
            write_options=WriteOptions(write_type=WriteType.batching, batch_size=50_000, flush_interval=10_000))

    def write(self, record):
        self.write_api.write(bucket="my-bucket", record=record)

    def terminate(self) -> None:
        self.write_api.__del__()
        self.client.__del__()
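# Minimal usage sketch for InfluxDBWriterToPickle (illustrative; the record
# values are hypothetical). Since the batching write API buffers up to
# 50_000 points, terminate() must be called to flush anything still in memory.
writer = InfluxDBWriterToPickle()
for i in range(5):
    writer.write(f"mem,tag=a value={i}i {i}")
writer.terminate()  # flushes pending batches, then disposes the client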
def influx_client_delete_data(host='http://localhost:8086',
                              measurement='',
                              start_date_str="1970-01-01T00:00:00Z",
                              stop_date_str="2100-02-01T00:00:00Z"):
    """Delete data.

    measurement: name of the measurement (table)
    """
    client = InfluxDBClient(url=host, token="")  # use the host argument rather than a hardcoded URL
    delete_api = client.delete_api()
    try:
        delete_api.delete(start_date_str, stop_date_str, f'_measurement={measurement}',
                          bucket='my-bucket', org='my-org')
    finally:
        client.__del__()
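# Example call (measurement name and time range are hypothetical); the delete
# predicate matches the given measurement exactly:
influx_client_delete_data(measurement='h2o_feet',
                          start_date_str='2020-01-01T00:00:00Z',
                          stop_date_str='2020-02-01T00:00:00Z')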
def get_last_data(self, hostsname=None) -> dict:
    out_data = {}
    if hostsname:
        db_client = InfluxDBClient(url=self.url, token=self.token, org=self.org)
        self.logger.debug(self.query_string % hostsname)
        db_data = db_client.query_api().query_stream(query=(self.query_string % hostsname), org=self.org)
        for record in db_data:
            self.logger.debug(
                f'Time {record["_time"]} Down {record["DownloadBandwidth"]}, '
                f'UploadBandwidth {record["UploadBandwidth"]}, Ping {record["PingLatency"]}')
            out_data = record.values
            out_data["time"] = out_data.pop("_time")
            del out_data["result"]
            del out_data["table"]
        self.logger.debug(out_data)
        db_client.__del__()
    return out_data
class WriterV2(Writer):

    def __init__(self, measurement_name: str, threads_count: int, seconds_count: int, line_protocols_count: int):
        Writer.__init__(self, measurement_name, threads_count, seconds_count, line_protocols_count)
        self.client = InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org", debug=False)
        self.write_api = self.client.write_api(WriteOptions(batch_size=50_000, flush_interval=10_000))

    def write(self, records: List[str]):
        self.write_api.write(bucket="my-bucket", org="my-org", record=records)

    def terminate(self) -> None:
        super().terminate()
        self.client.__del__()
def TraceReport(targetUser, startTime, stopTime):
    # Connect to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Build trace query
    query = QUERY_TRACE % (targetUser, startTime, stopTime, TABELA_TRACE)
    tables = client.query_api().query(query)
    client.__del__()

    # Query result is a list of all tables created by the trace query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build result as a list of all found records in the suspect table
    result = []
    for table in tables:
        for record in table.records:
            subdict = {x: record.values[x] for x in ['local', 'userid']}
            if subdict not in result:
                result.append(subdict)

    return result
class MultiprocessingWriter(multiprocessing.Process):
    """
    The Helper class to write data into InfluxDB in an independent OS process.

    Example:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            def main():
                writer = MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                               write_options=WriteOptions(batch_size=100))
                writer.start()

                for x in range(1, 1000):
                    writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")

                writer.__del__()


            if __name__ == '__main__':
                main()


    How to use with context_manager:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            def main():
                with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                           write_options=WriteOptions(batch_size=100)) as writer:
                    for x in range(1, 1000):
                        writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")


            if __name__ == '__main__':
                main()


    How to handle batch events:
        .. code-block:: python

            from influxdb_client import WriteOptions
            from influxdb_client.client.exceptions import InfluxDBError
            from influxdb_client.client.util.multiprocessing_helper import MultiprocessingWriter


            class BatchingCallback(object):

                def success(self, conf: (str, str, str), data: str):
                    print(f"Written batch: {conf}, data: {data}")

                def error(self, conf: (str, str, str), data: str, exception: InfluxDBError):
                    print(f"Cannot write batch: {conf}, data: {data} due: {exception}")

                def retry(self, conf: (str, str, str), data: str, exception: InfluxDBError):
                    print(f"Retryable error occurs for batch: {conf}, data: {data} retry: {exception}")


            def main():
                callback = BatchingCallback()
                with MultiprocessingWriter(url="http://localhost:8086", token="my-token", org="my-org",
                                           success_callback=callback.success,
                                           error_callback=callback.error,
                                           retry_callback=callback.retry) as writer:

                    for x in range(1, 1000):
                        writer.write(bucket="my-bucket", record=f"mem,tag=a value={x}i {x}")


            if __name__ == '__main__':
                main()

    """

    __started__ = False
    __disposed__ = False

    def __init__(self, **kwargs) -> None:
        """
        Initialize defaults.

        For more information how to initialize the writer see the examples above.

        :param kwargs: arguments are passed into ``__init__`` function of ``InfluxDBClient`` and ``write_api``.
        """
        multiprocessing.Process.__init__(self)
        self.kwargs = kwargs
        self.client = None
        self.write_api = None
        self.queue_ = multiprocessing.Manager().Queue()

    def write(self, **kwargs) -> None:
        """
        Append time-series data into the underlying queue.

        For more information how to pass arguments see the examples above.

        :param kwargs: arguments are passed into ``write`` function of ``WriteApi``
        :return: None
        """
        assert self.__disposed__ is False, 'Cannot write data: the writer is closed.'
        assert self.__started__ is True, 'Cannot write data: the writer is not started.'
        self.queue_.put(kwargs)

    def run(self):
        """Initialize ``InfluxDBClient`` and wait for data to write into InfluxDB."""
        # Initialize Client and Write API
        self.client = InfluxDBClient(**self.kwargs)
        self.write_api = self.client.write_api(write_options=self.kwargs.get('write_options', WriteOptions()),
                                               success_callback=self.kwargs.get('success_callback', _success_callback),
                                               error_callback=self.kwargs.get('error_callback', _error_callback),
                                               retry_callback=self.kwargs.get('retry_callback', _retry_callback))
        # Infinite loop - until poison pill
        while True:
            next_record = self.queue_.get()
            if type(next_record) is _PoisonPill:
                # Poison pill means break the loop
                self.terminate()
                self.queue_.task_done()
                break
            self.write_api.write(**next_record)
            self.queue_.task_done()

    def start(self) -> None:
        """Start independent process for writing data into InfluxDB."""
        super().start()
        self.__started__ = True

    def terminate(self) -> None:
        """
        Cleanup resources in independent process.

        This function **cannot be used** to terminate the ``MultiprocessingWriter``.
        If you want to finish your writes please call: ``__del__``.
        """
        if self.write_api:
            logger.info("flushing data...")
            self.write_api.__del__()
            self.write_api = None
        if self.client:
            self.client.__del__()
            self.client = None
            logger.info("closed")

    def __enter__(self):
        """Enter the runtime context related to this object."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        """Exit the runtime context related to this object."""
        self.__del__()

    def __del__(self):
        """Dispose the client and write_api."""
        if self.__started__:
            self.queue_.put(_PoisonPill())
            self.queue_.join()
            self.join()
            self.queue_ = None
        self.__started__ = False
        self.__disposed__ = True
write_api = client.write_api(write_options=SYNCHRONOUS, point_settings=point_settings)
write_api.write(bucket="my-bucket", record=df, data_frame_measurement_name="financial-analysis-df")

"""
Querying ingested data
"""
query = 'from(bucket:"my-bucket")' \
        ' |> range(start: 0, stop: now())' \
        ' |> filter(fn: (r) => r._measurement == "financial-analysis-df")' \
        ' |> pivot(rowKey:["_time"], columnKey: ["_field"], valueColumn: "_value")' \
        ' |> limit(n:10, offset: 0)'
result = client.query_api().query(query=query)

"""
Processing results
"""
print()
print("=== results ===")
print()
for table in result:
    for record in table.records:
        print('{4}: Open {0}, Close {1}, High {2}, Low {3}'.format(record["VIX Open"], record["VIX Close"],
                                                                   record["VIX High"], record["VIX Low"],
                                                                   record["type"]))

"""
Close client
"""
client.__del__()
class InfluxTelemetry:
    """ Submit all available data to an InfluxDB """

    def __init__(self, config, car, gps, evnotify):
        self._log = logging.getLogger("EVNotiPi/InfluxDB")
        self._log.info("Initializing InfluxDB")

        self._config = config
        self._evn_akey = evnotify._config['akey']
        self._car = car
        self._cartype = car.get_evn_model()
        self._gps = gps
        self._poll_interval = config.get('interval', 60)
        self._batch_size = config.get('batch_size', 10000)
        self._influx = None
        self._iwrite = None

    def start(self):
        """ Start the submission thread """
        self._running = True
        self._influx = InfluxDBClient(url=self._config['url'],
                                      org=self._config['org'],
                                      token=self._config['token'],
                                      enable_gzip=True)
        opts = WriteOptions(batch_size=self._batch_size,
                            flush_interval=self._poll_interval * 1000,
                            jitter_interval=5000)
        self._iwrite = self._influx.write_api(write_options=opts)

        self._car.register_data(self.data_callback)

    def stop(self):
        """ Stop the submission thread """
        self._car.unregister_data(self.data_callback)
        self._running = False
        self._iwrite.__del__()
        self._influx.__del__()
        self._influx = None
        self._iwrite = None

    def data_callback(self, data):
        """ Callback to receive data from "car" """
        self._log.debug("Enqueue...")

        p = {
            "measurement": "telemetry",
            "tags": {
                "cartype": self._cartype,
                "akey": self._evn_akey,
            }
        }

        fields = {k: v if k in INT_FIELD_LIST else float(v)
                  for k, v in data.items() if v is not None}

        if 'gps_device' in data:
            p['tags']['gps_device'] = data['gps_device']

        p["time"] = pyrfc3339.generate(datetime.fromtimestamp(data['timestamp'], timezone.utc))
        p["fields"] = fields

        try:
            self._iwrite.write(bucket=self._config['bucket'],
                               org=self._config['org'],
                               record=[p])
        except Exception as e:
            self._log.warning(str(e))

    def check_thread(self):
        """ Return the status of the thread """
        return self._running
class Monitor:
    def __init__(self, token_file, count, interval, output_type,
                 influx_url, influx_token, influx_org, influx_bucket):
        self.api = common.ApiHelper(token_file)
        self.count = count
        self.forever = count is None
        self.interval = interval
        self.outtype = output_type
        if output_type == "influxdb":
            # init influx client
            self.influx = InfluxDBClient(url=influx_url, token=influx_token, org=influx_org)
            self.influx_writer = self.influx.write_api(write_options=SYNCHRONOUS)
            self.influx_org = influx_org
            self.influx_bucket = influx_bucket

    def influx_write(self, data):
        self.influx_writer.write(self.influx_bucket, self.influx_org, data)

    def get_thermostats(self):
        body = {
            "selection": {
                "selectionType": "registered",
                "selectionMatch": "",
                "includeRuntime": True,
                "includeExtendedRuntime": True,
                "includeSensors": True,
                # TODO: figure out which of these are useful and create parsers
                "includeEquipmentStatus": False,
                "includeEvents": False,
                "includeDevice": False,
                "includeWeather": False,
                "includeProgram": False,
            }
        }
        resp = self.api.request('1/thermostat',
                                params={
                                    "format": "json",
                                    "body": json.dumps(body),
                                },
                                method='get',
                                auth_needed=True)
        if 'page' in resp and resp['page']['totalPages'] != 1:
            raise NotImplementedError("Multi-page responses are not yet handled.")
        return resp['thermostatList']

    def run(self):
        while self.forever or self.count:
            logging.info("Polling thermostat API")
            # TODO: use thermostatSummary to figure out what has changed
            for td in self.get_thermostats():
                therm = parsers.Thermostat(td)
                if self.outtype == "json":
                    print(json.dumps(therm.points, default=str))
                else:
                    assert self.outtype == "influxdb"
                    self.influx_write(therm.points)
            if self.count:
                self.count -= 1
            if self.forever or self.count:
                time.sleep(self.interval)
        self.influx_writer.__del__()
        self.influx.__del__()
class SetupInflux:
    def __init__(self, influx_url, token, org_id, influx_bucket, res, debug=False, verbose=True):
        from influxdb_client import InfluxDBClient

        self.influx_url = influx_url
        self.token = token
        self.org_id = org_id
        self.influx_bucket = influx_bucket
        self.debug = debug
        self.verbose = verbose
        self.res = res
        self.client = InfluxDBClient(url=self.influx_url, token=self.token, org=self.org_id, debug=False)
        self.test = self.test_influx()

    def __del__(self):
        self.client.__del__()

    def get_start_times(self, devices, default_start, dynamic):
        """Get latest InfluxDB timestamps for devices for use as 'start times' for listing log files from S3"""
        from datetime import datetime, timedelta
        from dateutil.tz import tzutc

        default_start_dt = datetime.strptime(default_start, "%Y-%m-%d %H:%M:%S").replace(tzinfo=tzutc())
        device_ids = [device.split("/")[1] for device in devices]
        start_times = []

        if dynamic == False or self.test == 0:
            for device in device_ids:
                last_time = default_start_dt
                start_times.append(last_time)
        elif self.test != 0:
            for device in device_ids:
                influx_time = self.client.query_api().query(
                    f'from(bucket:"{self.influx_bucket}") |> range(start: -100d) |> filter(fn: (r) => r["_measurement"] == "{device}") |> group() |> last()'
                )

                if len(influx_time) == 0:
                    last_time = default_start_dt
                else:
                    last_time = influx_time[0].records[0]["_time"]
                    last_time = last_time + timedelta(seconds=2)

                start_times.append(last_time)

                if self.verbose:
                    print(f"Log files will be fetched for {device} from {last_time}")

        return start_times

    def add_signal_tags(self, df_signal):
        """Advanced: This can be used to add custom tags to the signals based on a
        specific use case logic. In effect, this will split the signal into multiple
        time series.
        """
        tag_columns = ["tag"]

        def event_test(row):
            return "event" if row[0] > 1200 else "no event"

        for tag in tag_columns:
            df_signal[tag] = df_signal.apply(lambda row: event_test(row), axis=1)

        return tag_columns, df_signal

    def write_signals(self, device_id, df_phys):
        """Given a device ID and a dataframe of physical values, resample and write
        each signal to a time series database.

        :param device_id: ID of device (used as the 'measurement name')
        :param df_phys: Dataframe of physical values (e.g. as per output of can_decoder)
        """
        tag_columns = []

        if not df_phys.empty:
            for signal, group in df_phys.groupby("Signal")["Physical Value"]:
                df_signal = group.to_frame().rename(columns={"Physical Value": signal})

                if self.res != "":
                    df_signal = df_signal.resample(self.res).pad().dropna()

                if self.verbose:
                    print(
                        f"Signal: {signal} (mean: {round(df_signal[signal].mean(), 2)} | records: {len(df_signal)} | resampling: {self.res})"
                    )

                # tag_columns, df_signal = self.add_signal_tags(df_signal)

                self.write_influx(device_id, df_signal, tag_columns)

    def write_influx(self, name, df, tag_columns):
        """Helper function to write signal dataframes to InfluxDB"""
        from influxdb_client import WriteOptions

        if self.test == 0:
            print("Please check your InfluxDB credentials")
            return

        _write_client = self.client.write_api(write_options=WriteOptions(
            batch_size=5000,
            flush_interval=1_000,
            jitter_interval=2_000,
            retry_interval=5_000,
        ))

        _write_client.write(self.influx_bucket, record=df,
                            data_frame_measurement_name=name,
                            data_frame_tag_columns=tag_columns)

        if self.verbose:
            print(f"- SUCCESS: {len(df.index)} records of {name} written to InfluxDB\n\n")

        _write_client.__del__()

    def delete_influx(self, device):
        """Given a 'measurement' name (e.g. device ID), delete the related data from InfluxDB"""
        start = "1970-01-01T00:00:00Z"
        stop = "2099-01-01T00:00:00Z"

        delete_api = self.client.delete_api()
        delete_api.delete(
            start,
            stop,
            f'_measurement="{device}"',
            bucket=self.influx_bucket,
            org=self.org_id,
        )

    def test_influx(self):
        """Test the connection to your InfluxDB database"""
        if self.influx_url == "influx_endpoint":
            result = 0
        else:
            try:
                test = self.client.query_api().query(
                    f'from(bucket:"{self.influx_bucket}") |> range(start: -10s)'
                )
                result = 1
            except Exception as err:
                self.print_influx_error(str(err))
                result = 0

        return result

    def print_influx_error(self, err):
        warning = "- WARNING: Unable to write data to InfluxDB |"

        if "CERTIFICATE_VERIFY_FAILED" in err:
            print(f"{warning} check your influx_url ({self.influx_url})")
        elif "organization name" in err:
            print(f"{warning} check your org_id ({self.org_id})")
        elif "unauthorized access" in err:
            print(f"{warning} check your influx_url and token")
        elif "could not find bucket" in err:
            print(f"{warning} check your influx_bucket ({self.influx_bucket})")
        else:
            print(err)
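# Illustrative instantiation of SetupInflux (all values are placeholders;
# `res` is assumed to be a pandas resampling rule such as "1S", matching its
# use in df_signal.resample(self.res) above):
setup = SetupInflux(influx_url="http://localhost:8086", token="my-token",
                    org_id="my-org", influx_bucket="my-bucket", res="1S")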
def influx_client_query_data_frame(host='http://localhost:8086', query=''):
    """Query data and return it as a DataFrame."""
    client = InfluxDBClient(url=host, token="", org="", debug=False)
    result = client.query_api().query_data_frame(org="", query=query)
    client.__del__()
    return result
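# Example call (bucket name is hypothetical); the result is a pandas DataFrame,
# or a list of DataFrames if the query yields multiple tables:
df = influx_client_query_data_frame(query='from(bucket:"my-bucket") |> range(start: -1h)')
print(df.head())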