Example 1
from influxdb_client import InfluxDBClient


def influx_client_delete_data(host='http://localhost:8086',
                              measurement='',
                              start_date_str="1970-01-01T00:00:00Z",
                              stop_date_str="2100-02-01T00:00:00Z"):
    """Delete data.

    measurement: name of the measurement to delete from
    """
    client = InfluxDBClient(url=host, token="")
    delete_api = client.delete_api()
    try:
        # String values in a delete predicate must be double-quoted.
        delete_api.delete(start_date_str,
                          stop_date_str,
                          f'_measurement="{measurement}"',
                          bucket='my-bucket',
                          org='my-org')
    finally:
        client.close()
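A hypothetical call to the helper above; the measurement name is illustrative, and the bucket and org are the placeholders hardcoded in the function body.

# 'cpu_load' is an assumed measurement name, not part of the original example.
influx_client_delete_data(host='http://localhost:8086',
                          measurement='cpu_load')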
Example 2
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self) -> bool:
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2', _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug', _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if not influxdb_options.get('enable', None):
            _LOGGER.warning("InfluxDB support is disabled in the YAML configuration file")
            return True

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url, token=self._token, org=self._org, enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)")
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            multisma2_debug = os.getenv(_DEBUG_ENV_VAR, 'False').lower() in ('true', '1', 't')
            try:
                if multisma2_debug and debug_options.get('delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(multisma2_debug and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'")
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
        except NewConnectionError:
            _LOGGER.error(f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_points(self, points):
        if not self._write_api:
            return False
        try:
            self._write_api.write(bucket=self._bucket, record=points, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_points(): {e}")

    def write_history(self, site, topic):
        if not self._write_api:
            return False

        lookup = LP_LOOKUP.get(topic, None)
        if not lookup:
            _LOGGER.error(f"write_history(): unknown topic '{topic}'")
            return False

        measurement = lookup.get('measurement')
        tags = lookup.get('tags', None)
        field = lookup.get('field', None)
        lps = []
        for inverter in site:
            inverter_name = inverter.pop(0)
            name = inverter_name.get('inverter', 'sunnyboy')
            for history in inverter:
                t = history['t']
                v = history['v']
                if v is None:
                    continue
                lp = f"{measurement}"
                if tags and len(tags):
                    lp += f",{tags[0]}={name}"
                if isinstance(v, int):
                    lp += f" {field}={v}i {t}"
                    lps.append(lp)
                else:
                    _LOGGER.error(
                        f"write_history(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                    continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            _LOGGER.debug(f"write_history({site}, {topic}): {lps}")
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_history(): {e}")

    def write_sma_sensors(self, sensor, timestamp=None):
        if not self._client:
            return False

        ts = timestamp if timestamp is not None else int(time.time())
        lps = []
        for old_point in sensor:
            point = old_point.copy()
            topic = point.pop('topic', None)
            point.pop('precision', None)
            if topic:
                lookup = LP_LOOKUP.get(topic, None)
                if not lookup:
                    _LOGGER.error(f"write_sma_sensors(): unknown topic '{topic}'")
                    continue

                if not lookup.get('output', False):
                    continue

                if topic == 'production/today':
                    day = datetime.datetime.fromtimestamp(ts).date()
                    dt = datetime.datetime.combine(day, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/month':
                    month = datetime.date.fromtimestamp(ts).replace(day=1)
                    dt = datetime.datetime.combine(month, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/year':
                    year = datetime.date.fromtimestamp(ts).replace(month=1, day=1)
                    dt = datetime.datetime.combine(year, datetime.time(0, 0))
                    ts = int(dt.timestamp())

                measurement = lookup.get('measurement')
                tags = lookup.get('tags', None)
                for k, v in point.items():
                    field = lookup.get('field')
                    # sample: dc_measurements
                    lp = f'{measurement}'
                    if tags and len(tags):
                        # sample: dc_measurements,_inverter=sb71
                        lp += f',{tags[0]}={k}'
                    if not field:
                        field = k
                    if isinstance(v, int):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v}i {ts}'
                        lps.append(lp)
                    elif isinstance(v, float):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v} {ts}'
                        lps.append(lp)
                    elif isinstance(v, dict):
                        lp_prefix = f'{lp}'
                        for k1, v1 in v.items():
                            # sample: dc_measurements,_inverter=sb71
                            lp = f'{lp_prefix}'
                            if tags and len(tags) > 1:
                                # sample: dc_measurements,_inverter=sb71,_string=a
                                lp += f',{tags[1]}={k1}'
                            if isinstance(v1, int):
                                # sample: dc_measurements,_inverter=sb71,_string=a power=1000 1556813561098
                                lp += f' {field}={v1}i {ts}'
                                lps.append(lp)
                            elif isinstance(v1, float):
                                # sample: dc_measurements,_inverter=sb71,_string=a current=0.23 1556813561098
                                lp += f' {field}={v1} {ts}'
                                lps.append(lp)
                            else:
                                _LOGGER.error(
                                    f"write_sma_sensors(): unanticipated dictionary type '{type(v1)}' in measurement '{measurement}/{field}'")
                    else:
                        _LOGGER.error(
                            f"write_sma_sensors(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                        continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_sma_sensors(): {e}")

    def delete_bucket(self):
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                bucket = buckets_api.create_bucket(
                    bucket_name=self._bucket, org_id=self._org, retention_rules=None, org=None)
                if bucket:
                    _LOGGER.info(f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}")
        except NewConnectionError:
            raise
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in connect_bucket(): {e}")
Example 3
class DeleteApiTest(BaseTest):
    def setUp(self) -> None:
        super(DeleteApiTest, self).setUp()
        response = self.buckets_api.find_buckets()

        for bucket in response.buckets:
            if bucket.name.endswith("_IT"):
                print("Delete bucket: ", bucket.name)
                self.buckets_api.delete_bucket(bucket)

        self.bucket = self.create_test_bucket()
        self.organization = self.find_my_org()

        resource = PermissionResource(type="buckets",
                                      org_id=self.organization.id,
                                      id=self.bucket.id)
        read_bucket = Permission(resource=resource, action="read")
        write_bucket = Permission(resource=resource, action="write")

        authorization = self.client.authorizations_api().create_authorization(
            org_id=self.organization.id,
            permissions=[read_bucket, write_bucket])
        self.auth_token = authorization.token
        self.client.close()
        self.client = InfluxDBClient(url=self.host,
                                     token=self.auth_token,
                                     debug=True,
                                     org=self.org)
        self.delete_api = self.client.delete_api()

    def test_delete_buckets(self):

        self._write_data()

        q = f'from(bucket:\"{self.bucket.name}\") |> range(start: 1970-01-01T00:00:00.000000001Z)'
        print(q)
        flux_tables = self.client.query_api().query(query=q,
                                                    org=self.organization.id)
        self.assertEqual(len(flux_tables), 1)
        self.assertEqual(len(flux_tables[0].records), 12)

        start = "1970-01-01T00:00:00.000000001Z"
        stop = "1970-01-01T00:00:00.000000012Z"
        self.delete_api.delete(start,
                               stop,
                               "",
                               bucket=self.bucket.id,
                               org=self.organization.id)

        flux_tables2 = self.client.query_api().query(
            f'from(bucket:"{self.bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)',
            org=self.organization.id)
        self.assertEqual(len(flux_tables2), 0)

    def test_delete_buckets_by_name(self):

        self._write_data()

        q = f'from(bucket:\"{self.bucket.name}\") |> range(start: 1970-01-01T00:00:00.000000001Z)'
        print(q)
        flux_tables = self.client.query_api().query(query=q,
                                                    org=self.organization.id)
        self.assertEqual(len(flux_tables), 1)
        self.assertEqual(len(flux_tables[0].records), 12)

        start = "1970-01-01T00:00:00.000000001Z"
        stop = "1970-01-01T00:00:00.000000012Z"
        self.delete_api.delete(start,
                               stop,
                               "",
                               bucket=self.bucket.name,
                               org=self.organization.name)

        flux_tables2 = self.client.query_api().query(
            f'from(bucket:"{self.bucket.name}") |> range(start: 1970-01-01T00:00:00.000000001Z)',
            org=self.organization.id)
        self.assertEqual(len(flux_tables2), 0)

    def _write_data(self):

        write_api = self.client.write_api(write_options=SYNCHRONOUS)
        p1 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 7.0).time(1)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=p1)

        p2 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 8.0).time(2)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=p2)

        p3 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 9.0).time(3)
        p4 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 10.0).time(4)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=[p3, p4])

        p5 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 11.0).time(5)
        p6 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 12.0).time(6)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=[p5, p6])

        p7 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 8.0).time(7)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=p7)
        p8 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 9.0).time(8)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=p8)

        p9 = Point(measurement_name="h2o").tag("location",
                                               "coyote_creek").field(
                                                   "watter_level", 9.0).time(9)
        p10 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 11.0).time(10)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=[p9, p10])

        p11 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 11.0).time(11)
        p12 = Point(measurement_name="h2o").tag(
            "location", "coyote_creek").field("watter_level", 13.0).time(12)
        write_api.write(bucket=self.bucket.name,
                        org=self.organization.name,
                        record=[p11, p12])
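The tests above delete with an empty predicate (everything in the time range); a fragment that would fit inside one of those tests, sketching a more selective delete:

# Restrict the delete to one measurement and tag value; string values in a
# delete predicate are double-quoted.
self.delete_api.delete("1970-01-01T00:00:00.000000001Z",
                       "1970-01-01T00:00:00.000000012Z",
                       '_measurement="h2o" AND location="coyote_creek"',
                       bucket=self.bucket.name,
                       org=self.organization.name)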
Example 4
import webbrowser as web  # assumption: 'web' below is the standard-library webbrowser module

from easysnmp import Session
from influxdb_client import InfluxDBClient
from influxdb_client.client.write_api import SYNCHRONOUS

from conf import *  # presumably provides INFLUXDB_TOKEN and INFLUXDB_ORGANIZATION

# snmp constants
HOSTNAME = 'localhost'
COMMUNITY = 'public'
OID_RAM_UNUSED = 'iso.3.6.1.4.1.2021.4.11.0'
OID_DISK_USAGE = 'iso.3.6.1.4.1.2021.9.1.9.1'
OID_CPU_USAGE = 'iso.3.6.1.2.1.25.3.3.1.2'  #...1.2.core_no

# influxdb settings
client = InfluxDBClient(url="http://localhost:8086",
                        token=INFLUXDB_TOKEN,
                        org=INFLUXDB_ORGANIZATION)
write_api = client.write_api(write_options=SYNCHRONOUS)
delete_api = client.delete_api()
web.open_new_tab(f'http://{HOSTNAME}:8086')

# Initializing snmp session
session = Session(hostname=HOSTNAME, community=COMMUNITY, version=1)

print('Running...')
print('Press ctrl+c to stop monitoring ')

while True:
    cpus = session.walk(OID_CPU_USAGE)
    nCores = len(cpus)
    load_sum = 0
    for c in cpus:
        load_sum += int(c.value)
    # print( f'DEBUG >>> cores: {nCores} - load tot: {load_sum}')
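The loop body above is truncated; a sketch of how one iteration could finish, assuming the imports sit at the top of the script. The bucket name 'snmp', the measurement 'system', and the 10-second poll interval are assumptions, not part of the original code.

from influxdb_client import Point
import time

# These lines belong inside the 'while True:' loop, after load_sum is computed.
point = (Point("system")
         .tag("host", HOSTNAME)
         .field("cpu_cores", nCores)
         .field("cpu_load_total", load_sum))
write_api.write(bucket="snmp", record=point)  # 'snmp' is an assumed bucket name
time.sleep(10)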
Example 5
class SetupInflux:
    def __init__(self,
                 influx_url,
                 token,
                 org_id,
                 influx_bucket,
                 res,
                 debug=False,
                 verbose=True):
        from influxdb_client import InfluxDBClient

        self.influx_url = influx_url
        self.token = token
        self.org_id = org_id
        self.influx_bucket = influx_bucket
        self.debug = debug
        self.verbose = verbose
        self.res = res
        self.client = InfluxDBClient(url=self.influx_url,
                                     token=self.token,
                                     org=self.org_id,
                                     debug=False)
        self.test = self.test_influx()
        return

    def __del__(self):
        self.client.close()

    def get_start_times(self, devices, default_start, dynamic):
        """Get latest InfluxDB timestamps for devices for use as 'start times' for listing log files from S3"""
        from datetime import datetime, timedelta
        from dateutil.tz import tzutc

        default_start_dt = datetime.strptime(
            default_start, "%Y-%m-%d %H:%M:%S").replace(tzinfo=tzutc())
        device_ids = [device.split("/")[1] for device in devices]
        start_times = []

        if dynamic == False or self.test == 0:
            for device in device_ids:
                last_time = default_start_dt
                start_times.append(last_time)
        elif self.test != 0:
            for device in device_ids:
                influx_time = self.client.query_api().query(
                    f'from(bucket:"{self.influx_bucket}") |> range(start: -100d) |> filter(fn: (r) => r["_measurement"] == "{device}") |> group() |> last()'
                )

                if len(influx_time) == 0:
                    last_time = default_start_dt
                else:
                    last_time = influx_time[0].records[0]["_time"]
                    last_time = last_time + timedelta(seconds=2)

                start_times.append(last_time)

                if self.verbose:
                    print(
                        f"Log files will be fetched for {device} from {last_time}"
                    )

        return start_times

    def add_signal_tags(self, df_signal):
        """Advanced: This can be used to add custom tags to the signals
        based on a specific use case logic. In effect, this will
        split the signal into multiple timeseries
        """
        tag_columns = ["tag"]

        def event_test(row):
            return "event" if row[0] > 1200 else "no event"

        for tag in tag_columns:
            df_signal[tag] = df_signal.apply(lambda row: event_test(row),
                                             axis=1)

        return tag_columns, df_signal

    def write_signals(self, device_id, df_phys):
        """Given a device ID and a dataframe of physical values,
        resample and write each signal to a time series database

        :param device_id:   ID of device (used as the 'measurement name')
        :param df_phys:     Dataframe of physical values (e.g. as per output of can_decoder)
        """
        tag_columns = []

        if not df_phys.empty:
            for signal, group in df_phys.groupby("Signal")["Physical Value"]:
                df_signal = group.to_frame().rename(
                    columns={"Physical Value": signal})

                if self.res != "":
                    df_signal = df_signal.resample(self.res).pad().dropna()

                if self.verbose:
                    print(
                        f"Signal: {signal} (mean: {round(df_signal[signal].mean(),2)} | records: {len(df_signal)} | resampling: {self.res})"
                    )

                # tag_columns, df_signal = self.add_signal_tags(df_signal)

                self.write_influx(device_id, df_signal, tag_columns)

    def write_influx(self, name, df, tag_columns):
        """Helper function to write signal dataframes to InfluxDB"""
        from influxdb_client import WriteOptions

        if self.test == 0:
            print("Please check your InfluxDB credentials")
            return

        _write_client = self.client.write_api(write_options=WriteOptions(
            batch_size=5000,
            flush_interval=1_000,
            jitter_interval=2_000,
            retry_interval=5_000,
        ))

        _write_client.write(self.influx_bucket,
                            record=df,
                            data_frame_measurement_name=name,
                            data_frame_tag_columns=tag_columns)

        if self.verbose:
            print(
                f"- SUCCESS: {len(df.index)} records of {name} written to InfluxDB\n\n"
            )

        _write_client.close()

    def delete_influx(self, device):
        """Given a 'measurement' name (e.g. device ID), delete the related data from InfluxDB"""
        start = "1970-01-01T00:00:00Z"
        stop = "2099-01-01T00:00:00Z"

        delete_api = self.client.delete_api()
        delete_api.delete(
            start,
            stop,
            f'_measurement="{device}"',
            bucket=self.influx_bucket,
            org=self.org_id,
        )

    def test_influx(self):
        """Test the connection to your InfluxDB database"""
        if self.influx_url == "influx_endpoint":
            result = 0
        else:
            try:
                test = self.client.query_api().query(
                    f'from(bucket:"{self.influx_bucket}") |> range(start: -10s)'
                )
                result = 1
            except Exception as err:
                self.print_influx_error(str(err))
                result = 0

        return result

    def print_influx_error(self, err):
        warning = "- WARNING: Unable to write data to InfluxDB |"

        if "CERTIFICATE_VERIFY_FAILED" in err:
            print(f"{warning} check your influx_url ({self.influx_url})")
        elif "organization name" in err:
            print(f"{warning} check your org_id ({self.org_id})")
        elif "unauthorized access" in err:
            print(f"{warning} check your influx_url and token")
        elif "could not find bucket" in err:
            print(f"{warning} check your influx_bucket ({self.influx_bucket})")
        else:
            print(err)
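Hypothetical usage of SetupInflux; the URL, token, org, bucket, and the '1S' resampling resolution are placeholders, not values from the original.

setup = SetupInflux(influx_url="http://localhost:8086",
                    token="my-token",
                    org_id="my-org",
                    influx_bucket="my-bucket",
                    res="1S")
# Delete all data previously written for this device's measurement.
setup.delete_influx("device_id_1")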
Example 6
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self):
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2',
                                                _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug',
                                             _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if len(influxdb_options.keys()) == 0:
            raise FailedInitialization("missing 'influxdb2' options")

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url,
                                          token=self._token,
                                          org=self._org,
                                          enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)"
                )
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            cs_esphome_debug = os.getenv(_DEBUG_ENV_VAR,
                                         'False').lower() in ('true', '1', 't')
            try:
                if cs_esphome_debug and debug_options.get(
                        'delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(
                        f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(
                        cs_esphome_debug
                        and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(
                        f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'"
                    )
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(
                f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'"
            )
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            self._client = None
        except NewConnectionError:
            _LOGGER.error(
                f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(
                f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_point(self, measurement, tags, field, value, timestamp=None):
        """Write a single sensor to the database."""
        timestamp = timestamp if timestamp is not None else int(time.time())
        lp_tags = ''
        separator = ''
        for tag in tags:
            lp_tags += f"{separator}{tag.get('t')}={tag.get('v')}"
            separator = ','
        lp = f"{measurement}," + lp_tags + f" {field}={value} {timestamp}"

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=lp,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_point(): {e}")

    def write_points(self, points):
        """Write a list of points to the database."""
        try:
            self._write_api.write(bucket=self._bucket,
                                  record=points,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_points(): {e}")

    def write_batch_sensors(self, batch_sensors, timestamp=None):
        """Write a batch of sensors to the database."""

        if len(batch_sensors) == 0:
            return

        timestamp = timestamp if timestamp is not None else int(time.time())

        batch = []
        for record in batch_sensors:
            sensor = record.get('sensor', None)
            state = record.get('state', None)
            measurement = sensor.get('measurement', None)
            device = sensor.get('device', None)
            location = sensor.get('location', None)
            precision = sensor.get('precision', None)
            if measurement is None or device is None:
                raise InfluxDBFormatError(
                    "'measurement' and/or 'device' are required")

            location_tag = '' if not location or not len(
                location) else f',_location={location}'
            device_tag = f',_device={device}'
            value = round(
                state, precision) if ((precision is not None)
                                      and isinstance(state, float)) else state
            lp = f'{measurement}{device_tag}{location_tag} sample={value} {timestamp}'
            batch.append(lp)

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=batch,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_batch_sensors(): {e}")

    def delete_bucket(self):
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                bucket = buckets_api.create_bucket(bucket_name=self._bucket,
                                                   org_id=self._org,
                                                   retention_rules=None,
                                                   org=None)
                if bucket:
                    _LOGGER.info(
                        f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in connect_bucket(): {e}")
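A sketch of a write_point() call against the class above, assuming an instance whose start() has already succeeded; tags are passed as a list of {'t': key, 'v': value} dicts, matching the loop in write_point(), and all values here are illustrative placeholders.

# 'influxdb' is an already-started instance of the InfluxDB class above.
influxdb.write_point(measurement='sensors',
                     tags=[{'t': '_device', 'v': 'esp32_1'},
                           {'t': '_location', 'v': 'garage'}],
                     field='sample',
                     value=21.5)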
Example 7
with open(authFile) as f:
    try:
        influxAuth = json.load(f)
    except json.JSONDecodeError:
        logger.error('Failed to parse auth file : %s' % authFile)
        exit(-1)

logger.info('Using auth token : %s' % influxAuth['token'])

influxClient = InfluxDBClient(url="http://localhost:9999",
                              token=influxAuth['token'],
                              org=influxOrg,
                              enable_gzip=True)

writeClient = influxClient.write_api(write_options=SYNCHRONOUS)
deleteClient = influxClient.delete_api()

logger.info('Wiping influx bucket %s' % influxBucket)

startDate = '%sZ' % datetime.datetime(year=2019, month=10, day=1).isoformat()
endDate = '%sZ' % datetime.datetime.utcnow().replace(microsecond=0).isoformat()

logger.info(startDate)
logger.info(endDate)

deleteClient.delete(start=startDate,
                    stop=endDate,
                    predicate='',
                    bucket_id=influxAuth['bucket_id'],
                    org_id=influxAuth['org_id'])
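The final delete above addresses the bucket and organization by ID; the same call can also be made with names, as the other examples do (the names below are placeholders):

# Equivalent wipe addressed by bucket and organization name rather than ID.
deleteClient.delete(start=startDate,
                    stop=endDate,
                    predicate='',
                    bucket='my-bucket',
                    org='my-org')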