Example #1
def initialize_client() -> Optional[InfluxDBClient]:
    influxdb_settings = InfluxdbIntegrationSettings.get_solo()

    if not influxdb_settings.enabled:
        logger.debug(
            'INFLUXDB: Integration disabled in settings (or due to an earlier error)'
        )
        return None

    use_secure_connection = influxdb_settings.secure in (
        InfluxdbIntegrationSettings.SECURE_CERT_NONE,
        InfluxdbIntegrationSettings.SECURE_CERT_REQUIRED,
    )

    scheme = 'https' if use_secure_connection else 'http'
    server_base_url = '{}://{}:{}'.format(
        scheme, influxdb_settings.hostname, influxdb_settings.port)

    logger.debug('INFLUXDB: Initializing InfluxDB client for "%s"',
                 server_base_url)

    influxdb_client = InfluxDBClient(
        url=server_base_url,
        token=influxdb_settings.api_token,
        verify_ssl=(influxdb_settings.secure ==
                    InfluxdbIntegrationSettings.SECURE_CERT_REQUIRED),
        timeout=settings.DSMRREADER_CLIENT_TIMEOUT * 1000,  # Ms!
    )
    # logger.debug('INFLUXDB: InfluxDB client/server status: "%s"', influxdb_client.ready().status)

    if influxdb_client.buckets_api().find_bucket_by_name(
            influxdb_settings.bucket) is None:  # pragma: nocover
        logger.debug('INFLUXDB: Creating InfluxDB bucket "%s"',
                     influxdb_settings.bucket)

        try:
            influxdb_client.buckets_api().create_bucket(
                bucket_name=influxdb_settings.bucket,
                org=influxdb_settings.organization)
        except Exception:
            InfluxdbIntegrationSettings.objects.update(enabled=False)
            logger.error(
                'Failed to create InfluxDB bucket, disabling InfluxDB integration'
            )
            raise

    return influxdb_client
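
A minimal usage sketch for initialize_client() (hypothetical caller, not part
of the original module; assumes influxdb_client.Point and SYNCHRONOUS write
options are imported, and the bucket name matches the configured settings):

# Hypothetical caller -- the bucket and field names are illustrative.
client = initialize_client()
if client is not None:
    write_api = client.write_api(write_options=SYNCHRONOUS)
    write_api.write(bucket='dsmrreader',
                    record=Point('electricity').field('delivered', 0.5))
    client.close()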
Example #2
class BaseTest(unittest.TestCase):
    def setUp(self) -> None:
        self.conf = influxdb_client.configuration.Configuration()

        self.host = os.getenv('INFLUXDB_V2_URL', "http://localhost:8086")
        self.debug = False
        self.auth_token = os.getenv('INFLUXDB_V2_TOKEN', "my-token")
        self.org = os.getenv('INFLUXDB_V2_ORG', "my-org")

        self.client = InfluxDBClient(url=self.host,
                                     token=self.auth_token,
                                     debug=self.debug,
                                     org=self.org)
        self.api_client = self.client.api_client

        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()
        self.users_api = self.client.users_api()
        self.organizations_api = self.client.organizations_api()
        self.authorizations_api = self.client.authorizations_api()
        self.labels_api = self.client.labels_api()

        self.my_organization = self.find_my_org()

    def tearDown(self) -> None:
        self.client.close()

    def create_test_bucket(self):
        bucket_name = generate_bucket_name()
        bucket = self.buckets_api.create_bucket(bucket_name=bucket_name,
                                                org=self.my_organization,
                                                description=bucket_name +
                                                "description")
        return bucket

    def delete_test_bucket(self, bucket):
        return self.buckets_api.delete_bucket(bucket)

    def find_my_org(self) -> Organization:
        return self.client.organizations_api().find_organizations(
            org=self.org)[0]

    @staticmethod
    def log(args):
        print(">>>", args)

    @staticmethod
    def generate_name(prefix):
        assert prefix != "" and prefix is not None
        return prefix + str(datetime.datetime.now().timestamp()) + "-IT"

    @classmethod
    def retention_rule(cls) -> BucketRetentionRules:
        return BucketRetentionRules(type='expire', every_seconds=3600)

    def assertEqualIgnoringWhitespace(self, first, second, msg=None) -> None:
        whitespace_pattern = re.compile(r"\s+")
        self.assertEqual(whitespace_pattern.sub("", first),
                         whitespace_pattern.sub("", second),
                         msg=msg)
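
A hypothetical subclass sketch showing how this fixture is meant to be used
(the test body is illustrative and not part of the original suite):

class BucketsApiSmokeTest(BaseTest):  # hypothetical example subclass
    def test_create_and_delete_bucket(self):
        bucket = self.create_test_bucket()
        self.assertIsNotNone(bucket.id)
        self.delete_test_bucket(bucket)
        self.assertIsNone(self.buckets_api.find_bucket_by_name(bucket.name))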
Example #3
class InfluxDBLogger:
    def __init__(
        self,
        bucket_name=BUCKET,
        batch_size=LOG_BATCH_SIZE,
        data_retention=3600,
    ):
        self.organization = ORGANIZATION
        self.client = InfluxDBClient(url=INFLUXDB_URL,
                                     token=INFLUXDB_TOKEN,
                                     org=self.organization)
        self.batch_size = batch_size
        self.bucket_name = bucket_name

        self.write_api = self.client.write_api(write_options=WriteOptions(
            batch_size=self.batch_size))
        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()
        bucket = self.buckets_api.find_bucket_by_name(self.bucket_name)
        if bucket is None:
            logger.warning(f"Bucket {self.bucket_name!r} not found. "
                           f"Creating a bucket {self.bucket_name!r}.")
            retention_rules = None
            if data_retention is not None:
                retention_rules = BucketRetentionRules(
                    type="expire", every_seconds=data_retention)
            self.buckets_api.create_bucket(
                bucket_name=self.bucket_name,
                retention_rules=retention_rules,
                org=self.organization,
            )

    def send_event(self, record_type, message):
        point = Point(record_type)
        for key, value in message.items():
            point = point.field(key, value)
        self.write_api.write(bucket=self.bucket_name, record=point)

    def get_events(self, record_type):
        query = '''
            from(bucket: currentBucket)
            |> range(start: -5m, stop: now())
            |> filter(fn: (r) => r._measurement == recordType)
            |> pivot(rowKey:["_time"], columnKey: ["_field"], \
                valueColumn: "_value")
        '''
        params = {"currentBucket": self.bucket_name, "recordType": record_type}
        tables = self.query_api.query(query=query, params=params)
        if len(tables) > 0:
            table, *_ = tables
            events = table.records
        else:
            events = []
        return events
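
A minimal usage sketch (assumes the module-level BUCKET, LOG_BATCH_SIZE,
ORGANIZATION, INFLUXDB_URL, and INFLUXDB_TOKEN constants are configured; the
event payload is illustrative):

db_logger = InfluxDBLogger()
db_logger.send_event('deploy', {'status': 'ok', 'duration_s': 12.5})
# With a batching write_api, a just-written event may not be queryable
# immediately; a short wait or an explicit flush may be needed.
for event in db_logger.get_events('deploy'):
    print(event['status'], event['duration_s'])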
Example #4
    def _connect(self):
        while not self.write_api:
            client = InfluxDBClient(url=self.url,
                                    token=self.auth_token,
                                    org=self.org)

            # get the orgID from the name:
            try:
                organizations_api = client.organizations_api()
                orgs = organizations_api.find_organizations()
            except Exception:
                self.client = None
                logging.warning('Error connecting to the InfluxDB API. '
                                'Please confirm that InfluxDB is running and '
                                'that the authentication token is correct. '
                                'Sleeping before trying again.')
                time.sleep(5)
                continue

            # Look up the organization id for our org
            our_org = next((org for org in orgs if org.name == self.org), None)
            if not our_org:
                logging.fatal('Cannot find org "%s" in InfluxDB', self.org)
                raise RuntimeError('Cannot find org "%s" in InfluxDB' %
                                   self.org)
            self.org_id = our_org.id

            # get the bucketID from the name:
            bucket_api = client.buckets_api()
            bucket = bucket_api.find_bucket_by_name(self.bucket_name)

            # if the bucket does not exist then try to create it
            if bucket:
                self.bucket_id = bucket.id
            else:
                try:
                    logging.info('Creating new bucket for: %s',
                                 self.bucket_name)
                    new_bucket = bucket_api.create_bucket(
                        bucket_name=self.bucket_name, org_id=self.org_id)
                    self.bucket_id = new_bucket.id
                except Exception:
                    logging.fatal('Cannot create InfluxDB bucket "%s"',
                                  self.bucket_name)
                    raise RuntimeError('Cannot create InfluxDB bucket "%s"' %
                                       self.bucket_name)

            self.write_api = client.write_api(write_options=ASYNCHRONOUS)
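
With ASYNCHRONOUS write options, write() does not block; it returns an
AsyncResult a caller can wait on explicitly. A short sketch (the measurement,
tag, and field below are illustrative):

# Hypothetical follow-up call elsewhere in the same class:
async_result = self.write_api.write(bucket=self.bucket_name, org=self.org,
                                    record='cpu,host=web01 usage=0.64')
async_result.get()  # block until the asynchronous write has been handled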
Example #5
def setup(request):
    print("Load testing data")

    client = InfluxDBClient(url=url, token=token, org=my_org, debug=False)
    write_api = client.write_api()
    buckets_api = client.buckets_api()
    query_api = client.query_api()

    org = find_my_org(client, my_org)

    bucket = buckets_api.find_bucket_by_name(bucket_name=test_bucket)
    if bucket is not None:
        buckets_api.delete_bucket(bucket=bucket)

    bucket = buckets_api.create_bucket(bucket_name=test_bucket, org_id=org.id)

    num_records = 10
    num_series = 10

    today = datetime.utcnow()
    print("*** Write test series ***")
    for loc in range(num_series):
        for i in range(num_records):
            time_ = today - timedelta(hours=i + 1)
            point = Point(measurement_name="h2o_feet") \
                .time(time_) \
                .field("water_level", float(i)) \
                .tag("location", "location_" + str(loc)) \
                .tag("country", "country_" + str(loc))
            write_api.write(bucket=bucket.name, record=point)

    time.sleep(1)

    query = 'from(bucket:"{0}") |> range(start: 1970-01-01T00:00:00.000000001Z)'.format(
        test_bucket)

    flux_result = query_api.query(query)

    assert len(flux_result) == num_series
    records = flux_result[0].records
    assert len(records) == num_records

    ip.run_line_magic("load_ext", "flux")
    ip.run_line_magic("flux",
                      "http://localhost:9999 --token my-token --org my-org")
    request.addfinalizer(cleanup)
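
The cleanup finalizer registered above is not shown; a hypothetical version
would drop the test bucket and close the client (in practice the client and
buckets_api handles would need to be shared at module scope):

def cleanup():  # hypothetical finalizer mirroring the names used in setup()
    bucket = buckets_api.find_bucket_by_name(bucket_name=test_bucket)
    if bucket is not None:
        buckets_api.delete_bucket(bucket=bucket)
    client.close()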
Example #6
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self) -> bool:
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2', _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug', _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if not influxdb_options.get('enable', None):
            _LOGGER.warning("InfluxDB support is disabled in the YAML configuration file")
            return True

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url, token=self._token, org=self._org, enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)")
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            multisma2_debug = os.getenv(_DEBUG_ENV_VAR, 'False').lower() in ('true', '1', 't')
            try:
                if multisma2_debug and debug_options.get('delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(multisma2_debug and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'")
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
        except NewConnectionError:
            _LOGGER.error(f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_points(self, points):
        if not self._write_api:
            return False
        try:
            self._write_api.write(bucket=self._bucket, record=points, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_points(): {e}")

    def write_history(self, site, topic):
        if not self._write_api:
            return False

        lookup = LP_LOOKUP.get(topic, None)
        if not lookup:
            _LOGGER.error(f"write_history(): unknown topic '{topic}'")
            return False

        measurement = lookup.get('measurement')
        tags = lookup.get('tags', None)
        field = lookup.get('field', None)
        lps = []
        for inverter in site:
            inverter_name = inverter.pop(0)
            name = inverter_name.get('inverter', 'sunnyboy')
            for history in inverter:
                t = history['t']
                v = history['v']
                if v is None:
                    continue
                lp = f"{measurement}"
                if tags and len(tags):
                    lp += f",{tags[0]}={name}"
                if isinstance(v, int):
                    lp += f" {field}={v}i {t}"
                    lps.append(lp)
                else:
                    _LOGGER.error(
                        f"write_history(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                    continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            _LOGGER.debug(f"write_history({site}, {topic}): {lps}")
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_history(): {e}")

    def write_sma_sensors(self, sensor, timestamp=None):
        if not self._client:
            return False

        ts = timestamp if timestamp is not None else int(time.time())
        lps = []
        for old_point in sensor:
            point = old_point.copy()
            topic = point.pop('topic', None)
            point.pop('precision', None)
            if topic:
                lookup = LP_LOOKUP.get(topic, None)
                if not lookup:
                    _LOGGER.error(f"write_sma_sensors(): unknown topic '{topic}'")
                    continue

                if not lookup.get('output', False):
                    continue

                if topic == 'production/today':
                    day = datetime.datetime.fromtimestamp(ts).date()
                    dt = datetime.datetime.combine(day, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/month':
                    month = datetime.date.fromtimestamp(ts).replace(day=1)
                    dt = datetime.datetime.combine(month, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/year':
                    year = datetime.date.fromtimestamp(ts).replace(month=1, day=1)
                    dt = datetime.datetime.combine(year, datetime.time(0, 0))
                    ts = int(dt.timestamp())

                measurement = lookup.get('measurement')
                tags = lookup.get('tags', None)
                for k, v in point.items():
                    field = lookup.get('field')
                    # sample: dc_measurements
                    lp = f'{measurement}'
                    if tags and len(tags):
                        # sample: dc_measurements,_inverter=sb71
                        lp += f',{tags[0]}={k}'
                    if not field:
                        field = k
                    if isinstance(v, int):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v}i {ts}'
                        lps.append(lp)
                    elif isinstance(v, float):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v} {ts}'
                        lps.append(lp)
                    elif isinstance(v, dict):
                        lp_prefix = f'{lp}'
                        for k1, v1 in v.items():
                            # sample: dc_measurements,_inverter=sb71
                            lp = f'{lp_prefix}'
                            if tags and len(tags) > 1:
                                # sample: dc_measurements,_inverter=sb71,_string=a
                                lp += f',{tags[1]}={k1}'
                            if isinstance(v1, int):
                                # sample: dc_measurements,_inverter=sb71,_string=a power=1000 1556813561098
                                lp += f' {field}={v1}i {ts}'
                                lps.append(lp)
                            elif isinstance(v1, float):
                                # sample: dc_measurements,_inverter=sb71,_string=a current=0.23 1556813561098
                                lp += f' {field}={v1} {ts}'
                                lps.append(lp)
                            else:
                                _LOGGER.error(
                                    f"write_sma_sensors(): unanticipated dictionary type '{type(v1)}' in measurement '{measurement}/{field}'")
                    else:
                        _LOGGER.error(
                            f"write_sma_sensors(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                        continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_sma_sensors(): {e}")

    def delete_bucket(self):
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
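                # Caveat: self._org holds the organization *name*, while
                # create_bucket's org_id parameter expects an organization ID;
                # this call may fail unless an ID (or org=self._org) is used.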
                bucket = buckets_api.create_bucket(
                    bucket_name=self._bucket, org_id=self._org, retention_rules=None, org=None)
                if bucket:
                    _LOGGER.info(f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}")
        except NewConnectionError:
            raise
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in connect_bucket(): {e}")
Example #7
class TasksApiTest(BaseTest):
    def setUp(self) -> None:
        super(TasksApiTest, self).setUp()

        self.organization = self.find_my_org()
        self.authorization = self.add_tasks_authorization(self.organization)
        self.client.close()

        self.client = InfluxDBClient(self.host,
                                     self.authorization.token,
                                     debug=self.conf.debug)
        self.tasks_api = self.client.tasks_api()

        tasks = self.tasks_api.find_tasks()
        for task in tasks:
            if task.name.endswith("-IT"):
                self.tasks_api.delete_task(task.id)

    def add_tasks_authorization(self, organization):
        resource = PermissionResource(org=organization.name, type="tasks")

        create_task = Permission(resource=resource, action="read")
        delete_task = Permission(resource=resource, action="write")

        org_resource = PermissionResource(type="orgs")
        create_org = Permission(resource=org_resource, action="write")
        read_org = Permission(resource=org_resource, action="read")

        user_resource = PermissionResource(type="users")
        create_users = Permission(resource=user_resource, action="write")

        label_resource = PermissionResource(type="labels")
        create_labels = Permission(resource=label_resource, action="write")

        auth_resource = PermissionResource(type="authorizations")
        create_auth = Permission(resource=auth_resource, action="write")

        bucket = self.client.buckets_api().find_bucket_by_name("my-bucket")
        bucket_resource = PermissionResource(org_id=organization.id,
                                             id=bucket.id,
                                             type="buckets")
        read_bucket = Permission(resource=bucket_resource, action="read")
        write_bucket = Permission(resource=bucket_resource, action="write")

        return self.client.authorizations_api().create_authorization(
            org_id=organization.id,
            permissions=[
                create_task, delete_task, create_org, read_org, create_users,
                create_labels, create_auth, read_bucket, write_bucket
            ])

    def test_create_task(self):
        task_name = self.generate_name("it_task")

        flux = \
            '''option task = {{ 
                name: "{task_name}",
                every: 1h
            }}
            {flux}
            '''.format(task_name=task_name, flux=TASK_FLUX)

        task = Task(id=0,
                    name=task_name,
                    org_id=self.organization.id,
                    flux=flux,
                    status="active",
                    description="Task Description")

        task = self.tasks_api.create_task(task)

        print(task)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, "1h")
        self.assertEqual(task.cron, None)
        self.assertEqualIgnoringWhitespace(task.flux, flux)

        self.assertEqual(task.description, "Task Description")

    def test_create_task_with_offset(self):
        task_name = self.generate_name("it_task")

        flux = \
            '''option task = {{ 
                name: "{task_name}",
                every: 1h,
                offset: 30m
            }}
            {flux}
            '''.format(task_name=task_name, flux=TASK_FLUX)

        task = Task(id=0,
                    name=task_name,
                    org_id=self.organization.id,
                    flux=flux,
                    status="active",
                    description="Task Description")

        task = self.tasks_api.create_task(task)

        print(task)

        self.assertIsNotNone(task)
        self.assertEqual(task.offset, "30m")

    def test_create_task_every(self):
        task_name = self.generate_name("it_task")
        task = self.tasks_api.create_task_every(task_name, TASK_FLUX, "1h",
                                                self.organization)
        print(task)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, "1h")
        self.assertEqual(task.cron, None)
        self.assertTrue(task.flux.endswith(TASK_FLUX))

    def test_create_task_cron(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                               "0 2 * * *",
                                               self.organization.id)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, None)
        self.assertEqual(task.cron, "0 2 * * *")
        # self.assertEqualIgnoringWhitespace(task.flux, flux)

        self.assertTrue(task.flux.endswith(TASK_FLUX))
        # self.assertEqual(task.links, "active")

        links = task.links
        self.assertIsNotNone(task.links)
        self.assertEqual(links.logs, "/api/v2/tasks/" + task.id + "/logs")
        self.assertEqual(links.members,
                         "/api/v2/tasks/" + task.id + "/members")
        self.assertEqual(links.owners, "/api/v2/tasks/" + task.id + "/owners")
        self.assertEqual(links.runs, "/api/v2/tasks/" + task.id + "/runs")
        self.assertEqual(links._self, "/api/v2/tasks/" + task.id)

        # TODO missing get labels
        self.assertEqual(links.labels, "/api/v2/tasks/" + task.id + "/labels")

    def test_find_task_by_id(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                               "0 2 * * *",
                                               self.organization.id)

        task_by_id = self.tasks_api.find_task_by_id(task.id)
        self.assertEqual(task, task_by_id)

    @pytest.mark.skip(
        reason="https://github.com/influxdata/influxdb/issues/13576")
    @pytest.mark.skip(
        reason="https://github.com/influxdata/influxdb/issues/11590")
    def test_find_task_by_user_id(self):
        task_user = self.users_api.create_user(self.generate_name("TaskUser"))
        self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                        TASK_FLUX, "0 2 * * *",
                                        self.organization.id)
        tasks = self.tasks_api.find_tasks_by_user(task_user_id=task_user.id)
        print(tasks)
        self.assertEqual(len(tasks), 1)

    def test_delete_task(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        self.assertIsNotNone(task)

        self.tasks_api.delete_task(task.id)
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.find_task_by_id(task_id=task.id)
        assert "failed to find task" in e.value.body

    def test_update_task(self):
        task_name = self.generate_name("it task")
        cron_task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                                    "0 2 * * *",
                                                    self.organization.id)

        flux = '''
        option task = {{
            name: "{task_name}",
            every: 3m
        }}
        
        {flux}
        '''.format(task_name=task_name, flux=TASK_FLUX)

        cron_task.cron = None
        cron_task.every = "3m"
        cron_task.status = "inactive"
        cron_task.description = "Updated description"

        updated_task = self.tasks_api.update_task(cron_task)
        time.sleep(1)

        self.assertIsNotNone(updated_task)
        self.assertGreater(len(updated_task.id), 1)

        self.assertEqual(updated_task.name, task_name)
        self.assertEqual(updated_task.org_id, cron_task.org_id)
        self.assertEqual(updated_task.status, "inactive")
        self.assertEqual(updated_task.every, "3m")
        self.assertEqual(updated_task.cron, None)
        self.assertIsNotNone(updated_task.updated_at)
        now = datetime.datetime.now()
        self.assertLess(updated_task.updated_at,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertEqualIgnoringWhitespace(updated_task.flux, flux)

        self.assertEqual(updated_task.description, "Updated description")

    def test_member(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        members = self.tasks_api.get_members(task_id=task.id)
        self.assertEqual(len(members), 0)
        user = self.users_api.create_user(self.generate_name("Luke Health"))

        resource_member = self.tasks_api.add_member(member_id=user.id,
                                                    task_id=task.id)
        self.assertIsNotNone(resource_member)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "member")

        members = self.tasks_api.get_members(task_id=task.id)
        resource_member = members[0]
        self.assertEqual(len(members), 1)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "member")

        self.tasks_api.delete_member(member_id=user.id, task_id=task.id)
        members = self.tasks_api.get_members(task_id=task.id)
        self.assertEqual(len(members), 0)

    def test_owner(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 1)

        user = self.users_api.create_user(self.generate_name("Luke Health"))
        resource_member = self.tasks_api.add_owner(owner_id=user.id,
                                                   task_id=task.id)

        self.assertIsNotNone(resource_member)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "owner")

        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 2)
        resource_member = owners[1]
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "owner")

        self.tasks_api.delete_owner(owner_id=user.id, task_id=task.id)
        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 1)

    def test_runs(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_every(task_name, TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)

        runs = self.tasks_api.get_runs(task_id=task.id, limit=10)
        self.assertGreater(len(runs), 2)

        success_runs = list(filter(lambda x: x.status == "success", runs))
        run = success_runs[0]
        self.assertIsNotNone(run.id)
        self.assertEqual(run.task_id, task.id)
        self.assertEqual(run.status, "success")
        now = datetime.datetime.now()
        self.assertLess(run.scheduled_for,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertLess(run.started_at,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertLess(run.finished_at,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertIsNone(run.requested_at)
        self.assertIsNotNone(run.links)

        self.assertEqual(
            run.links.logs,
            "/api/v2/tasks/" + task.id + "/runs/" + run.id + "/logs")
        self.assertEqual(
            run.links.retry,
            "/api/v2/tasks/" + task.id + "/runs/" + run.id + "/retry")
        self.assertEqual(run.links._self,
                         "/api/v2/tasks/" + task.id + "/runs/" + run.id)
        self.assertEqual(run.links.task, "/api/v2/tasks/" + task.id)

    def test_runs_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_runs("020f755c3c082000")
        assert "task not found" in e.value.body

    def test_run_task_manually(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        run = self.tasks_api.run_manually(task_id=task.id)
        print(run)

        self.assertIsNotNone(run)
        self.assertEqual(run.status, "scheduled")

    def test_run_task_manually_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.run_manually(task_id="020f755c3c082000")
        assert "failed to force run" in e.value.body

    def test_retry_run(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        time.sleep(5)

        runs = self.tasks_api.get_runs(task.id)
        self.assertGreater(len(runs), 1)

        run = self.tasks_api.retry_run(task_id=runs[0].task_id,
                                       run_id=runs[0].id)
        self.assertIsNotNone(run)
        self.assertEqual(run.task_id, runs[0].task_id)

        self.assertEqual(run.status, "scheduled")
        self.assertEqual(run.task_id, task.id)

    def test_retry_run_not_exists(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "5s",
                                                self.organization)
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.retry_run(task_id=task.id,
                                            run_id="020f755c3c082000")
        assert "failed to retry run" in e.value.body

    def test_logs(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "3s",
                                                self.organization)
        time.sleep(6)

        logs = self.tasks_api.get_logs(task_id=task.id)

        for log in logs:
            self.assertIsNotNone(log.time)
            self.assertIsNotNone(log.message)
            print(log)

        self.tasks_api.delete_task(task_id=task.id)

    def test_logs_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_logs(task_id="020f755c3c082000")
        assert "failed to find task logs" in e.value.body

    def test_run_logs(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)
        runs = self.tasks_api.get_runs(task_id=task.id)
        self.assertGreater(len(runs), 0)

        logs = self.tasks_api.get_run_logs(run_id=runs[0].id, task_id=task.id)
        self.assertGreater(len(logs), 0)

        success = False
        for log in logs:
            print(log)
            if log.message.endswith("Completed successfully"):
                success = True

        self.assertTrue(success, "Completed successfully not found in log")

    def test_runs_not_exists(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_run_logs(task_id=task.id,
                                               run_id="020f755c3c082000")
        assert "failed to find task logs" in e.value.body

    def test_cancel_run_not_exist(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)
        runs = self.tasks_api.get_runs(task.id)

        with pytest.raises(ApiException) as e:
            assert self.tasks_api.cancel_run(task_id=task.id,
                                             run_id=runs[0].id)
        assert "failed to cancel run" in e.value.body
        assert "run not found" in e.value.body

    def test_cancel_task_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.cancel_run("020f755c3c082000",
                                             "020f755c3c082000")
        assert "failed to cancel run" in e.value.body
        assert "task not found" in e.value.body
Example #8
class InfluxBenchmark(Benchmark):
    def __init__(self, loopCount):
        super().__init__('influx', loopCount)
        # self._dbType = 'influx'

        # self._client = InfluxDBClient(host='localhost', port=8086)

    # def _writeBatch(self, tblName):
    #     global value, tick
    #     data = []
    #     for i in range(0, 100):
    #         line = "{},device={} value={} {}".format(
    #             TIME_SERIES_NAME,
    #             tblName,
    #             value,
    #             tick*1000000000)
    #         # print(line)
    #         data.append(line)
    #         value += 1
    #         tick +=1

    #     self._client.write(data, {'db':DB_NAME}, protocol='line')

    def executeWrite(self):
        global tick  # influx tick #TODO refactor

        lineTemplate = TIME_SERIES_NAME + ",rack={},shelf={},barcode='barcode_{}' temperature={},pressure={} {}"

        batchCount = self._loopCount // BATCH_SIZE
        for i in range(batchCount):
            lineBatch = []
            for j in range(BATCH_SIZE):
                n = i * BATCH_SIZE + j  # serial number
                # values first
                # rtName = 'rt_' + str(n) # table name contains serial number, has info
                temperature = 20 + (n % 10)
                pressure = 70 + (n % 10)
                # tags
                shelf = (n // ITEMS_PER_SHELF) % MAX_SHELF  # shelf number
                rack = n // (ITEMS_PER_SHELF * MAX_SHELF)  # rack number
                barcode = rack + shelf
                # now the SQL
                line = lineTemplate.format(
                    rack,
                    shelf,
                    barcode,  # tags
                    temperature,
                    pressure,  # values
                    tick * 1000000000)
                tick += 1
                lineBatch.append(line)
            write_api = self._client.write_api(write_options=SYNCHRONOUS)
            write_api.write(INFLUX_BUCKET, INFLUX_ORG, lineBatch)
            # self._client.write(lineBatch, {'db':DB_NAME}, protocol='line')

    # def doIterate(self):
    #     tblName = Config.getConfig().target_table_name
    #     print("Benchmarking INFLUX database (1 pass) for: {}".format(tblName))

    #     for i in range(0, Config.getConfig().loop_count):
    #         self._writeBatch(tblName)

    def _getOrgIdByName(self, orgName):
        """Find org by name.

        """
        orgApi = self._client.organizations_api()
        orgs = orgApi.find_organizations()
        for org in orgs:
            if org.name == orgName:
                return org.id
        raise PerfGenError("Org not found with name: {}".format(orgName))

    def _fetchAuth(self):
        authApi = self._client.authorizations_api()
        auths = authApi.find_authorizations()
        for auth in auths:
            if auth.token == INFLUX_TOKEN:
                return auth
        raise PerfGenError("No proper auth found")

    def _verifyPermissions(self, perms: list):
        if perms:
            return  # OK
        raise PerfGenError("No permission found")

    def prepare(self):
        self._client = InfluxDBClient(url="http://127.0.0.1:8086",
                                      token=INFLUX_TOKEN,
                                      org=INFLUX_ORG)

        auth = self._fetchAuth()

        self._verifyPermissions(auth.permissions)

        bktApi = self._client.buckets_api()
        # Delete
        bkt = bktApi.find_bucket_by_name(INFLUX_BUCKET)
        if bkt:
            bktApi.delete_bucket(bkt)
        # Recreate

        orgId = self._getOrgIdByName(INFLUX_ORG)
        bktApi.create_bucket(bucket=None,
                             bucket_name=INFLUX_BUCKET,
                             org_id=orgId)
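
A minimal usage sketch (assumes the module-level INFLUX_TOKEN, INFLUX_ORG,
INFLUX_BUCKET, and BATCH_SIZE constants and the Benchmark base class):

benchmark = InfluxBenchmark(loopCount=100000)
benchmark.prepare()       # verify the token's permissions, recreate the bucket
benchmark.executeWrite()  # write loopCount points in BATCH_SIZE batches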
Example #9
class Wiretap:
    def __init__(self, collectors=None):
        if not collectors:
            collectors = ALL_COLLECTORS
        self.collectors = collectors
        self._startup_check()
        self.hashes = get_hashes()
        self.config = read_config()
        self.inventory = read_inventory()
        self.metrics = []
        self.diffs = {}

        self.client = InfluxDBClient(url=settings.INFLUX_HOST,
                                     token=settings.INFLUX_TOKEN)
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
        self.query_api = self.client.query_api()
        self.bucket_api = self.client.buckets_api()
        self._db_check()

        self.diff_lock = threading.Lock()
        self.metric_lock = threading.Lock()
        self.threads = list()

        self.RUNMODE = MODE.NORMAL

        welcome = "\nWiretap:\n\nYour inventory:\n"
        for item in self.inventory:
            welcome += f"   {item}\n"
        log.error(welcome)

        self.start_server_threads()

    def start_server_threads(self):
        for server in self.inventory:
            server_thread = threading.Thread(
                target=remote_execution,
                args=(server, self),
                name=f"thread_{server.name}",
                daemon=True,
            )
            self.threads.append(server_thread)
            server_thread.start()

    def aggregate_diff(self, key, value):
        last = self.diffs.get(key)
        with self.diff_lock:
            self.diffs[key] = value  # Trust no one
        if last:
            return value - last

    def add_metric(self, server, metric):
        log.debug(f"add_metric {metric}")
        measurement = field_name = metric.tag
        if name_tuple := metric.tag.split("_", 1):
            if len(name_tuple) == 2:
                measurement, field_name = name_tuple

        if measurement in ["network"]:
            metric.value = self.aggregate_diff(
                server.name + measurement + field_name, metric.value)
            if not metric.value:
                return None

        metric.name = server.name
        point = (Point(measurement).tag("name", metric.name).tag(
            "agg_type", metric.agg_type).field(field_name, metric.value).time(
                datetime.utcfromtimestamp(metric.time), WritePrecision.S))

        if self.RUNMODE is MODE.NORMAL:
            if self.add_hash(hash(str(point.__dict__))):
                with self.metric_lock:
                    self.add_point_to_db(point)
                    self.metrics.append(metric.json())
        else:
            print(metric.json())
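
For reference, a point built with the same fluent API used in add_metric()
can be inspected with to_line_protocol() before anything is written (the
values below are illustrative):

p = (Point("cpu").tag("name", "web01").field("user", 0.64)
     .time(datetime.utcfromtimestamp(1651500000), WritePrecision.S))
print(p.to_line_protocol())  # cpu,name=web01 user=0.64 1651500000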
Example #10
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self):
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2',
                                                _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug',
                                             _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if len(influxdb_options.keys()) == 0:
            raise FailedInitialization("missing 'influxdb2' options")

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url,
                                          token=self._token,
                                          org=self._org,
                                          enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)"
                )
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            cs_esphome_debug = os.getenv(_DEBUG_ENV_VAR,
                                         'False').lower() in ('true', '1', 't')
            try:
                if cs_esphome_debug and debug_options.get(
                        'delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(
                        f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(
                        cs_esphome_debug
                        and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(
                        f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'"
                    )
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(
                f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'"
            )
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f" client {e}")
            self._client = None
        except NewConnectionError:
            _LOGGER.error(
                f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(
                f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_point(self, measurement, tags, field, value, timestamp=None):
        """Write a single sensor to the database."""
        timestamp = timestamp if timestamp is not None else int(time.time())
        lp_tags = ''
        separator = ''
        for tag in tags:
            lp_tags += f"{separator}{tag.get('t')}={tag.get('v')}"
            separator = ','
        lp = f"{measurement}," + lp_tags + f" {field}={value} {timestamp}"

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=lp,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_point(): {e}")

    def write_points(self, points):
        """Write a list of points to the database."""
        try:
            self._write_api.write(bucket=self._bucket,
                                  record=points,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_points(): {e}")

    def write_batch_sensors(self, batch_sensors, timestamp=None):
        """Write a batch of sensors to the database."""

        if len(batch_sensors) == 0:
            return

        timestamp = timestamp if timestamp is not None else int(time.time())

        batch = []
        for record in batch_sensors:
            sensor = record.get('sensor', None)
            state = record.get('state', None)
            measurement = sensor.get('measurement', None)
            device = sensor.get('device', None)
            location = sensor.get('location', None)
            precision = sensor.get('precision', None)
            if measurement is None or device is None:
                raise InfluxDBFormatError(
                    "'measurement' and/or 'device' are required")

            location_tag = '' if not location or not len(
                location) else f',_location={location}'
            device_tag = f',_device={device}'
            value = round(
                state, precision) if ((precision is not None)
                                      and isinstance(state, float)) else state
            lp = f'{measurement}{device_tag}{location_tag} sample={value} {timestamp}'
            batch.append(lp)

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=batch,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_batch_sensors(): {e}")

    def delete_bucket(self):
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                bucket = buckets_api.create_bucket(bucket_name=self._bucket,
                                                   org_id=self._org,
                                                   retention_rules=None,
                                                   org=None)
                if bucket:
                    _LOGGER.info(
                        f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in connect_bucket(): {e}")
Example #11
class InfluxDBClientTestIT(BaseTest):
    httpRequest = []

    def tearDown(self) -> None:
        super(InfluxDBClientTestIT, self).tearDown()
        if hasattr(self, 'httpd'):
            self.httpd.shutdown()
        if hasattr(self, 'httpd_thread'):
            self.httpd_thread.join()
        InfluxDBClientTestIT.httpRequest = []

    def test_proxy(self):
        self._start_proxy_server()

        self.client.close()
        self.client = InfluxDBClient(
            url=self.host,
            token=self.auth_token,
            proxy=f"http://localhost:{self.httpd.server_address[1]}",
            proxy_headers={'ProxyHeader': 'Val'})
        ready = self.client.ready()
        self.assertEqual(ready.status, "ready")
        self.assertEqual(1, len(InfluxDBClientTestIT.httpRequest))
        self.assertEqual(
            'Val',
            InfluxDBClientTestIT.httpRequest[0].headers.get('ProxyHeader'))

    def test_ping(self):
        ping = self.client.ping()
        self.assertTrue(ping)

    def test_ping_not_running_instance(self):
        client_not_running = InfluxDBClient("http://localhost:8099",
                                            token="my-token",
                                            debug=True)
        ping = client_not_running.ping()
        self.assertFalse(ping)
        client_not_running.close()

    def test_version(self):
        version = self.client.version()
        self.assertTrue(len(version) > 0)

    def test_version_not_running_instance(self):
        client_not_running = InfluxDBClient("http://localhost:8099",
                                            token="my-token",
                                            debug=True)
        with self.assertRaises(NewConnectionError):
            client_not_running.version()

        client_not_running.close()

    def test_username_password_authorization(self):
        self.client.close()
        self.client = InfluxDBClient(url=self.host,
                                     username="******",
                                     password="******",
                                     debug=True)
        self.client.query_api().query("buckets()", "my-org")

    def test_query_and_debug(self):
        self.client.close()
        self.client = InfluxDBClient(url=self.host,
                                     token="my-token",
                                     debug=True)
        # Query
        results = self.client.query_api().query("buckets()", "my-org")
        self.assertIn(
            "my-bucket",
            list(map(lambda record: record["name"], results[0].records)))
        # Query RAW
        results = self.client.query_api().query_raw("buckets()", "my-org")
        self.assertIn("my-bucket", codecs.decode(results.data))
        # Bucket API
        results = self.client.buckets_api().find_buckets()
        self.assertIn("my-bucket",
                      list(map(lambda bucket: bucket.name, results.buckets)))

    def _start_proxy_server(self):
        import http.server
        import urllib.request

        class ProxyHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
            def do_GET(self):
                InfluxDBClientTestIT.httpRequest.append(self)
                self.send_response(200)
                self.send_header('Content-type', 'application/json')
                self.end_headers()
                self.copyfile(urllib.request.urlopen(self.path), self.wfile)

        self.httpd = http.server.HTTPServer(('localhost', 0),
                                            ProxyHTTPRequestHandler)
        self.httpd_thread = threading.Thread(target=self.httpd.serve_forever)
        self.httpd_thread.start()
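
The proxy wiring that test_proxy exercises comes down to two constructor kwargs; a minimal sketch with placeholder URL, token, and proxy address:

from influxdb_client import InfluxDBClient

# 'proxy' routes every request through the given HTTP proxy, and
# 'proxy_headers' adds extra headers to those proxied requests.
client = InfluxDBClient(url="http://localhost:8086", token="my-token",
                        proxy="http://localhost:3128",
                        proxy_headers={'ProxyHeader': 'Val'})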
Exemple #12
0
class InfluxDBWriter(Writer):
  """Write to the specified file. If filename is empty, write to stdout."""
  def __init__(self, bucket_name):
    """
    Write data records to the InfluxDB.
    ```
    bucket_name  the name of the bucket in InfluxDB.  If the bucket does
    not exists then this writer will try to create it.
    ```
    """
    super().__init__(input_format=Text)

    if not INFLUXDB_SETTINGS_FOUND:
      raise RuntimeError('File database/settings.py not found. '
                         'InfluxDB functionality is not available. Have '
                         'you copied over database/settings.py.dist '
                         'to database/settings.py and followed the '
                         'configuration instructions in it?')
    if not INFLUXDB_CLIENT_FOUND:
      raise RuntimeError('Python module influxdb_client not found. Please '
                         'install using "pip install influxdb_client" prior '
                         'to using InfluxDBWriter.')

    self.client = InfluxDBClient(url=INFLUXDB_URL, token=INFLUXDB_AUTH_TOKEN, org=INFLUXDB_ORG)

    # Get the org ID from the name:
    try:
      self.organizations_api = self.client.organizations_api()
      orgs = self.organizations_api.find_organizations()
    except Exception as e:
      raise RuntimeError('Error connecting to the InfluxDB API. '
                         'Please confirm that InfluxDB is running and '
                         'that the authentication token is correct: ' + str(e))

    our_org = next((org for org in orgs if org.name == INFLUXDB_ORG), None)

    if not our_org:
      raise RuntimeError('Cannot find the organization "' + INFLUXDB_ORG + '" in InfluxDB')

    self.org_id = our_org.id

    # get the bucketID from the name:
    self.bucket_api = self.client.buckets_api()
    bucket = self.bucket_api.find_bucket_by_name(bucket_name)

    # if the bucket does not exist then try to create it
    if not bucket:
      try:
        logging.info('Creating new bucket for: %s', bucket_name)
        new_bucket = self.bucket_api.create_bucket(bucket_name=bucket_name, org_id=self.org_id)
        self.bucket_id = new_bucket.id
      except Exception as e:
        raise RuntimeError('Cannot create bucket in InfluxDB for '
                           + bucket_name + ': ' + str(e))
    else:
      self.bucket_id = bucket.id

    self.write_api = self.client.write_api(write_options=ASYNCHRONOUS)

  ############################
  def write(self, record):
    """
    Note: Assume record is a dict or list of dict. Each dict contains a list
    of "fields" and float "timestamp" (UTC epoch seconds)
    """
    if record is None:
      return

    logging.info('InfluxDBWriter writing record: %s', record)

    if not isinstance(record, (dict, list)):
      logging.warning('InfluxDBWriter could not ingest record '
                      'type %s: %s', type(record), str(record))
      return

    def to_point(single_record):
      # Map the common record format onto an InfluxDB point dict; the
      # timestamp is converted from epoch seconds to nanoseconds.
      return {'measurement': single_record['data_id'],
              'tags': {'sensor': single_record['data_id']},
              'fields': single_record['fields'],
              'time': int(single_record['timestamp'] * 1000000000)}

    try:
      if isinstance(record, list):
        influxdb_record = [to_point(r) for r in record]
      else:
        influxdb_record = to_point(record)

      self.write_api.write(self.bucket_id, self.org_id, influxdb_record)
      return

    except Exception as e:
      logging.warning('InfluxDBWriter could not ingest record '
                      'type %s: %s (%s)', type(record), str(record), e)
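
For reference, a record in the shape write() expects could look like the following; the field names and values are illustrative, not taken from the source:

# Illustrative input for InfluxDBWriter.write(): 'data_id' becomes both the
# measurement name and the 'sensor' tag; 'timestamp' is UTC epoch seconds.
sample_record = {
    'data_id': 'gyroscope',
    'fields': {'pitch': 1.2, 'roll': -0.4},
    'timestamp': 1618952276.0,
}
# writer.write(sample_record)     # a single record
# writer.write([sample_record])   # or a list of records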
Exemple #13
0
class GzipSupportTest(BaseTest):
    def setUp(self) -> None:
        super(GzipSupportTest, self).setUp()

        httpretty.enable()
        httpretty.reset()

    def tearDown(self) -> None:
        self.client.close()
        httpretty.disable()

    def test_gzip_disabled(self):
        query_response = \
            "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string\n" \
            "#group,false,false,false,false,false,false,false,false,false,true\n#default,_result,,,,,,,,\n" \
            ",result,table,_start,_stop,_time,_value,_field,_measurement,host\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,121,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,122,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,123,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,124,free,mem,A\n"
        httpretty.register_uri(
            httpretty.GET,
            uri="http://localhost/api/v2/me",
            status=200,
            body="{\"name\":\"Tom\"}",
            adding_headers={'Content-Type': 'application/json'})
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/write",
                               status=204)
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/query",
                               status=200,
                               body=query_response)

        self.client = InfluxDBClient("http://localhost",
                                     "my-token",
                                     org="my-org",
                                     enable_gzip=False)

        _user = self.client.users_api().me()
        self.assertEqual("Tom", _user._name)

        _response = self.client.write_api(write_options=SYNCHRONOUS) \
            .write("my-bucket", "my-org", "h2o_feet,location=coyote_creek water_level=1 1")
        self.assertEqual(None, _response)

        _tables = self.client.query_api() \
            .query('from(bucket:"my-bucket") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last()', "my-org")
        self.assertEqual(1, len(_tables))
        self.assertEqual(4, len(_tables[0].records))
        self.assertEqual(121, _tables[0].records[0].get_value())
        self.assertEqual(122, _tables[0].records[1].get_value())
        self.assertEqual(123, _tables[0].records[2].get_value())
        self.assertEqual(124, _tables[0].records[3].get_value())

        _requests = httpretty.httpretty.latest_requests
        self.assertEqual(3, len(_requests))

        # Me endpoint (gzip is not supported for /api/v2/me)
        self.assertEqual("/api/v2/me", _requests[0].path)
        self.assertEqual(None, _requests[0].headers['Content-Encoding'])
        self.assertEqual("identity", _requests[0].headers['Accept-Encoding'])
        # Write
        self.assertEqual(
            "/api/v2/write?org=my-org&bucket=my-bucket&precision=ns",
            _requests[1].path)
        self.assertEqual("identity", _requests[1].headers['Content-Encoding'])
        self.assertEqual("identity", _requests[1].headers['Accept-Encoding'])
        self.assertEqual("h2o_feet,location=coyote_creek water_level=1 1",
                         _requests[1].parsed_body)
        # Query
        self.assertEqual("/api/v2/query?org=my-org", _requests[2].path)
        self.assertEqual(None, _requests[2].headers['Content-Encoding'])
        self.assertEqual("identity", _requests[2].headers['Accept-Encoding'])
        self.assertTrue(
            'from(bucket:"my-bucket") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last()'
            in str(_requests[2].parsed_body))

    def test_gzip_enabled(self):
        query_response = \
            "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string\n" \
            "#group,false,false,false,false,false,false,false,false,false,true\n#default,_result,,,,,,,,\n" \
            ",result,table,_start,_stop,_time,_value,_field,_measurement,host\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,121,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,122,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,123,free,mem,A\n" \
            ",,0,1970-01-01T00:00:10Z,1970-01-01T00:00:20Z,1970-01-01T00:00:10Z,124,free,mem,A\n"
        httpretty.register_uri(
            httpretty.GET,
            uri="http://localhost/api/v2/me",
            status=200,
            body="{\"name\":\"Tom\"}",
            adding_headers={'Content-Type': 'application/json'})
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/write",
                               status=204)
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/query",
                               status=200,
                               body=gzip.compress(
                                   bytes(query_response, "utf-8")),
                               adding_headers={'Content-Encoding': 'gzip'})

        self.client = InfluxDBClient("http://localhost",
                                     "my-token",
                                     org="my-org",
                                     enable_gzip=True)
        _user = self.client.users_api().me()
        self.assertEqual("Tom", _user._name)

        _response = self.client.write_api(write_options=SYNCHRONOUS) \
            .write("my-bucket", "my-org", "h2o_feet,location=coyote_creek water_level=1 1")
        self.assertEqual(None, _response)

        _tables = self.client.query_api() \
            .query('from(bucket:"my-bucket") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last()', "my-org")
        self.assertEqual(1, len(_tables))
        self.assertEqual(4, len(_tables[0].records))
        self.assertEqual(121, _tables[0].records[0].get_value())
        self.assertEqual(122, _tables[0].records[1].get_value())
        self.assertEqual(123, _tables[0].records[2].get_value())
        self.assertEqual(124, _tables[0].records[3].get_value())

        _requests = httpretty.httpretty.latest_requests
        self.assertEqual(3, len(_requests))

        # Me endpoint (gzip is not supported for /api/v2/me)
        self.assertEqual("/api/v2/me", _requests[0].path)
        self.assertEqual(None, _requests[0].headers['Content-Encoding'])
        self.assertEqual("identity", _requests[0].headers['Accept-Encoding'])
        # Write
        self.assertEqual(
            "/api/v2/write?org=my-org&bucket=my-bucket&precision=ns",
            _requests[1].path)
        self.assertEqual("gzip", _requests[1].headers['Content-Encoding'])
        self.assertEqual("identity", _requests[1].headers['Accept-Encoding'])
        self.assertNotEqual("h2o_feet,location=coyote_creek water_level=1 1",
                            _requests[1].parsed_body)
        # Query
        self.assertEqual("/api/v2/query?org=my-org", _requests[2].path)
        self.assertEqual(None, _requests[2].headers['Content-Encoding'])
        self.assertEqual("gzip", _requests[2].headers['Accept-Encoding'])
        self.assertTrue(
            'from(bucket:"my-bucket") |> range(start: 1970-01-01T00:00:00.000000001Z) |> last()'
            in str(_requests[2].parsed_body))

    def test_write_query_gzip(self):
        httpretty.disable()

        self.client = InfluxDBClient(self.host,
                                     token="my-token",
                                     org="my-org",
                                     debug=False,
                                     enable_gzip=True)
        self.api_client = self.client.api_client
        self.buckets_client = self.client.buckets_api()
        self.query_client = self.client.query_api()
        self.org = "my-org"
        self.my_organization = self.find_my_org()

        _bucket = self.create_test_bucket()

        self.client.write_api(write_options=SYNCHRONOUS) \
            .write(_bucket.name, self.org, "h2o_feet,location=coyote_creek water_level=111 1")

        _result = self.query_client.query(
            f"from(bucket:\"{_bucket.name}\") |> range(start: 1970-01-01T00:00:00.000000001Z)",
            self.org)

        self.assertEqual(len(_result), 1)
        self.assertEqual(_result[0].records[0].get_measurement(), "h2o_feet")
        self.assertEqual(_result[0].records[0].get_value(), 111.0)
        self.assertEqual(_result[0].records[0].get_field(), "water_level")
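
The behaviour these tests pin down hinges on a single constructor flag; a minimal sketch, with a placeholder URL and token:

from influxdb_client import InfluxDBClient

# With enable_gzip=True the client gzips write bodies (Content-Encoding: gzip)
# and asks for gzipped query responses (Accept-Encoding: gzip); with False it
# forces identity encoding on both, exactly as the assertions above check.
client = InfluxDBClient(url="http://localhost:8086", token="my-token",
                        org="my-org", enable_gzip=True)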
Exemple #14
0
class InfluxAPI(Processor):
    def __init__(self,
                 url: str,
                 token: str,
                 org: str,
                 data_bucket: str,
                 meta_bucket: str,
                 workers: int = cpu_count()):
        super().__init__()
        self.client = InfluxDBClient(url=url, token=token, org=org)
        self.url = url
        self.token = token
        self.org = org

        if not self.check_bucket_exists(data_bucket):
            raise KeyError(f"Data bucket {data_bucket} does not exist")
        if not self.check_bucket_exists(meta_bucket):
            raise KeyError(f"Meta bucket {meta_bucket} does not exist")

        self.data_bucket = data_bucket
        self.meta_bucket = meta_bucket

        # Write with the batching API, using sane-looking defaults.
        self.api = self.client.write_api(
            write_options=WriteOptions(batch_size=200,
                                       flush_interval=2000,
                                       jitter_interval=100,
                                       retry_interval=2000,
                                       write_scheduler=ThreadPoolScheduler(
                                           max_workers=workers)))

    def __del__(self):
        # Cleanup connection
        self.client.close()

    def check_bucket_exists(self, bucket_name: str) -> bool:
        return self.client.buckets_api().find_bucket_by_name(
            bucket_name=bucket_name) is not None

    @staticmethod
    def _generate_data(data: List[dict]) -> List[Point]:
        points = []
        for tags, fields, timestamp in map(
                itemgetter('tags', 'fields', 'timestamp'), data):
            collector = tags['collector']
            # Every measurement goes against what collected it, which makes
            # the collector tag redundant; drop it to reduce dimensionality.
            p = Point.measurement(collector)
            for key, value in tags.items():
                if key != 'collector' and value is not None:
                    p.tag(key, value)
            for key, value in fields.items():
                if value is not None:
                    p.field(key, value)
            p.time(datetime.fromtimestamp(timestamp, tz=timezone.utc),
                   write_precision=WritePrecision.S)
            points.append(p)
        return points

    def process(self, data: dict) -> Optional[ProcessedData]:
        p = self._process(data)
        p.discard_strings()
        return p

    def send_data(self, data: List[dict], **kwargs) -> bool:
        """Sending specific data. Assumes its in the common format"""
        if not kwargs.get('bucket', None):
            raise KeyError("Expected to have bucket name passed in")
        self.api.write(bucket=kwargs['bucket'],
                       record=self._generate_data(data))
        return True

    def send(self, data: ProcessedData) -> bool:
        self.api.write(bucket=self.data_bucket,
                       record=self._generate_data(data.data))
        # Metadata goes to the meta bucket that the constructor validated.
        self.api.write(bucket=self.meta_bucket,
                       record=self._generate_data(data.all_meta_data))
        return True
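
A short usage sketch for InfluxAPI; the URL, token, org, and bucket names below are placeholders, and both buckets must already exist since the constructor checks for them:

# Hypothetical wiring; send_data() requires a 'bucket' kwarg and records in
# the common {'tags', 'fields', 'timestamp'} shape with a 'collector' tag.
api = InfluxAPI(url="http://localhost:8086", token="my-token", org="my-org",
                data_bucket="data", meta_bucket="meta")
api.send_data(
    [{'tags': {'collector': 'bgp', 'peer': '192.0.2.1'},
      'fields': {'updates': 12},
      'timestamp': 1618952276}],
    bucket="data")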
Exemple #15
0
class TimeseriesClient:
    host: Union[str, None]
    port: int
    token: Union[str, None]
    organization: str

    def __init__(
        self,
        host: Optional[str] = None,
        port: Optional[int] = None,
        organization: str = "GEWV",
        token: Optional[str] = None,
        client: Optional[InfluxDBClient] = None,
        verify_ssl: bool = True,
    ):
        if client is None:
            if host is None:
                raise Exception(
                    "Missing Host Address for Timeseries DB Client.")

            if port is None:
                raise Exception("Missing Port for Timeseries DB Client.")

            if token is None:
                raise Exception("Missing Token for Timeseries DB Client.")

            # verify_ssl doubles as the HTTPS switch here: TLS is only used
            # when certificate verification is enabled.
            protocol = "https" if verify_ssl else "http"

            self._client = InfluxDBClient(
                url=f"{protocol}://{host}:{port}",
                token=token,
                verify_ssl=verify_ssl,
            )

            if len(organization) != 16:
                # Looks like an org name rather than a 16-char ID:
                # resolve the ID via the API and store it.
                self._org_api = self._client.organizations_api()
                self._org_id = self.get_org_id_by_name(org_name=organization)

                if self._org_id is None:
                    raise Exception(
                        f"The organization {organization} does not exist in InfluxDB. Breaking execution."
                    )

                self._client.org = self._org_id
            else:
                self._client.org = organization
        else:
            self._client = client

        self._org_api = self._client.organizations_api()
        self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
        self._query_api = self._client.query_api()
        self._bucket_api = self._client.buckets_api()

        self._grafana_api = GrafanaApi(host=host, port=3000, use_tls=False)

    @staticmethod
    def from_env_properties():
        client = InfluxDBClient.from_env_properties()
        return TimeseriesClient(client=client)

    def health(self):
        return self._client.health()

    def get_org_id_by_name(self, org_name: str) -> Union[str, None]:
        orgs: List[Organization] = self._org_api.find_organizations()
        for org in orgs:
            if org.name == org_name:
                return org.id

        return None

    def create_bucket(self, bucket: str):
        try:
            self._bucket_api.create_bucket(bucket_name=bucket)
        except ApiException as err:
            # 422 means the bucket already exists; anything else is re-raised.
            if err.status != 422:
                raise

    def exist_bucket(self, bucket: str):
        return self._bucket_api.find_bucket_by_name(bucket_name=bucket)

    def get_bucket_by_name(self, bucket_name: str):
        return self._bucket_api.find_bucket_by_name(bucket_name=bucket_name)

    def delete_bucket(self, bucket: str):
        found_bucket = self.get_bucket_by_name(bucket_name=bucket)
        return self._bucket_api.delete_bucket(bucket=found_bucket)

    def get_grafana_orgs(self) -> List[GrafanaOrganization]:
        return self._grafana_api.get_organizations()

    def get_grafana_org(self, org_name: str) -> GrafanaOrganization:
        return self._grafana_api.get_organization_by_name(org_name=org_name)

    def create_grafana_org(self, org_name: str):
        return self._grafana_api.create_organization(org_name=org_name)

    def delete_grafana_org(self, org_name: str):
        org = self.get_grafana_org(org_name=org_name)

        if org is None:
            raise Exception(
                f"Cannot delete Grafana org {org_name}: org does not exist!")

        return self._grafana_api.delete_organization(org["id"])

    def create_project(self, project_name: str):
        # Steps
        # 1. create new bucket
        # 2. create token for bucket
        # 3. create new org in grafana
        # 4. create new source in grafana
        pass

    def get_points(
        self,
        **kwargs,
    ) -> List[FluxTable]:
        if self.health().status != "pass":
            raise Exception("InfluxDB is not reachable or unhealthy.")

        tables = self._query_api.query(query=self.build_query(**kwargs))

        return tables

    def get_dataframe(self, **kwargs):
        return self.query_dataframe(flux_query=self.build_query(**kwargs))

    def query_dataframe(
        self,
        flux_query: str,
    ):
        """
        with this function you can send a own query to InfluxDB and
        you will get back a dataframe with datetimeindex
        """

        if self.health().status != "pass":
            raise Exception("InfluxDB is not reachable or unhealthy.")

        df = cast(
            DataFrame,
            self._query_api.query_data_frame(query=flux_query),
        )

        if "_time" in df.columns:
            df = df.set_index(pd.to_datetime(df["_time"]))

        return df

    def write_points(self, project: str, points: List[Point]):
        self._write_api.write(bucket=project, record=points)

    def write_a_dataframe(
        self,
        project: str,
        measurement_name: str,
        dataframe: pd.DataFrame,
        tag_columns: Optional[List[str]] = None,
        additional_tags: Optional[Dict[str, str]] = None,
    ):
        """
        Write a pandas DataFrame to InfluxDB. You can define additional
        tags that are appended to every entry.
        """
        # Avoid the shared-mutable-default pitfall: the code below appends
        # to tag_columns, so always start from a fresh list.
        tag_columns = list(tag_columns) if tag_columns else []

        if additional_tags is None:
            self._write_api.write(
                bucket=project,
                record=dataframe,
                data_frame_measurement_name=measurement_name,
                data_frame_tag_columns=tag_columns,
            )
            return

        tags_dataframe = pd.DataFrame(index=dataframe.index)

        # create the dataframe with the tags
        for tag_name, tag in additional_tags.items():
            tag_columns.append(tag_name)
            tags_dataframe[tag_name] = [tag] * len(dataframe)

        combined_frames = pd.concat([dataframe, tags_dataframe], axis=1)

        self._write_api.write(
            bucket=project,
            record=combined_frames,
            data_frame_measurement_name=measurement_name,
            data_frame_tag_columns=tag_columns,
        )

    def build_query(
        self,
        project: str,
        fields: Dict[str, str] = {},
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        precision: str = "5m",
    ) -> str:

        query = f"""
            from(bucket: "{project}")
        """

        if start_time is not None and end_time is not None:
            self.test_datetime(start_time)
            self.test_datetime(end_time)

            query += f"""
                |> range(start: {start_time.isoformat()}, stop: {end_time.isoformat()})
            """
        elif start_time is not None:
            self.test_datetime(start_time)

            query += f"""
                |> range(start: {start_time.isoformat()})
            """

        elif end_time is not None:
            self.test_datetime(end_time)

            query += f"""
                |> range(stop: {end_time.isoformat()})
            """

        for f, v in fields.items():
            query += f"""
                |> filter(fn: (r) => r["{f}"] == "{v}")
            """

        query += f"""
            |> aggregateWindow(every: {precision}, fn: mean, createEmpty: true)
            |> yield(name: "mean")
        """

        return query

    @staticmethod
    def test_datetime(dt: datetime):
        if not isinstance(dt, datetime):
            raise Exception(
                f"The delivered datetime {dt} is not of type datetime.")

        if dt.tzinfo is None:
            raise Exception(
                f"The time {dt.isoformat()} has no timezone info. That is necessary."
            )
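
To make build_query concrete, here is roughly what one call produces; the host, token, and bucket names are placeholders:

from datetime import datetime, timezone

ts = TimeseriesClient(host="localhost", port=8086, token="my-token")
flux = ts.build_query(
    project="my-bucket",
    fields={"_measurement": "h2o_feet"},
    # test_datetime() requires timezone-aware datetimes:
    start_time=datetime(2021, 1, 1, tzinfo=timezone.utc),
    precision="1h",
)
# flux is (modulo whitespace):
#   from(bucket: "my-bucket")
#   |> range(start: 2021-01-01T00:00:00+00:00)
#   |> filter(fn: (r) => r["_measurement"] == "h2o_feet")
#   |> aggregateWindow(every: 1h, fn: mean, createEmpty: true)
#   |> yield(name: "mean")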
Exemple #16
0
    TMP_FILE_PATH = data["tmp_path"]
    IP_LOOKUP_TABLE_PATH = data["ip_path"]
    PREV_PULLED_DATA_PATH = data["cache_path"]
    SYS_LOG_PATH = data["syslog"]
    OPENVPNLOG_PATH = data["vpn_status"]
    BUCKET = platform.uname()[1] + "-VPN"
    IPDB_PATH = data["ipdbpath"]
except Exception:
    print("Issue with the config. Please run the init command.")

### InfluxDB parameters
try:
    start_time = time.perf_counter()
    client = InfluxDBClient(url=URL, token=TOKEN, org=ORG)
    write_api = client.write_api(write_options=SYNCHRONOUS)
    bucket_api = client.buckets_api()
except Exception:
    print("Issue with the config. Please run the init command.")


### Functions
def main():
    if sys.argv[1].casefold() == "init":
        init_environment()
        return

    try:
        bucket_api.create_bucket(bucket=Bucket(
            name=BUCKET,
            org_id=ORG_ID,
            retention_rules=[BucketRetentionRules(every_seconds=604800)]))