class TasksApiTest(BaseTest):
    def setUp(self) -> None:
        super(TasksApiTest, self).setUp()

        self.organization = self.find_my_org()
        self.authorization = self.add_tasks_authorization(self.organization)
        self.client.close()

        self.client = InfluxDBClient(self.host,
                                     self.authorization.token,
                                     debug=self.conf.debug)
        self.tasks_api = self.client.tasks_api()

        tasks = self.tasks_api.find_tasks()
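        # Remove leftover tasks from previous runs (generated names end with "-IT").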
        for task in tasks:
            if task.name.endswith("-IT"):
                self.tasks_api.delete_task(task.id)

    def add_tasks_authorization(self, organization):
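        """Create an authorization whose token carries the task, org, user,
        label, authorization, and bucket permissions these tests need."""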
        resource = PermissionResource(org=organization.name, type="tasks")

        create_task = Permission(resource=resource, action="read")
        delete_task = Permission(resource=resource, action="write")

        org_resource = PermissionResource(type="orgs")
        create_org = Permission(resource=org_resource, action="write")
        read_org = Permission(resource=org_resource, action="read")

        user_resource = PermissionResource(type="users")
        create_users = Permission(resource=user_resource, action="write")

        label_resource = PermissionResource(type="labels")
        create_labels = Permission(resource=label_resource, action="write")

        auth_resource = PermissionResource(type="authorizations")
        create_auth = Permission(resource=auth_resource, action="write")

        bucket = self.client.buckets_api().find_bucket_by_name("my-bucket")
        bucket_resource = PermissionResource(org_id=organization.id,
                                             id=bucket.id,
                                             type="buckets")
        read_bucket = Permission(resource=bucket_resource, action="read")
        write_bucket = Permission(resource=bucket_resource, action="write")

        return self.client.authorizations_api().create_authorization(
            org_id=organization.id,
            permissions=[
                create_task, delete_task, create_org, read_org, create_users,
                create_labels, create_auth, read_bucket, write_bucket
            ])

    def test_create_task(self):
        task_name = self.generate_name("it_task")

        flux = \
            '''option task = {{ 
                name: "{task_name}",
                every: 1h
            }}
            {flux}
            '''.format(task_name=task_name, flux=TASK_FLUX)

        task = Task(id=0,
                    name=task_name,
                    org_id=self.organization.id,
                    flux=flux,
                    status="active",
                    description="Task Description")

        task = self.tasks_api.create_task(task)

        print(task)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, "1h")
        self.assertEqual(task.cron, None)
        self.assertEqualIgnoringWhitespace(task.flux, flux)

        self.assertEqual(task.description, "Task Description")

    def test_create_task_with_offset(self):
        task_name = self.generate_name("it_task")

        flux = \
            '''option task = {{ 
                name: "{task_name}",
                every: 1h,
                offset: 30m
            }}
            {flux}
            '''.format(task_name=task_name, flux=TASK_FLUX)

        task = Task(id=0,
                    name=task_name,
                    org_id=self.organization.id,
                    flux=flux,
                    status="active",
                    description="Task Description")

        task = self.tasks_api.create_task(task)

        print(task)

        self.assertIsNotNone(task)
        self.assertEqual(task.offset, "30m")

    def test_create_task_every(self):
        task_name = self.generate_name("it_task")
        task = self.tasks_api.create_task_every(task_name, TASK_FLUX, "1h",
                                                self.organization)
        print(task)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, "1h")
        self.assertEqual(task.cron, None)
        self.assertTrue(task.flux.endswith(TASK_FLUX))

    def test_create_task_cron(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                               "0 2 * * *",
                                               self.organization.id)

        self.assertIsNotNone(task)
        self.assertGreater(len(task.id), 1)

        self.assertEqual(task.name, task_name)
        self.assertEqual(task.org_id, self.organization.id)
        self.assertEqual(task.status, "active")
        self.assertEqual(task.every, None)
        self.assertEqual(task.cron, "0 2 * * *")
        # self.assertEqualIgnoringWhitespace(task.flux, flux)

        self.assertTrue(task.flux.endswith(TASK_FLUX))
        # self.assertEqual(task.links, "active")

        links = task.links
        self.assertIsNotNone(task.links)
        self.assertEqual(links.logs, "/api/v2/tasks/" + task.id + "/logs")
        self.assertEqual(links.members,
                         "/api/v2/tasks/" + task.id + "/members")
        self.assertEqual(links.owners, "/api/v2/tasks/" + task.id + "/owners")
        self.assertEqual(links.runs, "/api/v2/tasks/" + task.id + "/runs")
        self.assertEqual(links._self, "/api/v2/tasks/" + task.id)

        # TODO missing get labels
        self.assertEqual(links.labels, "/api/v2/tasks/" + task.id + "/labels")

    def test_find_task_by_id(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                               "0 2 * * *",
                                               self.organization.id)

        task_by_id = self.tasks_api.find_task_by_id(task.id)
        self.assertEqual(task, task_by_id)

    @pytest.mark.skip(
        reason="https://github.com/influxdata/influxdb/issues/13576")
    @pytest.mark.skip(
        reason="https://github.com/influxdata/influxdb/issues/11590")
    def test_find_task_by_user_id(self):
        task_user = self.users_api.create_user(self.generate_name("TaskUser"))
        self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                        TASK_FLUX, "0 2 * * *",
                                        self.organization.id)
        tasks = self.tasks_api.find_tasks_by_user(task_user_id=task_user.id)
        print(tasks)
        self.assertEqual(len(tasks), 1)

    def test_delete_task(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        self.assertIsNotNone(task)

        self.tasks_api.delete_task(task.id)
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.find_task_by_id(task_id=task.id)
        assert "failed to find task" in e.value.body

    def test_update_task(self):
        task_name = self.generate_name("it task")
        cron_task = self.tasks_api.create_task_cron(task_name, TASK_FLUX,
                                                    "0 2 * * *",
                                                    self.organization.id)

        flux = '''
        option task = {{
            name: "{task_name}",
            every: 3m
        }}
        
        {flux}
        '''.format(task_name=task_name, flux=TASK_FLUX)

        cron_task.cron = None
        cron_task.every = "3m"
        cron_task.status = "inactive"
        cron_task.description = "Updated description"

        updated_task = self.tasks_api.update_task(cron_task)
        time.sleep(1)

        self.assertIsNotNone(updated_task)
        self.assertGreater(len(updated_task.id), 1)

        self.assertEqual(updated_task.name, task_name)
        self.assertEqual(updated_task.org_id, cron_task.org_id)
        self.assertEqual(updated_task.status, "inactive")
        self.assertEqual(updated_task.every, "3m")
        self.assertEqual(updated_task.cron, None)
        self.assertIsNotNone(updated_task.updated_at)
        now = datetime.datetime.now().astimezone(tz=datetime.timezone.utc)
        self.assertLess(updated_task.updated_at, now)
        self.assertEqualIgnoringWhitespace(updated_task.flux, flux)

        self.assertEqual(updated_task.description, "Updated description")

    def test_member(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        members = self.tasks_api.get_members(task_id=task.id)
        self.assertEqual(len(members), 0)
        user = self.users_api.create_user(self.generate_name("Luke Health"))

        resource_member = self.tasks_api.add_member(member_id=user.id,
                                                    task_id=task.id)
        self.assertIsNotNone(resource_member)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "member")

        members = self.tasks_api.get_members(task_id=task.id)
        resource_member = members[0]
        self.assertEqual(len(members), 1)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "member")

        self.tasks_api.delete_member(member_id=user.id, task_id=task.id)
        members = self.tasks_api.get_members(task_id=task.id)
        self.assertEqual(len(members), 0)

    def test_owner(self):
        task = self.tasks_api.create_task_cron(self.generate_name("it_task"),
                                               TASK_FLUX, "0 2 * * *",
                                               self.organization.id)
        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 1)

        user = self.users_api.create_user(self.generate_name("Luke Health"))
        resource_member = self.tasks_api.add_owner(owner_id=user.id,
                                                   task_id=task.id)

        self.assertIsNotNone(resource_member)
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "owner")

        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 2)
        resource_member = owners[1]
        self.assertEqual(resource_member.id, user.id)
        self.assertEqual(resource_member.name, user.name)
        self.assertEqual(resource_member.role, "owner")

        self.tasks_api.delete_owner(owner_id=user.id, task_id=task.id)
        owners = self.tasks_api.get_owners(task_id=task.id)
        self.assertEqual(len(owners), 1)

    def test_runs(self):
        task_name = self.generate_name("it task")
        task = self.tasks_api.create_task_every(task_name, TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)

        runs = self.tasks_api.get_runs(task_id=task.id, limit=10)
        self.assertGreater(len(runs), 2)

        success_runs = list(filter(lambda x: x.status == "success", runs))
        run = success_runs[0]
        self.assertIsNotNone(run.id)
        self.assertEqual(run.task_id, task.id)
        self.assertEqual(run.status, "success")
        now = datetime.datetime.now()
        self.assertLess(run.scheduled_for,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertLess(run.started_at,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertLess(run.finished_at,
                        now.astimezone(tz=datetime.timezone.utc))
        self.assertIsNone(run.requested_at)
        self.assertIsNotNone(run.links)

        self.assertEqual(
            run.links.logs,
            "/api/v2/tasks/" + task.id + "/runs/" + run.id + "/logs")
        self.assertEqual(
            run.links.retry,
            "/api/v2/tasks/" + task.id + "/runs/" + run.id + "/retry")
        self.assertEqual(run.links._self,
                         "/api/v2/tasks/" + task.id + "/runs/" + run.id)
        self.assertEqual(run.links.task, "/api/v2/tasks/" + task.id)

    def test_runs_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_runs("020f755c3c082000")
        assert "task not found" in e.value.body

    def test_run_task_manually(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        run = self.tasks_api.run_manually(task_id=task.id)
        print(run)

        self.assertIsNotNone(run)
        self.assertEqual(run.status, "scheduled")

    def test_run_task_manually_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.run_manually(task_id="020f755c3c082000")
        assert "failed to force run" in e.value.body

    def test_retry_run(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        time.sleep(5)

        runs = self.tasks_api.get_runs(task.id)
        self.assertGreater(len(runs), 1)

        run = self.tasks_api.retry_run(task_id=runs[0].task_id,
                                       run_id=runs[0].id)
        self.assertIsNotNone(run)
        self.assertEqual(run.task_id, runs[0].task_id)

        self.assertEqual(run.status, "scheduled")
        self.assertEqual(run.task_id, task.id)

    def test_retry_run_not_exists(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "5s",
                                                self.organization)
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.retry_run(task_id=task.id,
                                            run_id="020f755c3c082000")
        assert "failed to retry run" in e.value.body

    def test_logs(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "3s",
                                                self.organization)
        time.sleep(6)

        logs = self.tasks_api.get_logs(task_id=task.id)

        for log in logs:
            self.assertIsNotNone(log.time)
            self.assertIsNotNone(log.message)
            print(log)

        self.tasks_api.delete_task(task_id=task.id)

    def test_logs_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_logs(task_id="020f755c3c082000")
        assert "failed to find task logs" in e.value.body

    def test_run_logs(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)
        runs = self.tasks_api.get_runs(task_id=task.id)
        self.assertGreater(len(runs), 0)

        logs = self.tasks_api.get_run_logs(run_id=runs[0].id, task_id=task.id)
        self.assertGreater(len(logs), 0)

        success = False
        for log in logs:
            print(log)
            if log.message.endswith("Completed successfully"):
                success = True

        self.assertTrue(success, "Completed successfully not found in log")

    def test_runs_not_exists(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)

        with pytest.raises(ApiException) as e:
            assert self.tasks_api.get_run_logs(task_id=task.id,
                                               run_id="020f755c3c082000")
        assert "failed to find task logs" in e.value.body

    def test_cancel_run_not_exist(self):
        task = self.tasks_api.create_task_every(self.generate_name("it task"),
                                                TASK_FLUX, "1s",
                                                self.organization)
        time.sleep(5)
        runs = self.tasks_api.get_runs(task.id)

        with pytest.raises(ApiException) as e:
            assert self.tasks_api.cancel_run(task_id=task.id,
                                             run_id=runs[0].id)
        assert "failed to cancel run" in e.value.body
        assert "run not found" in e.value.body

    def test_cancel_task_not_exist(self):
        with pytest.raises(ApiException) as e:
            assert self.tasks_api.cancel_run("020f755c3c082000",
                                             "020f755c3c082000")
        assert "failed to cancel run" in e.value.body
        assert "task not found" in e.value.body
Example 2
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self) -> bool:
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2', _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug', _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if not influxdb_options.get('enable', None):
            _LOGGER.warning("InfluxDB support is disabled in the YAML configuration file")
            return True

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url, token=self._token, org=self._org, enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)")
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            multisma2_debug = os.getenv(_DEBUG_ENV_VAR, 'False').lower() in ('true', '1', 't')
            try:
                if multisma2_debug and debug_options.get('delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(multisma2_debug and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'")
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
        except NewConnectionError:
            _LOGGER.error(f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
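        """Close the write API and then the underlying client connection."""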
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_points(self, points):
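        """Write a batch of line-protocol points to the configured bucket."""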
        if not self._write_api:
            return False
        try:
            self._write_api.write(bucket=self._bucket, record=points, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_points(): {e}")

    def write_history(self, site, topic):
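        """Write per-inverter history records for the given topic as line protocol."""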
        if not self._write_api:
            return False

        lookup = LP_LOOKUP.get(topic, None)
        if not lookup:
            _LOGGER.error(f"write_history(): unknown topic '{topic}'")
            return False

        measurement = lookup.get('measurement')
        tags = lookup.get('tags', None)
        field = lookup.get('field', None)
        lps = []
        for inverter in site:
            inverter_name = inverter.pop(0)
            name = inverter_name.get('inverter', 'sunnyboy')
            for history in inverter:
                t = history['t']
                v = history['v']
                if v is None:
                    continue
                lp = f"{measurement}"
                if tags and len(tags):
                    lp += f",{tags[0]}={name}"
                if isinstance(v, int):
                    lp += f" {field}={v}i {t}"
                    lps.append(lp)
                else:
                    _LOGGER.error(
                        f"write_history(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                    continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            _LOGGER.debug(f"write_history({site}, {topic}): {lps}")
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_history(): {e}")

    def write_sma_sensors(self, sensor, timestamp=None):
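        """Convert SMA sensor readings to line protocol and write them to the bucket."""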
        if not self._client:
            return False

        ts = timestamp if timestamp is not None else int(time.time())
        lps = []
        for old_point in sensor:
            point = old_point.copy()
            topic = point.pop('topic', None)
            point.pop('precision', None)
            if topic:
                lookup = LP_LOOKUP.get(topic, None)
                if not lookup:
                    _LOGGER.error(f"write_sma_sensors(): unknown topic '{topic}'")
                    continue

                if not lookup.get('output', False):
                    continue

                if topic == 'production/today':
                    day = datetime.datetime.fromtimestamp(ts).date()
                    dt = datetime.datetime.combine(day, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/month':
                    month = datetime.date.fromtimestamp(ts).replace(day=1)
                    dt = datetime.datetime.combine(month, datetime.time(0, 0))
                    ts = int(dt.timestamp())
                elif topic == 'production/year':
                    year = datetime.date.fromtimestamp(ts).replace(month=1, day=1)
                    dt = datetime.datetime.combine(year, datetime.time(0, 0))
                    ts = int(dt.timestamp())

                measurement = lookup.get('measurement')
                tags = lookup.get('tags', None)
                for k, v in point.items():
                    field = lookup.get('field')
                    # sample: dc_measurements
                    lp = f'{measurement}'
                    if tags and len(tags):
                        # sample: dc_measurements,_inverter=sb71
                        lp += f',{tags[0]}={k}'
                    if not field:
                        field = k
                    if isinstance(v, int):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v}i {ts}'
                        lps.append(lp)
                    elif isinstance(v, float):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v} {ts}'
                        lps.append(lp)
                    elif isinstance(v, dict):
                        lp_prefix = f'{lp}'
                        for k1, v1 in v.items():
                            # sample: dc_measurements,_inverter=sb71
                            lp = f'{lp_prefix}'
                            if tags and len(tags) > 1:
                                # sample: dc_measurements,_inverter=sb71,_string=a
                                lp += f',{tags[1]}={k1}'
                            if isinstance(v1, int):
                                # sample: dc_measurements,_inverter=sb71,_string=a power=1000 1556813561098
                                lp += f' {field}={v1}i {ts}'
                                lps.append(lp)
                            elif isinstance(v1, float):
                                # sample: dc_measurements,_inverter=sb71,_string=a current=0.23 1556813561098
                                lp += f' {field}={v1} {ts}'
                                lps.append(lp)
                            else:
                                _LOGGER.error(
                                    f"write_sma_sensors(): unanticipated dictionary type '{type(v1)}' in measurement '{measurement}/{field}'")
                    else:
                        _LOGGER.error(
                            f"write_sma_sensors(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                        continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_sma_sensors(): {e}")

    def delete_bucket(self):
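        """Delete the configured bucket, returning True if it is gone afterwards."""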
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
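        """Check that the configured bucket exists, creating it if requested."""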
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                bucket = buckets_api.create_bucket(
                    bucket_name=self._bucket, org_id=self._org, retention_rules=None, org=None)
                if bucket:
                    _LOGGER.info(f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}")
        except NewConnectionError:
            raise
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in connect_bucket(): {e}")
Example 3
class InfluxDB:
    def __init__(self, config):
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self):
        """Initialize the InfluxDB client."""
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2',
                                                _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug',
                                             _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if len(influxdb_options.keys()) == 0:
            raise FailedInitialization("missing 'influxdb2' options")

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url,
                                          token=self._token,
                                          org=self._org,
                                          enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)"
                )
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            cs_esphome_debug = os.getenv(_DEBUG_ENV_VAR,
                                         'False').lower() in ('true', '1', 't')
            try:
                if cs_esphome_debug and debug_options.get(
                        'delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(
                        f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(
                        cs_esphome_debug
                        and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(
                        f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'"
                    )
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(
                f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'"
            )
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f" client {e}")
            self._client = None
        except NewConnectionError:
            _LOGGER.error(
                f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(
                f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        finally:
            return result

    def stop(self):
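        """Close the write API and then the underlying client connection."""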
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_point(self, measurement, tags, field, value, timestamp=None):
        """Write a single sensor to the database."""
        timestamp = timestamp if timestamp is not None else int(time.time())
        lp_tags = ''
        separator = ''
        for tag in tags:
            lp_tags += f"{separator}{tag.get('t')}={tag.get('v')}"
            separator = ','
        lp = f"{measurement}," + lp_tags + f" {field}={value} {timestamp}"

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=lp,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_point(): {e}")

    def write_points(self, points):
        """Write a list of points to the database."""
        try:
            self._write_api.write(bucket=self._bucket,
                                  record=points,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_points(): {e}")

    def write_batch_sensors(self, batch_sensors, timestamp=None):
        """Write a batch of sensors to the database."""

        if len(batch_sensors) == 0:
            return

        timestamp = timestamp if timestamp is not None else int(time.time())

        batch = []
        for record in batch_sensors:
            sensor = record.get('sensor', None)
            state = record.get('state', None)
            measurement = sensor.get('measurement', None)
            device = sensor.get('device', None)
            location = sensor.get('location', None)
            precision = sensor.get('precision', None)
            if measurement is None or device is None:
                raise InfluxDBFormatError(
                    "'measurement' and/or 'device' are required")

            location_tag = f',_location={location}' if location else ''
            device_tag = f',_device={device}'
            if precision is not None and isinstance(state, float):
                value = round(state, precision)
            else:
                value = state
            lp = f'{measurement}{device_tag}{location_tag} sample={value} {timestamp}'
            batch.append(lp)

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=batch,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_batch_sensors(): {e}")

    def delete_bucket(self):
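        """Delete the configured bucket, returning True if it is gone afterwards."""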
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
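        """Check that the configured bucket exists, creating it if requested."""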
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                bucket = buckets_api.create_bucket(bucket_name=self._bucket,
                                                   org_id=self._org,
                                                   retention_rules=None,
                                                   org=None)
                if bucket:
                    _LOGGER.info(
                        f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in connect_bucket(): {e}")