예제 #1
0
class BaseTest(unittest.TestCase):
    """Shared fixture for InfluxDB integration tests.

    Builds an ``InfluxDBClient`` from the ``INFLUXDB_V2_*`` environment
    variables (falling back to local-development defaults), exposes the
    API facades the concrete test cases use, and resolves the
    organization the tests run against.
    """

    def setUp(self) -> None:
        self.conf = influxdb_client.configuration.Configuration()

        # Connection settings come from the environment so the suite can
        # target a CI server; the defaults match a local dev instance.
        self.host = os.getenv('INFLUXDB_V2_URL', "http://localhost:8086")
        self.debug = False
        self.auth_token = os.getenv('INFLUXDB_V2_TOKEN', "my-token")
        self.org = os.getenv('INFLUXDB_V2_ORG', "my-org")

        self.client = InfluxDBClient(url=self.host,
                                     token=self.auth_token,
                                     debug=self.debug,
                                     org=self.org)
        self.api_client = self.client.api_client

        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()
        self.users_api = self.client.users_api()
        self.organizations_api = self.client.organizations_api()
        self.authorizations_api = self.client.authorizations_api()
        self.labels_api = self.client.labels_api()

        self.my_organization = self.find_my_org()

    def tearDown(self) -> None:
        # Release the client's HTTP resources after every test.
        self.client.close()

    def create_test_bucket(self):
        """Create and return a uniquely named bucket in the test org."""
        bucket_name = generate_bucket_name()
        bucket = self.buckets_api.create_bucket(bucket_name=bucket_name,
                                                org=self.my_organization,
                                                description=bucket_name +
                                                "description")
        return bucket

    def delete_test_bucket(self, bucket):
        """Delete *bucket* and return the API response."""
        return self.buckets_api.delete_bucket(bucket)

    def find_my_org(self) -> Organization:
        """Return the first Organization whose name matches ``self.org``."""
        return self.client.organizations_api().find_organizations(
            org=self.org)[0]

    @staticmethod
    def log(args):
        print(">>>", args)

    @staticmethod
    def generate_name(prefix):
        """Return a unique name: *prefix* + current timestamp + "-IT".

        *prefix* must be a non-empty string.
        """
        # Bug fix: the original `prefix != "" or prefix is not None` was
        # always true (either disjunct holds for "" and for None), so the
        # guard never fired.  Require a non-None, non-empty prefix.
        assert prefix is not None and prefix != ""
        return prefix + str(datetime.datetime.now().timestamp()) + "-IT"

    @classmethod
    def retention_rule(cls) -> BucketRetentionRules:
        """Return a standard 1-hour expiry retention rule."""
        return BucketRetentionRules(type='expire', every_seconds=3600)

    def assertEqualIgnoringWhitespace(self, first, second, msg=None) -> None:
        """Assert *first* equals *second* after stripping all whitespace."""
        whitespace_pattern = re.compile(r"\s+")
        self.assertEqual(whitespace_pattern.sub("", first),
                         whitespace_pattern.sub("", second),
                         msg=msg)
예제 #2
0
    def _connect(self):

        while not self.write_api:
            client = InfluxDBClient(url=self.url,
                                    token=self.auth_token,
                                    org=self.org)

            # get the orgID from the name:
            try:
                organizations_api = client.organizations_api()
                orgs = organizations_api.find_organizations()
            except:
                self.client = None
                logging.warning('Error connecting to the InfluxDB API. '
                                'Please confirm that InfluxDB is running and '
                                'that the authentication token is correct.'
                                'Sleeping before trying again.')
                time.sleep(5)
                continue

            # Look up the organization id for our org
            our_org = next((org for org in orgs if org.name == self.org), None)
            if not our_org:
                logging.fatal('Can not find org "%s" in InfluxDB', self.org)
                raise RuntimeError('Can not find org "%s" in InfluxDB' %
                                   self.org)
            self.org_id = our_org.id

            # get the bucketID from the name:
            bucket_api = client.buckets_api()
            bucket = bucket_api.find_bucket_by_name(self.bucket_name)

            # if the bucket does not exist then try to create it
            if bucket:
                self.bucket_id = bucket.id
            else:
                try:
                    logging.info('Creating new bucket for: %s',
                                 self.bucket_name)
                    new_bucket = bucket_api.create_bucket(
                        bucket_name=self.bucket_name, org_id=self.org_id)
                    self.bucket_id = new_bucket.id
                except:
                    logging.fatal('Can not create InfluxDB bucket "%s"',
                                  self.bucket_name)
                    raise RuntimeError('Can not create InfluxDB bucket "%s"' %
                                       self.bucket_name)

            self.write_api = client.write_api(write_options=ASYNCHRONOUS)
예제 #3
0
class InfluxDB:
    """Thin wrapper around ``InfluxDBClient`` for this application.

    Owns the client plus the write/query/delete/tasks/organizations API
    facades, and knows how to format the application's sensor payloads
    as InfluxDB line protocol.
    """

    def __init__(self, config):
        # All connection state is populated lazily by start().
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self) -> bool:
        """Initialize the InfluxDB client.

        Returns True on success (or when InfluxDB support is disabled in
        the configuration), False on any failure.  Failures are logged,
        not raised.
        """
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2', _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug', _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if not influxdb_options.get('enable', None):
            _LOGGER.warning("InfluxDB support is disabled in the YAML configuration file")
            return True

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url, token=self._token, org=self._org, enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)")
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            # Optional debug hooks: delete and/or (re)create the bucket.
            multisma2_debug = os.getenv(_DEBUG_ENV_VAR, 'False').lower() in ('true', '1', 't')
            try:
                if multisma2_debug and debug_options.get('delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(multisma2_debug and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'")
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
        except NewConnectionError:
            _LOGGER.error(f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        # Bug fix: the original returned from a `finally` block, which
        # would also swallow SystemExit/KeyboardInterrupt raised while
        # the handlers above were running (flake8-bugbear B012).
        return result

    def stop(self):
        """Flush and release the write API, then close the client."""
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    # Simple accessors for the configured names and API facades.
    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_points(self, points):
        """Write *points* (line protocol records) at second precision.

        Returns True on success, False if the client is not started.
        Raises InfluxDBWriteError on API failures.
        """
        if not self._write_api:
            return False
        try:
            self._write_api.write(bucket=self._bucket, record=points, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_points(): {e}")

    def write_history(self, site, topic):
        """Write per-inverter history samples for *topic* as line protocol.

        *site* is a list of inverter histories; the first element of each
        is popped to obtain the inverter name (NOTE: this mutates the
        caller's lists).  Returns True on success, False when the client
        is not started or the topic is unknown.
        """
        if not self._write_api:
            return False

        lookup = LP_LOOKUP.get(topic, None)
        if not lookup:
            _LOGGER.error(f"write_history(): unknown topic '{topic}'")
            return False

        measurement = lookup.get('measurement')
        tags = lookup.get('tags', None)
        field = lookup.get('field', None)
        lps = []
        for inverter in site:
            inverter_name = inverter.pop(0)
            name = inverter_name.get('inverter', 'sunnyboy')
            for history in inverter:
                t = history['t']
                v = history['v']
                if v is None:
                    continue
                lp = f"{measurement}"
                if tags and len(tags):
                    lp += f",{tags[0]}={name}"
                # Only integer samples are expected for history records.
                if isinstance(v, int):
                    lp += f" {field}={v}i {t}"
                    lps.append(lp)
                else:
                    _LOGGER.error(
                        f"write_history(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                    continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            _LOGGER.debug(f"write_history({site}, {topic}): {lps}")
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_history(): {e}")

    def write_sma_sensors(self, sensor, timestamp=None):
        """Write SMA sensor points as line protocol at second precision.

        *timestamp* defaults to now (epoch seconds).  Daily/monthly/yearly
        production totals are timestamped at the start of their period.
        Returns True on success, False if the client is not started.
        Raises InfluxDBWriteError on API failures.
        """
        if not self._client:
            return False

        ts = timestamp if timestamp is not None else int(time.time())
        lps = []
        for old_point in sensor:
            point = old_point.copy()
            topic = point.pop('topic', None)
            point.pop('precision', None)
            if topic:
                lookup = LP_LOOKUP.get(topic, None)
                if not lookup:
                    _LOGGER.error(f"write_sma_sensors(): unknown topic '{topic}'")
                    continue

                if not lookup.get('output', False):
                    continue

                # Bug fix: use a per-point timestamp.  Previously the
                # period-total branches overwrote `ts` itself, so every
                # later point in the batch inherited the rewound time.
                point_ts = ts
                if topic == 'production/today':
                    day = datetime.datetime.fromtimestamp(ts).date()
                    dt = datetime.datetime.combine(day, datetime.time(0, 0))
                    point_ts = int(dt.timestamp())
                elif topic == 'production/month':
                    month = datetime.date.fromtimestamp(ts).replace(day=1)
                    dt = datetime.datetime.combine(month, datetime.time(0, 0))
                    point_ts = int(dt.timestamp())
                elif topic == 'production/year':
                    year = datetime.date.fromtimestamp(ts).replace(month=1, day=1)
                    dt = datetime.datetime.combine(year, datetime.time(0, 0))
                    point_ts = int(dt.timestamp())

                measurement = lookup.get('measurement')
                tags = lookup.get('tags', None)
                for k, v in point.items():
                    field = lookup.get('field')
                    # sample: dc_measurements
                    lp = f'{measurement}'
                    if tags and len(tags):
                        # sample: dc_measurements,_inverter=sb71
                        lp += f',{tags[0]}={k}'
                    if not field:
                        field = k
                    if isinstance(v, int):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v}i {point_ts}'
                        lps.append(lp)
                    elif isinstance(v, float):
                        # sample: ac_measurements,_inverter=sb71 power=0.23 1556813561098
                        lp += f' {field}={v} {point_ts}'
                        lps.append(lp)
                    elif isinstance(v, dict):
                        lp_prefix = f'{lp}'
                        for k1, v1 in v.items():
                            # sample: dc_measurements,_inverter=sb71
                            lp = f'{lp_prefix}'
                            if tags and len(tags) > 1:
                                # sample: dc_measurements,_inverter=sb71,_string=a
                                lp += f',{tags[1]}={k1}'
                            if isinstance(v1, int):
                                # sample: dc_measurements,_inverter=sb71,_string=a power=1000 1556813561098
                                lp += f' {field}={v1}i {point_ts}'
                                lps.append(lp)
                            elif isinstance(v1, float):
                                # sample: dc_measurements,_inverter=sb71,_string=a current=0.23 1556813561098
                                lp += f' {field}={v1} {point_ts}'
                                lps.append(lp)
                            else:
                                _LOGGER.error(
                                    f"write_sma_sensors(): unanticipated dictionary type '{type(v1)}' in measurement '{measurement}/{field}'")
                    else:
                        _LOGGER.error(
                            f"write_sma_sensors(): unanticipated type '{type(v)}' in measurement '{measurement}/{field}'")
                        continue

        try:
            self._write_api.write(bucket=self._bucket, record=lps, write_precision=WritePrecision.S)
            return True
        except ApiException as e:
            raise InfluxDBWriteError(f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBWriteError(f"Unexpected failure in write_sma_sensors(): {e}")

    def delete_bucket(self):
        """Delete the configured bucket.

        Returns True when the bucket is verified gone, False when the
        client is not started or the bucket is still present.  Raises
        InfluxDBBucketError on API failures.
        """
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                # Re-query to confirm the deletion actually took effect.
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}")
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        """Ensure the configured bucket exists, optionally creating it.

        Returns True when the bucket exists (or was created), False
        otherwise.  Raises InfluxDBBucketError on API failures;
        NewConnectionError is re-raised for start() to report.
        """
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                # NOTE(review): self._org holds the organization *name* from
                # the config; confirm create_bucket accepts a name for org_id.
                bucket = buckets_api.create_bucket(
                    bucket_name=self._bucket, org_id=self._org, retention_rules=None, org=None)
                if bucket:
                    _LOGGER.info(f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}")
        except NewConnectionError:
            raise
        except Exception as e:
            raise InfluxDBBucketError(f"Unexpected exception in connect_bucket(): {e}")
예제 #4
0
class InfluxDB:
    """Thin wrapper around ``InfluxDBClient`` for this application.

    Owns the client and its API facades and formats sensor readings as
    InfluxDB line protocol.
    """

    def __init__(self, config):
        # All connection state is populated lazily by start().
        self._config = config
        self._client = None
        self._write_api = None
        self._query_api = None
        self._delete_api = None
        self._tasks_api = None
        self._organizations_api = None
        self._token = None
        self._org = None
        self._url = None
        self._bucket = None

    def start(self):
        """Initialize the InfluxDB client.

        Returns True on success and False on failure (errors are
        logged).  Raises FailedInitialization when the 'influxdb2'
        options are missing entirely.
        """
        try:
            influxdb_options = retrieve_options(self._config, 'influxdb2',
                                                _INFLUXDB2_OPTIONS)
            debug_options = retrieve_options(self._config, 'debug',
                                             _DEBUG_OPTIONS)
        except FailedInitialization as e:
            _LOGGER.error(f"{e}")
            return False

        if len(influxdb_options.keys()) == 0:
            raise FailedInitialization("missing 'influxdb2' options")

        result = False
        try:
            self._bucket = influxdb_options.get('bucket', None)
            self._url = influxdb_options.get('url', None)
            self._token = influxdb_options.get('token', None)
            self._org = influxdb_options.get('org', None)
            self._client = InfluxDBClient(url=self._url,
                                          token=self._token,
                                          org=self._org,
                                          enable_gzip=True)
            if not self._client:
                raise FailedInitialization(
                    f"failed to get InfluxDBClient from '{self._url}' (check url, token, and/or organization)"
                )
            self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
            self._query_api = self._client.query_api()
            self._delete_api = self._client.delete_api()
            self._tasks_api = self._client.tasks_api()
            self._organizations_api = self._client.organizations_api()

            # Optional debug hooks: delete and/or (re)create the bucket.
            cs_esphome_debug = os.getenv(_DEBUG_ENV_VAR,
                                         'False').lower() in ('true', '1', 't')
            try:
                if cs_esphome_debug and debug_options.get(
                        'delete_bucket', False):
                    self.delete_bucket()
                    _LOGGER.info(
                        f"Deleted bucket '{self._bucket}' at '{self._url}'")
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            try:
                if not self.connect_bucket(
                        cs_esphome_debug
                        and debug_options.get('create_bucket', False)):
                    raise FailedInitialization(
                        f"Unable to access (or create) bucket '{self._bucket}' at '{self._url}'"
                    )
            except InfluxDBBucketError as e:
                raise FailedInitialization(f"{e}")

            _LOGGER.info(
                f"Connected to InfluxDB: '{self._url}', bucket '{self._bucket}'"
            )
            result = True

        except FailedInitialization as e:
            _LOGGER.error(f" client {e}")
            self._client = None
        except NewConnectionError:
            _LOGGER.error(
                f"InfluxDB client unable to connect to host at {self._url}")
        except ApiException as e:
            _LOGGER.error(
                f"InfluxDB client unable to access bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            _LOGGER.error(f"Unexpected exception: {e}")
        # Bug fix: the original returned from a `finally` block, which
        # would also swallow SystemExit/KeyboardInterrupt raised while
        # the handlers above were running (flake8-bugbear B012).
        return result

    def stop(self):
        """Flush and release the write API, then close the client."""
        if self._write_api:
            self._write_api.close()
            self._write_api = None
        if self._client:
            self._client.close()
            self._client = None

    # Simple accessors for the configured names and API facades.
    def bucket(self):
        return self._bucket

    def org(self):
        return self._org

    def write_api(self):
        return self._write_api

    def query_api(self):
        return self._query_api

    def delete_api(self):
        return self._delete_api

    def tasks_api(self):
        return self._tasks_api

    def organizations_api(self):
        return self._organizations_api

    def write_point(self, measurement, tags, field, value, timestamp=None):
        """Write a single sensor to the database.

        *tags* is a list of ``{'t': name, 'v': value}`` dicts;
        *timestamp* defaults to now (epoch seconds).  Raises
        InfluxDBWriteError on failure.
        """
        timestamp = timestamp if timestamp is not None else int(time.time())
        # Bug fix: only emit the tag-set comma when there are tags.
        # Previously an empty *tags* list produced invalid line protocol
        # of the form "measurement, field=value ts".
        lp_tags = ','.join(f"{tag.get('t')}={tag.get('v')}" for tag in tags)
        if lp_tags:
            lp = f"{measurement},{lp_tags} {field}={value} {timestamp}"
        else:
            lp = f"{measurement} {field}={value} {timestamp}"

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=lp,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_point(): {e}")

    def write_points(self, points):
        """Write a list of points to the database."""
        try:
            self._write_api.write(bucket=self._bucket,
                                  record=points,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_points(): {e}")

    def write_batch_sensors(self, batch_sensors, timestamp=None):
        """Write a batch of sensors to the database.

        Each record is ``{'sensor': {...}, 'state': value}``; floats are
        rounded to the sensor's 'precision' when present.  Raises
        InfluxDBFormatError for malformed records and InfluxDBWriteError
        on API failures.
        """
        if len(batch_sensors) == 0:
            return

        timestamp = timestamp if timestamp is not None else int(time.time())

        batch = []
        for record in batch_sensors:
            sensor = record.get('sensor', None)
            state = record.get('state', None)
            measurement = sensor.get('measurement', None)
            device = sensor.get('device', None)
            location = sensor.get('location', None)
            precision = sensor.get('precision', None)
            if measurement is None or device is None:
                raise InfluxDBFormatError(
                    "'measurement' and/or 'device' are required")

            location_tag = '' if not location or not len(
                location) else f',_location={location}'
            device_tag = f',_device={device}'
            value = round(
                state, precision) if ((precision is not None)
                                      and isinstance(state, float)) else state
            lp = f'{measurement}{device_tag}{location_tag} sample={value} {timestamp}'
            batch.append(lp)

        try:
            self._write_api.write(bucket=self._bucket,
                                  record=batch,
                                  write_precision=WritePrecision.S)
        except ApiException as e:
            raise InfluxDBWriteError(
                f"InfluxDB client unable to write to '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBWriteError(
                f"Unexpected failure in write_batch_sensors(): {e}")

    def delete_bucket(self):
        """Delete the configured bucket.

        Returns True when the bucket is verified gone, False when the
        client is not started or the bucket is still present.  Raises
        InfluxDBBucketError on API failures.
        """
        # Robustness fix: guard against an unstarted client (matches the
        # check every other method performs) instead of raising a
        # misleading bucket error from an AttributeError on None.
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            found_bucket = buckets_api.find_bucket_by_name(self._bucket)
            if found_bucket:
                buckets_api.delete_bucket(found_bucket)
                # Re-query to confirm the deletion actually took effect.
                bucket = buckets_api.find_bucket_by_name(self._bucket)
                if not bucket:
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to delete bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in delete_bucket(): {e}")

    def connect_bucket(self, create_bucket=False):
        """Ensure the configured bucket exists, optionally creating it.

        Returns True when the bucket exists (or was created), False
        otherwise.  Raises InfluxDBBucketError on API failures;
        NewConnectionError is re-raised for start() to report.
        """
        # Robustness fix: guard against an unstarted client.
        if not self._client:
            return False
        try:
            buckets_api = self._client.buckets_api()
            bucket = buckets_api.find_bucket_by_name(self._bucket)
            if bucket:
                return True
            if create_bucket:
                # NOTE(review): self._org holds the organization *name* from
                # the config; confirm create_bucket accepts a name for org_id.
                bucket = buckets_api.create_bucket(bucket_name=self._bucket,
                                                   org_id=self._org,
                                                   retention_rules=None,
                                                   org=None)
                if bucket:
                    _LOGGER.info(
                        f"Created bucket '{self._bucket}' at {self._url}")
                    return True
            return False
        except ApiException as e:
            raise InfluxDBBucketError(
                f"InfluxDB client unable to create bucket '{self._bucket}' at {self._url}: {e.reason}"
            )
        except NewConnectionError:
            raise
        except Exception as e:
            raise InfluxDBBucketError(
                f"Unexpected exception in connect_bucket(): {e}")
예제 #5
0
class InfluxDBWriter(Writer):
  """Write data records to an InfluxDB bucket, creating it if needed."""
  # (The previous class docstring described a file writer — a copy/paste
  # error; this class only talks to InfluxDB.)
  def __init__(self, bucket_name):
    """
    Write data records to the InfluxDB.
    ```
    bucket_name  the name of the bucket in InfluxDB.  If the bucket does
    not exists then this writer will try to create it.
    ```
    """
    super().__init__(input_format=Text)

    if not INFLUXDB_SETTINGS_FOUND:
      raise RuntimeError('File database/settings.py not found. '
                         'InfluxDB functionality is not available. Have '
                         'you copied over database/settings.py.dist '
                         'to database/settings.py and followed the '
                         'configuration instructions in it?')
    if not INFLUXDB_CLIENT_FOUND:
      raise RuntimeError('Python module influxdb_client not found. Please '
                         'install using "pip install influxdb_client" prior '
                         'to using InfluxDBWriter.')

    self.client = InfluxDBClient(url=INFLUXDB_URL, token=INFLUXDB_AUTH_TOKEN, org=INFLUXDB_ORG)

    # get the orgID from the name:
    try:
      self.organizations_api = self.client.organizations_api()
      orgs = self.organizations_api.find_organizations()
    except Exception as e:
      # Narrowed from a bare `except:`; chain the cause so the original
      # API error is preserved for debugging.
      raise RuntimeError('Error connecting to the InfluxDB API. '
                         'Please confirm that InfluxDB is running and '
                         'that the authentication token is correct.') from e

    our_org = next((org for org in orgs if org.name == INFLUXDB_ORG), None)

    if not our_org:
      raise RuntimeError('Can not find the organization "' + INFLUXDB_ORG + '" in InfluxDB')

    self.org_id = our_org.id

    # get the bucketID from the name:
    self.bucket_api = self.client.buckets_api()
    bucket = self.bucket_api.find_bucket_by_name(bucket_name)

    # if the bucket does not exist then try to create it
    if not bucket:
      try:
        new_bucket = self.bucket_api.create_bucket(bucket_name=bucket_name, org_id=self.org_id)
        logging.info('Creating new bucket for: %s', bucket_name)
        self.bucket_id = new_bucket.id
      except Exception as e:
        # Narrowed from a bare `except:`; chain the cause.
        raise RuntimeError('Can not create bucket in InfluxDB for ' + bucket_name) from e
    else:
      self.bucket_id = bucket.id

    self.write_api = self.client.write_api(write_options=ASYNCHRONOUS)

  ############################
  def write(self, record):
    """
    Note: Assume record is a dict or list of dict. Each dict contains a list
    of "fields" and float "timestamp" (UTC epoch seconds)
    """
    if record is None:
      return

    logging.info('InfluxDBWriter writing record: %s', record)

    if not isinstance(record, (dict, list)):
      logging.warning('InfluxDBWriter could not ingest record '
                      'type %s: %s', type(record), str(record))
      # Bug fix: previously execution fell through and attempted the
      # write anyway, failing and warning a second time.
      return

    try:
      if isinstance(record, list):
        influxDB_record = [
            {"measurement": single_record['data_id'],
             "tags": {"sensor": single_record['data_id']},
             "fields": single_record['fields'],
             # InfluxDB expects nanoseconds since the epoch.
             "time": int(single_record['timestamp'] * 1000000000)}
            for single_record in record
        ]
      else:
        influxDB_record = {"measurement": record['data_id'],
                           "tags": {"sensor": record['data_id']},
                           "fields": record['fields'],
                           "time": int(record['timestamp'] * 1000000000)}

      self.write_api.write(self.bucket_id, self.org_id, influxDB_record)
      return

    except Exception:
      # Best-effort writer: log the failure and drop the record rather
      # than propagate (narrowed from a bare `except:`).
      logging.warning('InfluxDBWriter could not ingest record '
                      'type %s: %s', type(record), str(record))
예제 #6
0
def find_my_org(client: InfluxDBClient, org_name: str) -> Organization:
    """Return the organization named *org_name*, or None when absent."""
    candidates = (org
                  for org in client.organizations_api().find_organizations()
                  if org.name == org_name)
    return next(candidates, None)
예제 #7
0
class GlobalContainer(object):

    log_root = "logs"
    log_level = 30
    log_size = 5 * 1024 * 1024
    log_number = 10
    debug_mode = "True"

    influx_url = "http://*****:*****@localhost/{db_schema}')
            cmd = (
                f'mysql+mysqlconnector://{self.mysql_user}:{self.mysql_password}@{self.mysql_host}/{self.mysql_db}'
            )
            self.logger.debug(cmd)
            self.eng = create_engine(cmd)
            #Base = declarative_base()

            #base.Base.metadata.bind = eng
            #base.Base.metadata.create_all()
            common.base.Base.metadata.create_all(self.eng, checkfirst=True)

            Session = sessionmaker(bind=self.eng)
            self.ses = Session()
        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            sys.exit(99)

    def connectInfluxDatabase(self):
        """Open the InfluxDB connection described by the self.influx_* settings.

        Version 1 uses a DataFrameClient; version 2 builds an InfluxDBClient
        plus query/write API handles. Any failure is logged and terminates
        the process with exit code 99 (same policy as the MySQL connect).
        """
        try:
            self.logger.debug(
                f'Connecting to Influx with: Host:{self.influx_host}, Port: {self.influx_port}, User: {self.influx_user}, DB: {self.influx_db}'
            )
            if (self.influx_version == 1):
                # (removed a dead 'pass' that preceded this assignment)
                self.influxClient = DataFrameClient(self.influx_host,
                                                    self.influx_port,
                                                    self.influx_user,
                                                    self.influx_pwd,
                                                    self.influx_db)

            elif (self.influx_version == 2):
                # Retry transient write failures: up to 20 attempts with
                # linear backoff (exponential_base=1 disables the exponent).
                retries = WritesRetry(total=20,
                                      backoff_factor=1,
                                      exponential_base=1)

                self.influxClient = InfluxDBClient(
                    url=f"http://{self.influx_host}:{self.influx_port}",
                    token=self.influx_token,
                    org=self.influx_org,
                    retries=retries,
                    timeout=180_000)

                self.influx_query_api = self.influxClient.query_api()

                # NOTE(review): write_type is synchronous, so the batch/flush
                # parameters below likely have no effect - confirm intent.
                self.influx_write_api = self.influxClient.write_api(
                    write_options=WriteOptions(
                        batch_size=500,
                        write_type=WriteType.synchronous,
                        flush_interval=10_000,
                        jitter_interval=2_000,
                        retry_interval=30_000,
                        max_retries=25,
                        max_retry_delay=60_000,
                        exponential_base=2))

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            sys.exit(99)

    def resetDatabases(self):
        """Reset both backing stores (MySQL and Influx) to an empty state."""
        try:
            self.logger.warning("Resetting Databases")

            # Run the per-store resets in a fixed order: SQL schema first,
            # then the timeseries store.
            for reset in (self.resetMySQLDatabases, self.resetInfluxDatabases):
                reset()

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def resetMySQLDatabases(self):
        """Drop and recreate every ORM-mapped table on the MySQL engine."""
        try:
            self.logger.warning("Resetting MySQL-Database")

            metadata = common.base.Base.metadata
            # Drop first, then rebuild the schema from the declarative models.
            metadata.drop_all(self.eng, checkfirst=True)
            metadata.create_all(self.eng, checkfirst=True)

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def resetInfluxDatabases(self):
        """Drop and recreate the Influx database/bucket named self.influx_db.

        v1: drop/create the database on the long-lived client.
        v2: delete the bucket (when present) and recreate it with an
            infinite retention policy (90-day shard groups).

        Exits the process on failure.
        """
        try:
            self.logger.warning("Resetting Influx-Database")

            if (self.influx_version == 1):
                self.influxClient.drop_database(self.influx_db)
                self.influxClient.create_database(self.influx_db)
            else:
                with InfluxDBClient(
                        url=f"http://{self.influx_host}:{self.influx_port}",
                        token=self.influx_token,
                        org=self.influx_org,
                        timeout=180_000) as client:

                    buckets_api = client.buckets_api()

                    # Remove the bucket if it already exists.
                    my_bucket = buckets_api.find_bucket_by_name(self.influx_db)
                    if my_bucket is not None:
                        buckets_api.delete_bucket(my_bucket)

                    # Resolve the organization id by name. Fix: use the scoped
                    # 'client' (like buckets_api does) instead of the
                    # long-lived self.influxClient; a missing org still lands
                    # in the except-branch below, as before.
                    org = next(o for o in
                               client.organizations_api().find_organizations()
                               if o.name == self.influx_org)

                    # type="forever" + every_seconds=0 keeps data indefinitely.
                    retention_rules = BucketRetentionRules(
                        type="forever",
                        every_seconds=0,
                        shard_group_duration_seconds=60 * 60 * 24 * 90)

                    # (return value intentionally unused; the old
                    # 'created_bucket' binding was dead)
                    buckets_api.create_bucket(
                        bucket_name=self.influx_db,
                        retention_rules=retention_rules,
                        org_id=org.id)

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)
            sys.exit(-99)

    def writeJobStatus(self,
                       Status,
                       StartDate=None,
                       EndDate=None,
                       statusMessage=None,
                       SuccessDate=None):
        """Upsert this job's row in the ScriptStatus table.

        Status:        new status string, always written.
        StartDate:     job start timestamp (written only when given).
        EndDate:       job end timestamp (written only when given).
        statusMessage: free-text status (written only when given).
        SuccessDate:   last-success timestamp (written only when given).

        Also persists the accumulated error/warning counters and messages
        (self.numErrors / self.errMsg / self.numWarnings / self.warnMsg).
        Exceptions are logged, never raised.
        """
        try:
            jobStatus = None
            # Look up the row keyed by this job's name.
            res = self.ses.query(ScriptStatus).filter(
                ScriptStatus.Name == self.jobName)

            if (res.count() == 0):
                # First run for this job: create and commit the row
                # immediately so it is persisted before the updates below.
                self.logger.debug(
                    f'ScriptStatus {self.jobName} not found, creating...')
                jobStatus = ScriptStatus(self.jobName)

                self.ses.add(jobStatus)
                self.ses.commit()
            else:
                jobStatus = res.first()

            # StatusDateTime always reflects the moment of this update.
            jobStatus.StatusDateTime = datetime.datetime.now()
            jobStatus.Status = Status

            # Optional timestamps: only overwrite what the caller supplied.
            if SuccessDate is not None:
                jobStatus.LastSuccessDateTime = SuccessDate

            if StartDate is not None:
                jobStatus.StartDateTime = StartDate

            if EndDate is not None:
                jobStatus.EndDateTime = EndDate

            if statusMessage is not None:
                jobStatus.StatusMessage = statusMessage

            # Counters/messages are always synced from the instance state.
            jobStatus.ErrorNumbers = self.numErrors
            jobStatus.ErrorMessage = self.errMsg

            jobStatus.WarningNumbers = self.numWarnings
            jobStatus.WarningMessage = self.warnMsg

            self.ses.add(jobStatus)
            self.ses.commit()

        except Exception as e:
            self.logger.exception('Crash!', exc_info=e)

    def chunk(self, seq, size):
        """Yield successive slices of *seq* holding at most *size* elements."""
        for start in range(0, len(seq), size):
            yield seq[start:start + size]

    def writeJobMessage(self, logType, logObject, logObjectId, message):
        """Persist one entry to the LogMessage table.

        logType:     Error, Warning, Info, Debug
        logObject:   Stock, Depot, Script, ...
        logObjectId: ISIN, Depot-Name, ...
        message:     free-text message
        """
        try:
            entry = LogMessage(self.runId, logType, logObject, logObjectId,
                               message)
            self.ses.add(entry)
            self.ses.commit()

        except Exception as e:
            # DB trouble while logging must never take the job down.
            self.logger.exception('Crash!', exc_info=e)

    def iQuery(self, qry):
        """Executes the flow query against the innodb"""

        loc = locals()
        logger = logging.getLogger(__name__)
        res = None

        try:
            msg = f"Starting iQuery with {loc}"
            logger.debug(msg)
            self.writeJobStatus("Running", statusMessage=msg)

            # One short-lived client per query; the context manager closes it.
            with InfluxDBClient(
                    url=f"http://{self.influx_host}:{self.influx_port}",
                    token=self.influx_token,
                    org=self.influx_org,
                    timeout=180_000) as client:
                res = client.query_api().query_data_frame(qry)

            done = msg + " - DONE"
            self.writeJobStatus("Running", statusMessage=done)
            logger.debug(done)

            return res

        except Exception as e:
            # Record the failure and fall through (implicitly returns None).
            logger.exception(f'Crash iQuery with {loc}!', exc_info=e)
            self.numErrors += 1
            self.errMsg += f"Crash iQuery with {loc}; "
예제 #8
0
class TimeseriesClient:
    host: Union[str, None]
    port: int
    token: Union[str, None]
    organization: str

    def __init__(
        self,
        host: Optional[str] = None,
        port: Optional[int] = None,
        organization: str = "GEWV",
        token: Optional[str] = None,
        client: Optional[InfluxDBClient] = None,
        verify_ssl: bool = True,
    ):
        """Wrap an InfluxDB (plus Grafana) connection.

        Either pass an already-configured *client*, or host/port/token so a
        new InfluxDBClient can be built. *organization* is treated as an org
        id when it is exactly 16 characters long, otherwise as an org name
        to be resolved.
        """
        if client is None:
            if host is None:
                raise Exception(
                    "Missing Host Address for Timeseries DB Client.")

            if port is None:
                raise Exception("Missing Port for Timeseries DB Client.")

            if token is None:
                raise Exception("Missing Token for Timeseries DB Client.")

            # NOTE(review): verify_ssl doubles as the protocol switch -
            # verify_ssl=False silently downgrades to plain http. Confirm
            # that is intended.
            protocol = "https" if verify_ssl else "http"

            self._client = InfluxDBClient(
                url=f"{protocol}://{host}:{port}",
                token=token,
                verify_ssl=verify_ssl,
            )

            # Length-16 strings are assumed to already be org ids; anything
            # else is an org *name* that must be resolved to an id first.
            if len(organization) != 16:
                # receive id of the org and store the info
                self._org_api = self._client.organizations_api()
                self._org_id = self.get_org_id_by_name(org_name=organization)

                if self._org_id is None:
                    raise Exception(
                        f"The organization {organization} dont exists in InfluxDB. Break execution."
                    )

                self._client.org = self._org_id
            else:
                self._client.org = organization
        else:
            self._client = client

        # Build the API facades. _org_api may be assigned twice on the
        # name-resolution path above - harmless, same underlying API.
        self._org_api = self._client.organizations_api()
        self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
        self._query_api = self._client.query_api()
        self._bucket_api = self._client.buckets_api()

        # NOTE(review): when constructed via from_env_properties, *host* is
        # None here, so the Grafana API target is dubious on that path -
        # confirm Grafana is unused in that configuration.
        self._grafana_api = GrafanaApi(host=host, port=3000, use_tls=False)

    @staticmethod
    def from_env_properties():
        """Build a TimeseriesClient from the INFLUXDB_V2_* environment variables."""
        return TimeseriesClient(client=InfluxDBClient.from_env_properties())

    def health(self):
        """Return the health report of the wrapped InfluxDB client."""
        return self._client.health()

    def get_org_id_by_name(self, org_name: str) -> Union[str, None]:
        """Resolve an organization name to its id; None when no org matches."""
        matches = (org.id for org in self._org_api.find_organizations()
                   if org.name == org_name)
        return next(matches, None)

    def create_bucket(self, bucket: str):
        """Create *bucket*, treating 'already exists' as success.

        InfluxDB answers HTTP 422 when the bucket is already present; that
        error is swallowed so the call is idempotent. Anything else re-raises.
        """
        try:
            self._bucket_api.create_bucket(bucket_name=bucket)
        except ApiException as err:
            if err.status != 422:
                raise

    def exist_bucket(self, bucket: str):
        """Look up *bucket*; returns the Bucket object (truthy) or None - not a bool."""
        return self._bucket_api.find_bucket_by_name(bucket_name=bucket)

    def get_bucket_by_name(self, bucket_name: str):
        """Fetch the bucket named *bucket_name* via the buckets API."""
        found = self._bucket_api.find_bucket_by_name(bucket_name=bucket_name)
        return found

    def delete_bucket(self, bucket: str):
        """Delete the bucket with the given name.

        Fix: the old local was named 'bucket_id', but get_bucket_by_name
        returns a Bucket object (or None), not an id - renamed to avoid
        misleading the next reader. Behaviour is unchanged.
        """
        found_bucket = self.get_bucket_by_name(bucket_name=bucket)
        return self._bucket_api.delete_bucket(bucket=found_bucket)

    def get_grafana_orgs(self) -> List[GrafanaOrganization]:
        """List all organizations known to the attached Grafana instance."""
        return self._grafana_api.get_organizations()

    def get_grafana_org(self, org_name: str) -> GrafanaOrganization:
        """Fetch one Grafana organization by its name."""
        return self._grafana_api.get_organization_by_name(org_name=org_name)

    def create_grafana_org(self, org_name: str):
        """Create a new Grafana organization named *org_name*."""
        return self._grafana_api.create_organization(org_name=org_name)

    def delete_grafana_org(self, org_name: str):
        """Remove the Grafana organization named *org_name*.

        Raises when no organization with that name exists.
        """
        org = self.get_grafana_org(org_name=org_name)

        if org is not None:
            return self._grafana_api.delete_organization(org["id"])

        raise Exception(
            f"Cant delete grafana org {org_name}. Org not exist!")

    def create_project(self, project_name: str):
        """Provision a new project end-to-end. Not implemented yet (no-op).

        Planned steps, per the sketch below:
          1. create new bucket
          2. create token for bucket
          3. create new org in grafana
          4. create new source in grafana
        """
        # Steps
        # 1. create new bucket
        # 2. create token for bucket
        # 3. create new org in grafana
        # 4. create new source in grafana
        pass

    def get_points(
        self,
        **kwargs,
    ) -> List[FluxTable]:
        """Run the query built from **kwargs and return the raw FluxTables.

        Raises when InfluxDB does not report a healthy status.
        """
        # Bug fix: 'if not self.health' tested the bound method object, which
        # is always truthy, so the guard could never fire. Actually call the
        # health endpoint and inspect its status ("pass" when healthy).
        if self.health().status != "pass":
            raise Exception("Influx DB is not reachable or unhealthy.")

        tables = self._query_api.query(query=self.build_query(**kwargs))

        return tables

    def get_dataframe(self, **kwargs):
        """Build a query from **kwargs and return its result as a DataFrame."""
        flux = self.build_query(**kwargs)
        return self.query_dataframe(flux_query=flux)

    def query_dataframe(
        self,
        flux_query: str,
    ):
        """Send a raw Flux query to InfluxDB and return a pandas DataFrame.

        When the result contains a '_time' column, it becomes the frame's
        datetime index. Raises when InfluxDB is not healthy.
        """
        # Bug fix: 'if not self.health' tested the bound method object
        # (always truthy) - the guard never fired. Call the endpoint and
        # check the reported status instead ("pass" when healthy).
        if self.health().status != "pass":
            raise Exception("Influx DB is not reachable or unhealthy.")

        df = cast(
            DataFrame,
            self._query_api.query_data_frame(query=flux_query),
        )

        if "_time" in df.columns:
            df = df.set_index(pd.to_datetime(df["_time"]))

        return df

    def write_points(self, project: str, points: List[Point]):
        """Synchronously write *points* into the bucket named *project*."""
        self._write_api.write(bucket=project, record=points)

    def write_a_dataframe(
        self,
        project: str,
        measurement_name: str,
        dataframe: pd.DataFrame,
        tag_columns: Optional[List[str]] = None,
        additional_tags: Optional[Dict[str, str]] = None,
    ):
        """
        Write a pandas dataframe to the influx db. You can define some
        tags, that are appended to every entry.

        project:          target bucket name
        measurement_name: measurement all rows are written under
        dataframe:        the data; its index is used as the timestamps
        tag_columns:      dataframe columns to store as tags
        additional_tags:  constant tag values appended to every row
        """
        # Bug fix: the old signature used the mutable default [] and then
        # appended additional tag names to it, so tag names leaked across
        # calls (and into a caller-supplied list). Copy or start fresh.
        tag_columns = list(tag_columns) if tag_columns is not None else []

        if additional_tags is None:
            self._write_api.write(
                bucket=project,
                record=dataframe,
                data_frame_measurement_name=measurement_name,
                data_frame_tag_columns=tag_columns,
            )
            return

        # Build a parallel frame holding the constant tag columns.
        tags_dataframe = pd.DataFrame(index=dataframe.index)

        for tag_name, tag in additional_tags.items():
            tag_columns.append(tag_name)
            tags_dataframe[tag_name] = [tag] * len(dataframe)

        combined_frames = pd.concat([dataframe, tags_dataframe], axis=1)

        self._write_api.write(
            bucket=project,
            record=combined_frames,
            data_frame_measurement_name=measurement_name,
            data_frame_tag_columns=tag_columns,
        )

    def build_query(
        self,
        project: str,
        fields: Optional[Dict[str, str]] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        precision: str = "5m",
    ) -> str:
        """Assemble a Flux query string for the bucket *project*.

        fields:              equality filters, rendered as r["key"] == "value"
        start_time/end_time: optional timezone-aware datetimes for range()
        precision:           aggregateWindow size (mean over each window)
        """
        # Hygiene fix: avoid a shared mutable {} default argument.
        if fields is None:
            fields = {}

        query = f"""
            from(bucket: "{project}")
        """

        # range() clause: both bounds, start only, or stop only.
        if start_time is not None and end_time is not None:
            self.test_datetime(start_time)
            self.test_datetime(end_time)

            query += f"""
                |> range(start: {start_time.isoformat()}, stop: {end_time.isoformat()})
            """
        elif start_time is not None:
            self.test_datetime(start_time)

            query += f"""
                |> range(start: {start_time.isoformat()})
            """

        elif end_time is not None:
            self.test_datetime(end_time)

            query += f"""
                |> range(stop: {end_time.isoformat()})
            """

        for f, v in fields.items():
            query += f"""
                |> filter(fn: (r) => r["{f}"] == "{v}")
            """

        query += f"""
            |> aggregateWindow(every: {precision}, fn: mean, createEmpty: true)
            |> yield(name: "mean")
        """

        return query

    @staticmethod
    def test_datetime(dt: datetime):
        """Validate that *dt* is a timezone-aware datetime.

        Raises Exception when dt is not a datetime, or carries no tzinfo.
        """
        if not isinstance(dt, datetime):
            raise Exception(
                f"The delivered datetime {dt} is not from type datetime.")

        if dt.tzinfo is None:
            # Fixed a typo in the user-facing message ("necassary").
            raise Exception(
                f"The time {dt.isoformat()} has no timezone info. That is necessary."
            )