Example No. 1
def write_influxDB():
    import influxdb_client
    from influxdb_client.client.write_api import SYNCHRONOUS

    import random

    #    bucket = "HRWEB"
    #    org = "PM72"
    #    token = "3sLsq9ECi2eSQEYQQjIdxZsTuV6NtFcaohVKzNeILEo5hOPGCRt0Mmgzug_8iai9fCNfbUD1s3wAYd5LAXHOjg=="
    ## Store the URL of your InfluxDB instance
    #
    #    url="https://eu-central-1-1.aws.cloud2.influxdata.com/"

    client = influxdb_client.InfluxDBClient(url=URL, token=TOKEN, org=ORG)

    write_api = client.write_api(write_options=SYNCHRONOUS)

    npoints = tdicprsar['number_of_points_write']
    tdicprsar.update({'hr_measurement': MEASUREMENT})
    hr_min = -5  # renamed to avoid shadowing the built-in min/max
    hr_max = 15
    for item in range(npoints):
        hr = 70 + random.randint(hr_min, hr_max)
        p = influxdb_client.Point(tdicprsar['hr_measurement']).tag(
            "username",
            tdicprsar['username']).tag("location", tdicprsar['location']).tag(
                "conditions",
                tdicprsar['conditions']).field("hr_per_minute", hr)
        write_api.write(bucket=BUCKET, org=ORG, record=p)
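write_influxDB() relies on module-level names (URL, TOKEN, ORG, BUCKET, MEASUREMENT and the shared tdicprsar dict) defined elsewhere in the original module. A minimal sketch of that configuration, with placeholder values drawn from the commented-out block above and the companion read examples below:

# Hypothetical module-level configuration assumed by write_influxDB();
# the values are placeholders, not the original credentials.
URL = "https://eu-central-1-1.aws.cloud2.influxdata.com/"
TOKEN = "my-influxdb-token"
ORG = "PM72"
BUCKET = "HRWEB"
MEASUREMENT = "hr_measurement"
tdicprsar = {
    'number_of_points_write': 10,
    'username': 'TestUser2',
    'location': 'Новосибирск',
    'conditions': 'rest',  # placeholder tag value
}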
Example No. 2
def get_db_value(
    config,
    hostname,
    measure,
    param,
):
    client = influxdb_client.InfluxDBClient(
        url=config["influxdb"]["url"],
        token=config["influxdb"]["token"],
        org=config["influxdb"]["org"],
    )

    query_api = client.query_api()

    table_list = query_api.query(
        query=FLUX_QUERY.format(
            bucket=config["influxdb"]["bucket"],
            measure=measure,
            hostname=hostname,
            param=param,
            period="1h",
        )
    )

    return table_list[0].records[0].get_value()
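get_db_value() formats a FLUX_QUERY template that the example does not show. A plausible sketch, under the assumption that the host is stored in a `host` tag and that only the latest value within the period is wanted:

# Hypothetical template; the real FLUX_QUERY and its tag keys are not shown,
# so the "host" tag below is an assumption.
FLUX_QUERY = (
    'from(bucket: "{bucket}")'
    ' |> range(start: -{period})'
    ' |> filter(fn: (r) => r._measurement == "{measure}")'
    ' |> filter(fn: (r) => r.host == "{hostname}")'
    ' |> filter(fn: (r) => r._field == "{param}")'
    ' |> last()'
)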
Example No. 3
    def __init__(self):
        import influxdb_client
        from influxdb_client.client.write_api import SYNCHRONOUS

        if runtime_data.parameters:
            self.db_params = runtime_data.parameters['Database']
            self.evo_bucket = self.db_params["influxdb_evolutionary_bucket"]
            self.stats_bucket = self.db_params["influxdb_stats_bucket"]
            self.org = self.db_params["influxdb_organization"]
            self.token = self.db_params["influxdb_token"]

            # todo: db address needs to be def from a config file instead
            print('Running in container: ', runtime_data.running_in_container)
            if runtime_data.running_in_container:
                self.url = self.db_params['influxdb_url']
            else:
                self.url = "http://127.0.0.1:8086"

            try:
                print(
                    "\n\n\nAttempting to connect to influxDb service on %s...\n\n\n"
                    % self.url)
                self.client = influxdb_client.InfluxDBClient(url=self.url,
                                                             token=self.token,
                                                             org=self.org)
                self.write_client = self.client.write_api(
                    write_options=SYNCHRONOUS)
            except Exception as exc:
                print("ERROR: Influx service is not running!!!", exc)
        else:
            print("ERROR: Parameters are not set for InfluxDb configuration!")
Example No. 4
	def __init__(self, rover, sensor):
		self.rover = rover  #rover name (roger) is the influxdb database
		self.sensor = sensor
		self.measurement = "sensor"  # the measurement will always be sensor, this is so we can store other kinds
		self.url = "http://192.168.86.39:8086"
		self.token = ""
		client = influxdb_client.InfluxDBClient(url=self.url, token=self.token)
		self.write_api = client.write_api(write_options=SYNCHRONOUS)
		self.tags = {"sensor": sensor}
Example No. 5
 def __init__(self, config, debug=False):
     self.client = influxdb_client.InfluxDBClient(
         url=f'http://{config["addr"]}',
         token=config['token'],
         org=config["org"],
         debug=debug)
     self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
     self.query_api = self.client.query_api()
     self.timezone = config['timezone']
     self.bucket = config['bucket']
Example No. 6
def readFromDBworks(string, bucket, org, url, token):
    client = influxdb_client.InfluxDBClient(url=url, token=token, org=org)
    query_api = client.query_api()
    query = 'from(bucket:"market")\
    |> range(start: -10d)'

    ## Using Table Structure
    result = query_api.query(org=org, query=query)
    results = []

    for table in result:
        for record in table.records:
            print(record.values)
Example No. 7
def read_influxDB(points_list):
    import influxdb_client
    from influxdb_client.client.write_api import SYNCHRONOUS

    #    bucket = "HRWEB"
    #    org = "PM72"
    #    token = "3sLsq9ECi2eSQEYQQjIdxZsTuV6NtFcaohVKzNeILEo5hOPGCRt0Mmgzug_8iai9fCNfbUD1s3wAYd5LAXHOjg=="
    ## Store the URL of your InfluxDB instance
    #
    #    url="https://eu-central-1-1.aws.cloud2.influxdata.com/"

    client = influxdb_client.InfluxDBClient(url=URL, token=TOKEN, org=ORG)
    query_api = client.query_api()
    username = '"' + tdicprsar['username'] + '"'
    bucket = '"' + BUCKET + '"'
    measurement = '"' + MEASUREMENT + '"'
    #query = 'from(bucket:"HRWEB")\
    #|> range(start: -10m)\
    #|> filter(fn:(r) => r._measurement == "hr_measurement")\
    #|> filter(fn:(r) => r.username == "TestUser2")\
    #|> filter(fn:(r) => r.location == "Новосибирск")\
    #|> filter(fn:(r) => r._field == "hr_per_minute" )'

    query = 'from(bucket:' + bucket + ')\
    |> range(start: -10m)\
    |> filter(fn:(r) => r._measurement == ' + measurement + ')\
    |> filter(fn:(r) => r.username == ' + username + ')\
    |> filter(fn:(r) => r.location == "Новосибирск")\
    |> filter(fn:(r) => r._field == "hr_per_minute" )'

    result = query_api.query(org=ORG, query=query)

    results = []
    for table in result:
        for record in table.records:
            results.append((record.values.get('location'), record.get_time(),
                            record.get_field(), record.get_value()))

    delta = timedelta(hours=7, minutes=0)  # shift from UTC to local time (UTC+7, Novosibirsk)
    for item in results:

        points_list.append("Место " + str(item[0]) + " Дата, время" +
                           (item[1] + delta).strftime('%d/%m/%Y, %H:%M:%S') +
                           " Имя поля " + str(item[2]) + " Значение " +
                           str(item[3]))

    tdicprsar.update({'number_of_points_read': len(results)})
    tdicprsar.update({'points_list': points_list})

    return  #points_list
Example No. 8
def main():
    global hostname
    global client_write
    client = influxdb_client.InfluxDBClient(
        url=os.environ.get("INFLUXDB_URL"),
        token=os.environ.get("INFLUXDB_TOKEN"),
        org=os.environ.get("INFLUXDB_ORG"),
    )
    client_write = client.write_api(
        write_options=influxdb_client.client.write_api.SYNCHRONOUS)
    hostname = os.environ.get("HOST")
    print(hostname)
    vcgm = vcgencmd.Vcgencmd()

    loop(client, vcgm)
Example No. 9
def _get_influxdb_client() -> Optional[influxdb_client.InfluxDBClient]:
    metrics_config = get_metrics_config()
    influxdb_config = metrics_config.get("influxdb")

    if influxdb_config is None:
        return None

    url = get_value_with_env_override(influxdb_config, "url")
    token = get_value_with_env_override(influxdb_config, "token")
    org = get_value_with_env_override(influxdb_config, "org")

    if url is None or token is None or org is None:
        return None

    return influxdb_client.InfluxDBClient(url=url, token=token, org=org)
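Since _get_influxdb_client() returns an Optional client, callers need a None check. A minimal usage sketch; the bucket name and query are illustrative:

# Hypothetical caller; the bucket name "metrics" is a placeholder.
client = _get_influxdb_client()
if client is not None:
    query_api = client.query_api()
    tables = query_api.query('from(bucket:"metrics") |> range(start: -5m)')
    client.close()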
Example No. 10
    def wrapper():
        """The wrapper which makes the link to influx fb

        """
        data = func()
        load_dotenv()

        BUCKET = getenv("BUCKET")
        ORG = getenv("ORG")
        TOKEN = getenv("INFLUXDB_TOKEN")
        URL = getenv("URL")

        client = influxdb_client.InfluxDBClient(url=URL, token=TOKEN, org=ORG)
        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket=BUCKET, org=ORG, record=data)
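wrapper() closes over func, so it is presumably the inner function of a decorator. A minimal reconstruction under that assumption; the decorator name to_influx and the sample function are hypothetical:

from os import getenv
from dotenv import load_dotenv
import influxdb_client
from influxdb_client.client.write_api import SYNCHRONOUS

def to_influx(func):
    # Hypothetical enclosing decorator; only the inner wrapper() appears in
    # the original example.
    def wrapper():
        data = func()
        load_dotenv()
        client = influxdb_client.InfluxDBClient(url=getenv("URL"),
                                                token=getenv("INFLUXDB_TOKEN"),
                                                org=getenv("ORG"))
        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket=getenv("BUCKET"), org=getenv("ORG"), record=data)
    return wrapper

@to_influx
def sample_point():
    # any Point or line-protocol string works as the record
    return influxdb_client.Point("demo").field("value", 1.0)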
Example No. 11
 def connection(self):
     """Provide InfluxDB client write_api"""
     retries = influxdb_client.client.write.retry.WritesRetry(
         total=3,
         backoff_factor=1,
         exponential_base=2,
     )
     with influxdb_client.InfluxDBClient(
         url=self._database_cfg["url"],
         token=self._database_cfg["token"],
         org=self._database_cfg["org"],
         retries=retries,
     ) as client, client.write_api(
         write_options=influxdb_client.client.write_api.SYNCHRONOUS
     ) as write_api:
         yield write_api
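Because connection() yields the write API from inside with blocks, it is presumably decorated with @contextlib.contextmanager on its class. A usage sketch under that assumption; writer, the bucket, and the point are placeholders:

# Hypothetical usage, where `writer` is an instance of the class that
# defines connection().
import influxdb_client

point = influxdb_client.Point("demo").field("value", 1.0)
with writer.connection() as write_api:
    write_api.write(bucket="my-bucket", record=point)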
Example No. 12
def readtickerFromDB(string, bucket, org, url, token):
    client = influxdb_client.InfluxDBClient(url=url, token=token, org=org)
    query_api = client.query_api()
    query = 'from(bucket:"' + bucket + '")\
    |> range(start: -366d)\
    |> filter(fn:(r) => r.ticker == "' + string + '")\
    |> filter(fn:(r) => r._field == "close")'

    ## Using Table Structure
    print(query)
    result = query_api.query(org=org, query=query)
    results = []

    for table in result:
        for record in table.records:
            print(record.values)
            results.append(record.values)

    return results
Example No. 13
    def run(self, timeseries):
        print('Neptune insights starting...')
        print('Listening for http requests on port 5000...')
        self.webserver = WebServer(self)
        self.webserver.start(5000)

        print('Connecting to timeseries at: ' + timeseries + '...')
        self.influx = influxdb_client.InfluxDBClient(
            url=timeseries,
            token='Ku-vr2Vu70U47XRsUhNBRB2LoCkoSAQNEEzFc8Mncw72MLvQwaQf6ct0QERwzbN7Mhy8F16apCkkR5Obg0zhaw==',
            org='neptune')
        self.metrics = metrics.Metrics(self.influx)

        print('Neptune insights is started.')
        return self
Example No. 14
def connect():
    global _client
    global _cur_influxdb_url
    global _cur_influxdb_org
    global _cur_influxdb_bucket
    global _cur_influxdb_token

    influxdb_url = settings.app.influxdb_url
    influxdb_org = settings.app.influxdb_org
    influxdb_bucket = settings.app.influxdb_bucket
    influxdb_token = settings.app.influxdb_token
    if influxdb_url == _cur_influxdb_url and \
            influxdb_org == _cur_influxdb_org and \
            influxdb_bucket == _cur_influxdb_bucket and \
            influxdb_token == _cur_influxdb_token:
        return

    if not influxdb_url:
        with _queue_lock:
            _client = None
        _cur_influxdb_url = influxdb_url
        _cur_influxdb_org = influxdb_org
        _cur_influxdb_bucket = influxdb_bucket
        _cur_influxdb_token = influxdb_token
        return

    logger.info(
        'Connecting to InfluxDB',
        'monitoring',
        influxdb_url=influxdb_url,
        influxdb_org=influxdb_org,
        influxdb_bucket=influxdb_bucket,
    )

    _client = influxdb_client.InfluxDBClient(
        url=influxdb_url,
        org=influxdb_org,
        token=influxdb_token,
    )

    _cur_influxdb_url = influxdb_url
    _cur_influxdb_org = influxdb_org
    _cur_influxdb_bucket = influxdb_bucket
    _cur_influxdb_token = influxdb_token
Example No. 15
 def __connect_to_db(self, url, token):
     if self.__db_writer:
         self.__db_writer.close()
     if self.__influx_db_client:
         self.__influx_db_client.close()
     self.__influx_db_client = influxdb2.InfluxDBClient(url=url,
                                                        token=token,
                                                        org='Anubi')
     write_opt = influxdb2.client.write_api.WriteOptions(batch_size=10)
     point_opt = influxdb2.client.write_api.PointSettings(
         **{
             'Device': 'rpi-zw',
             'Location': 'Bedroom'
         })
     self.__db_writer = self.__influx_db_client.write_api(
         write_options=write_opt, point_settings=point_opt)
     if self.__is_db_connected:
         self.__logger.info('Connected to database ' + url)
Example No. 16
    def connect(self):
        """
        Connect to InfluxDB server database

        :return: connection: a new connection object to the database
        """

        self.connection = None

        try:
            self.connection = influxdb_client.InfluxDBClient(
                url=get_variable('URL'),
                token=get_variable('TOKEN'),
                org=get_variable('ORGANIZATION'),
                debug=False).write_api(write_options=SYNCHRONOUS)

        except urllib3.exceptions.HTTPError as e:
            raise RecoverableConnectorError(
                f"Failed to connect to database: {e}")
Example No. 17
 def __init__(self,
              _influx_host="localhost",
              _influx_port=8086,
              _influx_org=None,
              _influx_token=None,
              _influx_bucket=None,
              _debug_on=False,
              _exit_on_fail=False):
     self.influx_host = _influx_host
     self.influx_port = _influx_port
     self.influx_org = _influx_org
     self.influx_token = _influx_token
     self.influx_bucket = _influx_bucket
     self.url = "http://%s:%s" % (self.influx_host, self.influx_port)
     self.client = influxdb_client.InfluxDBClient(url=self.url,
                                                  token=self.influx_token,
                                                  org=self.influx_org,
                                                  debug=_debug_on)
     self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
Example No. 18
def insert_sensordatarow(sensor, location, dblvalueraw, value2):
    """
    Insert statment for sensor row data - One row
    :param sensor:
    :param location:
    :param dblvalue_raw:
    :param value2:
    """
    #    query = "INSERT INTO SensorData(sensor, location, dblvalueraw, value2) " \
    #            "VALUES(%s, %s, %d, %s)"
    #    args = (sensor, location, dblvalue_raw, value2)

    try:
        # connect to influxdb and insert a point = row
        db_config = read_db_config()
        #print(db_config)
        client = influxdb_client.InfluxDBClient(url=db_config.get("url"),
                                                token=db_config.get("token"),
                                                org=db_config.get("org"))

        if client is None:
            print('Connection failed.')
        else:
            print('Connection established.')

        write_api = client.write_api(write_options=SYNCHRONOUS)
        # write a point or row to influxdb
        p = influxdb_client.Point("SendorData").tag("location", location).tag(
            "sensor", sensor).field("value",
                                    dblvalueraw).time(datetime.now(),
                                                      WritePrecision.MS)
        write_api.write(bucket=bucket, org=org, record=p)

    except Exception as exc:
        print("Connection Failed with error: ", exc)

    finally:
        # close the client
        client.close()
Example No. 19
def readFromDB(days, bucket, org, url, token):
    client = influxdb_client.InfluxDBClient(url=url, token=token, org=org)
    query_api = client.query_api()
    query = 'from(bucket:"' + bucket + '")\
    |> range(start: -' + str(days) + 'd)\
    |> filter(fn:(r) => r._field == "close")'

    ## Using Table Structure
    print(query)
    result = query_api.query(org=org, query=query)
    results = []

    for table in result:
        for record in table.records:
            results.append((record.get_time().strftime('%Y-%m-%d'),
                            record.values.get("ticker"), record.get_field(),
                            record.get_value()))

#   print(results)
    return results
Example No. 20
def update_sensordatarows(id, sensor, location, dblvalueraw, value2):
    """
    update for multiple rows
    :param id:
    :param sensor:
    :param location:
    :param dblvalueraw:
    :param value2:
    """

    try:
        # connect to influxdb and insert a point = row
        db_config = read_db_config()
        client = influxdb_client.InfluxDBClient(url=db_config.get("url"),
                                                token=db_config.get("token"),
                                                org=db_config.get("org"))
        if client is None:
            print('Connection failed.')
        else:
            print('Connection established.')

        # prepare query and data
        query = """ UPDATE SensorData
                SET location = %s
                WHERE id = %s """

        data = (location, id)

    except Exception as exc:
        print('update_sensordatarows - error:', exc)

    finally:
        print('update_sensordatarows - Connection Closed.')
        client.close()
Example No. 21
def send_to_influx(influx_payload, influx_url, influx_bucket_id, influx_token,
                   influx_org):

    client = influxdb_client.InfluxDBClient(url=influx_url,
                                            token=influx_token,
                                            org=influx_org)

    write_api = client.write_api(write_options=SYNCHRONOUS)
    try:
        write_api.write(bucket=influx_bucket_id, record=influx_payload)
        text = "Data send"
        status_code = HTTP_CODE_OK
    except Exception as e:
        text = str(e)
        status_code = HTTP_CODE_ERROR

    # closing connections
    write_api.close()
    client.close()

    return text, status_code
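A minimal caller sketch for send_to_influx(); the URL, token, org, and bucket are placeholders, and HTTP_CODE_OK / HTTP_CODE_ERROR plus SYNCHRONOUS must already be defined in the module for the function itself to run:

# Hypothetical caller; any Point or line-protocol string works as the payload.
import influxdb_client

payload = influxdb_client.Point("temperature").tag("room", "lab").field("value", 21.5)
text, status_code = send_to_influx(payload, "http://localhost:8086",
                                   "my-bucket", "my-token", "my-org")
print(status_code, text)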
Example No. 22
def parse(files):

    fn = []
    for f in files:
        fn.extend(glob.glob(f))

    username = '******'
    password = '******'

    database = 'sots'
    retention_policy = 'autogen'

    bucket = f'{database}/{retention_policy}'

    client = influxdb_client.InfluxDBClient(url='http://localhost:8086',
                                            token=f'{username}:{password}',
                                            org='-')

    # bucket = "sots"
    # org = "abos"
    # token = "-SmyZoCtOefu_-_TBws9e28bT7KV7gwISMC0s3B8jgCOYckv-vujCBkrKblWWA15elwqwMHJeCOxkvrrHwvyMw=="
    #
    # client = influxdb_client.InfluxDBClient(url="http://localhost:9999", token=token, org=org)
    write_api = client.write_api(write_options=ASYNCHRONOUS)

    for filepath in fn:
        print('file name', filepath)

        nc = Dataset(filepath, 'r')
        post_id = postgres_insert(nc)

        # get time variable
        vs = nc.get_variables_by_attributes(standard_name='time')
        nctime = vs[0]
        t_unit = nctime.units  # get unit  "days since 1950-01-01T00:00:00Z"

        try:
            t_cal = nctime.calendar
        except AttributeError:  # Attribute doesn't exist
            t_cal = u"gregorian"  # or standard

        dt_time = [num2date(t, units=t_unit, calendar=t_cal) for t in nctime]

        print('time variable', nctime.name)
        time_dims = nctime.get_dims()
        time_dims_name = time_dims[0].name
        print('time dimension(0)', time_dims_name)

        z_coords = nc.get_variables_by_attributes(axis='Z')
        #print('z coords', z_coords)
        nom_depth = None
        try:
            nom_depth_var = z_coords[0]  # TODO not take first one
            nom_depth = nom_depth_var[:].data
        except IndexError:  # z_coords may be empty
            pass

        coords = None
        time_vars_name = []
        for v in nc.variables:
            if v != nctime.name:
                dim_names = [d.name for d in nc.variables[v].get_dims()]
                print('variable ', v, dim_names)
                if time_dims_name in dim_names:
                    print(' has time dimension')
                    time_vars_name.append(v)
            if 'coordinates' in nc.variables[v].ncattrs():
                #print('coord', nc.variables[v].ncattrs())
                coords = nc.variables[v].getncattr('coordinates')

            print(' coords:', coords)

        #print('time vars', time_vars)

        # remove an auxiliary variables from the list to plot
        aux_vars = list()
        for var in nc.variables:
            try:
                aux_vars.extend(nc.variables[var].getncattr(
                    'ancillary_variables').split(' '))
            except AttributeError:
                pass

        for var in aux_vars:
            print('remove aux', var)
            time_vars_name.remove(var)

        print('time vars not aux', time_vars_name)
        time_vars = []
        for v in time_vars_name:
            data = nc.variables[v]
            qc = None
            if v + "_quality_control" in nc.variables:
                qc = nc.variables[v + "_quality_control"]
            time_vars.append({
                'var': data,
                'time_dim': data.dimensions.index('TIME'),
                'qc': qc
            })

        date_time_start = datetime.datetime.strptime(
            nc.getncattr('time_deployment_start'), '%Y-%m-%dT%H:%M:%SZ')
        date_time_end = datetime.datetime.strptime(
            nc.getncattr('time_deployment_end'), '%Y-%m-%dT%H:%M:%SZ')

        point = {
            'measurement': nc.platform_code,
            'tags': {
                'file_id': post_id,
                'site': nc.deployment_code,
                'nominal_depth': nom_depth
            }
        }
        for n in range(0, len(time_dims[0])):
            if (dt_time[n] > date_time_start) and (dt_time[n] < date_time_end):
                print(n, dt_time[n], nom_depth)
                point['time'] = dt_time[n]
                fields = {}
                for v in time_vars:
                    qc = 0
                    if v['qc']:
                        qc = v['qc'][n]
                    if qc <= 2:
                        print('field ', v['var'].name, v['time_dim'], qc)
                        if v['time_dim'] == 0:
                            fields[v['var'].name] = float(v['var'][n].data)
                        elif v['time_dim'] == 1:
                            fields[v['var'].name] = float(v['var'][0, n].data)

                point['fields'] = fields

                print('point', point)

                #write_api.write(bucket=bucket, org=org, record=point)
                write_api.write(bucket=bucket,
                                record=point,
                                write_precision='s')

        write_api.flush()

        nc.close()

    return None
Example No. 23
def main():
    import argparse

    ###
    # parse command-line arguments
    ###
    parser = argparse.ArgumentParser()
    parser.add_argument('-d',
                        '--debug',
                        help='Debug mode.',
                        action='store_true')
    parser.add_argument('-q',
                        '--quiet',
                        help='Quiet mode, less output.',
                        action='store_true')

    parser.add_argument(
        '-t',
        '--tty',
        metavar='DEV',
        default='/dev/ttyUSB0',
        help='Serial port, [def: /dev/ttyUSB0]',
    )

    parser.add_argument('-s',
                        '--sleep',
                        metavar='SEC',
                        default=15,
                        type=int,
                        help='Time to sleep between queries.')
    parser.add_argument(
        '-B',
        '--batch-submit',
        metavar='N',
        default=5,
        type=int,
        help='Only submit in batches of N to database. [def: %(default)d]')

    parser.add_argument('-w',
                        '--webserver',
                        metavar='PORT',
                        default=None,
                        type=int,
                        help='''Run webserver to submit queries on
http://localhost:PORT/query/address/length_or_tag where length may be one
of the allowed data types (e.g. degC, uint8, ...) or number of bytes to read.
[def: off]''')

    grp = parser.add_argument_group('InfluxDB Related')
    grp.add_argument('-i',
                     '--influxdb-url',
                     metavar='URL',
                     type=str,
                     default='http://127.0.0.1:8086/',
                     help='Influxdb database url use [def: %(default)s]')
    grp.add_argument(
        '-T',
        '--influxdb-token-file',
        metavar='FILE',
        type=Path,
        default='/usr/local/lib/py-viessmann-log/influxdb.token',
        help='Path with influxdb token (1 line). [def: %(default)s]')
    grp.add_argument('-o',
                     '--influxdb-org',
                     metavar='org',
                     type=str,
                     default='vogel.cx',
                     help='Influxdb org. [def: %(default)s]')
    grp.add_argument('-b',
                     '--influxdb-bucket',
                     metavar='bucket',
                     default='heating',
                     type=str,
                     help='Influxdb bucket [def: %(default)s]')
    grp.add_argument('-m',
                     '--influxdb-measurement',
                     metavar='MEASNAME',
                     default='optolink',
                     help='Influxdb measurement name to use [def: optolink]')

    parser.add_argument('variablelist',
                        help='''File with variables to query regularly.''')

    args = parser.parse_args()

    lvl = logging.INFO
    if args.quiet:
        lvl = logging.WARNING
    if args.debug:
        lvl = logging.DEBUG

    logging.basicConfig(level=lvl, format='%(asctime)-15s %(message)s')

    loop = asyncio.get_event_loop()

    ###
    # read list of measurements
    ###
    variablelist = viessmann_decode.load_variable_list(args.variablelist)

    ###
    # serial interface
    ###
    vito_transp, vito_proto = loop.run_until_complete(
        serial_asyncio.create_serial_connection(loop,
                                                vitotronic.VitoTronicProtocol,
                                                args.tty,
                                                baudrate=4800,
                                                bytesize=8,
                                                parity='E',
                                                stopbits=2))

    ###
    # influxdb
    ###
    influx_client = None
    if args.influxdb_url and args.influxdb_url != '-':
        token = args.influxdb_token_file.open().readline().strip()
        influx_client = influxdb_client.InfluxDBClient(url=args.influxdb_url,
                                                       token=token)

    poll_mainloop = PollMainLoop(vito_proto, influx_client, variablelist, args)
    loop.create_task(poll_mainloop.tick())

    if args.webserver:
        webapp = web.Application()
        webapp.add_routes([
            web.get('/query/{addr}/{tag_or_len}',
                    poll_mainloop.handle_web_query),
        ])

        web.run_app(webapp, port=args.webserver)  # includes loop.run_forever()
    else:
        loop.run_forever()
Example No. 24
def read_influxDB_DataFrame(points_list, *args, **params_dict):
    import influxdb_client
    from influxdb_client.client.write_api import SYNCHRONOUS

    client = influxdb_client.InfluxDBClient(url=URL, token=TOKEN, org=ORG)
    query_api = client.query_api()
    username = '"' + tdicprsar['username'] + '"'
    bucket = '"' + BUCKET + '"'
    measurement = '"' + MEASUREMENT + '"'
    start_datatime = str(params_dict['time_of_begin_DF'])
    end_datatime = str(params_dict['time_of_end_DF'])
    # build the query filter string by username
    names_of_user = params_dict['names_of_user']
    #names_of_user.append("TestUser9")
    #names_of_user.append("TestUser1")
    names_of_user_str = "("
    for item in names_of_user:
        names_of_user_str = names_of_user_str + "r.username ==" + '"' + item + '"' + " or "
    len_str = len(names_of_user_str) - 4
    names_of_user_str = names_of_user_str[:len_str] + ")"
    tdicprsar.update({'TestNSQ': names_of_user_str})
    # build the query filter string by location
    names_of_locations = params_dict['names_of_locations']
    names_of_locations_str = "("
    for item in names_of_locations:
        names_of_locations_str = names_of_locations_str + "r.location ==" + '"' + item + '"' + " or "
    len_str = len(names_of_locations_str) - 4
    names_of_locations_str = names_of_locations_str[:len_str] + ")"
    tdicprsar.update({'TestLSQ': names_of_locations_str})
    # build the query filter string by conditions
    names_of_conditions = params_dict['names_of_conditions']
    names_of_conditions_str = "("
    for item in names_of_conditions:
        names_of_conditions_str = names_of_conditions_str + "r.conditions ==" + '"' + item + '"' + " or "
    len_str = len(names_of_conditions_str) - 4
    names_of_conditions_str = names_of_conditions_str[:len_str] + ")"
    tdicprsar.update({'TestCSQ': names_of_conditions_str})
    names_of_all_str = names_of_user_str + " and " + names_of_locations_str + " and " + names_of_conditions_str
    tdicprsar.update({'TestASQ': names_of_all_str})
    tdicprsar.update({'TestS_DF': start_datatime})
    tdicprsar.update({'TestE_DF': end_datatime})

    query = 'from(bucket:' + bucket + ')\
    |> range(start: ' + start_datatime + ', stop: ' + end_datatime + ')\
    |> filter(fn:(r) => r._measurement == ' + measurement + ')\
    |> filter(fn:(r) => ' + names_of_all_str + ')'

    tdicprsar.update({'TestA': query})

    result = query_api.query_data_frame(org=ORG, query=query)
    #result = []
    tdicprsar.update({'TestRR': len(result)})

    #results = []
    #for table in result:
    #    for record in table.records:
    #        results.append((record.values.get('location'), record.values.get('username'), record.values.get('conditions'), #record.get_time(), record.get_field(), record.get_value()))

    #tdicprsar.update({'TestRT':len(results)})
    ##tdicprsar.update({'TestTK':schema.tagKeys(bucket: "HRWEB")})

    delta = timedelta(hours=7, minutes=0)
    #for item in results:

    #    points_list.append("Место " + str(item[0]) + "Имя пользователя " + str(item[1]) + "Условия " + str(item[2]) +
    #		  " Дата, время" + (item[3] + delta).strftime('%d/%m/%Y, %H:%M:%S') +
    #          " Имя поля " + str(item[4]) +
    #          " Значение " + str(item[5])
    #          )

    #    tdicprsar.update({'number_of_points_read':len(results)})
    #    tdicprsar.update({'points_list':points_list})
    tdicprsar.update({'result_DF': result})
    tdicprsar.update({'result': result.to_html()})

    return
Example No. 25
influx_waterdb_token = get_secret('influx_waterdb_token') or os.getenv(
    'influx_waterdb_token')
water_sqs_influxdb_org = get_secret('water_sqs_influxdb_org') or os.getenv(
    'water_sqs_influxdb_org')
water_sqs_influxdb_bucket = get_secret(
    'water_sqs_influxdb_bucket') or os.getenv('water_sqs_influxdb_bucket',
                                              default='water')
water_sqs_influxdb_url = get_secret('water_sqs_influxdb_url') or os.getenv(
    'water_sqs_influxdb_url', default="influxdb")
log.debug(f"Influx ip: {water_sqs_influxdb_url}")

queue_url = 'https://sqs.us-west-2.amazonaws.com/845159206739/sensors-maui-water.fifo'

lastWriteMap = {}

influx_client = influxdb_client.InfluxDBClient(url=water_sqs_influxdb_url,
                                               token=influx_waterdb_token,
                                               org=water_sqs_influxdb_org)
buckets_api = influx_client.buckets_api()
query_api = influx_client.query_api()
write_api = influx_client.write_api()
# lets see if we have bucket 'water'
water_bucket = next((bucket for bucket in buckets_api.find_buckets().buckets
                     if bucket.name == water_sqs_influxdb_bucket), None)
if water_bucket is None:
    # create the bucket
    log.info("'water' bucket not found, creating...")
    water_bucket = buckets_api.create_bucket(
        bucket_name=water_sqs_influxdb_bucket, org=water_sqs_influxdb_org)


def parse(value, reading_ts):
Example No. 26
def read_influxDB_params(points_list, *args, **params_dict):
    import influxdb_client
    from influxdb_client.client.write_api import SYNCHRONOUS

    #    bucket = "HRWEB"
    #    org = "PM72"
    #    token = "3sLsq9ECi2eSQEYQQjIdxZsTuV6NtFcaohVKzNeILEo5hOPGCRt0Mmgzug_8iai9fCNfbUD1s3wAYd5LAXHOjg=="
    ## Store the URL of your InfluxDB instance
    #
    #    url="https://eu-central-1-1.aws.cloud2.influxdata.com/"

    client = influxdb_client.InfluxDBClient(url=URL, token=TOKEN, org=ORG)
    query_api = client.query_api()
    username = '"' + tdicprsar['username'] + '"'
    bucket = '"' + BUCKET + '"'
    measurement = '"' + MEASUREMENT + '"'
    # 1-second adjustments because points are sent to InfluxDB at more than 1 point/second
    start_datatime = str(
        int((params_dict['time_of_begin'] -
             params_dict['delta_param']).timestamp()) - 1)
    end_datatime = str(int((params_dict['time_of_end']).timestamp()) + 1)
    tdicprsar.update({'TestS': start_datatime})
    tdicprsar.update({'TestE': end_datatime})
    #query = 'from(bucket:"HRWEB")\
    #|> range(start: -10m)\
    #|> filter(fn:(r) => r._measurement == "hr_measurement")\
    #|> filter(fn:(r) => r.username == "TestUser2")\
    #|> filter(fn:(r) => r.location == "Новосибирск")\
    #|> filter(fn:(r) => r._field == "hr_per_minute" )'

    #query = 'from(bucket:' + bucket + ')\
    #|> range(start: ' + start_datatime + ', stop: ' + end_datatime + ')\
    query0 = 'from(bucket:"HRWEB")\
    |> range(start: -10m)\
    |> filter(fn:(r) => r._measurement == ' + measurement + ')\
    |> filter(fn:(r) => r.username == ' + username + ')\
    |> filter(fn:(r) => r.location == "Новосибирск")\
    |> filter(fn:(r) => r._field == "hr_per_minute" )'

    query = 'from(bucket:' + bucket + ')\
    |> range(start: ' + start_datatime + ', stop: ' + end_datatime + ')\
    |> filter(fn:(r) => r._measurement == ' + measurement + ')'  #\
    #|> filter(fn:(r) => r.username == ' + username + ')\
    #|> filter(fn:(r) => r.location == "Новосибирск")\
    #|> filter(fn:(r) => r._field == "hr_per_minute" )'

    query2 = 'from(bucket:' + bucket + ')\
    |> range(start: 1623498412, stop: 1623498487)\
    |> filter(fn:(r) => r._measurement == ' + measurement + ')\
    |> filter(fn:(r) => r.username == ' + username + ')\
    |> filter(fn:(r) => r.location == "Новосибирск")\
    |> filter(fn:(r) => r._field == "hr_per_minute" )'

    tdicprsar.update({'TestA': query})

    result = query_api.query(org=ORG, query=query)
    tdicprsar.update({'TestRR': len(result)})

    results = []
    for table in result:
        for record in table.records:
            results.append(
                (record.values.get('location'), record.values.get('username'),
                 record.values.get('conditions'), record.get_time(),
                 record.get_field(), record.get_value()))

    tdicprsar.update({'TestRT': len(results)})
    #tdicprsar.update({'TestTK':schema.tagKeys(bucket: "HRWEB")})

    delta = timedelta(hours=7, minutes=0)  # shift from UTC to local time (UTC+7, Novosibirsk)
    for item in results:

        points_list.append("Место " + str(item[0]) + "Имя пользователя " +
                           str(item[1]) + "Условия " + str(item[2]) +
                           " Дата, время" +
                           (item[3] + delta).strftime('%d/%m/%Y, %H:%M:%S') +
                           " Имя поля " + str(item[4]) + " Значение " +
                           str(item[5]))

    tdicprsar.update({'number_of_points_read': len(results)})
    tdicprsar.update({'points_list': points_list})
    tdicprsar.update({'results': results})

    return
Example No. 27
    def __init__(self, influx_config_file):
        with open(influx_config_file, 'r') as stream:
            try:
                db_config = yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                print(exc)
                raise

        self.client = influxdb_client.InfluxDBClient(
            url=db_config["url"],
            token=db_config["token"],
            org=db_config["org"]
        )

        print(db_config)

        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)

        p = influxdb_client.Point("my_measurement").tag("location", "Prague").field("temperature", 25.3)
        self.write_api.write(bucket=db_config["bucket"],
                             org=db_config["org"],
                             record=p)

        self.query_api = self.client.query_api()
        query = f'from(bucket:"{db_config["bucket"]}")\
        |> range(start: -10m)\
        |> filter(fn:(r) => r._measurement == "my_measurement")\
        |> filter(fn: (r) => r.location == "Prague")\
        |> filter(fn:(r) => r._field == "temperature" )'
        result = self.query_api.query(org=db_config["org"], query=query)
        results = []
        for table in result:
            for record in table.records:
                results.append((record.get_field(), record.get_value()))

        print(results)


        t1 = datetime.datetime.now(tz=datetime.timezone.utc)
        time.sleep(1)
        t2 = datetime.datetime.now(tz=datetime.timezone.utc)

        point1 = {"measurement": "h2o_feet",
                  "tags": {"location": "coyote_creek"},
                  "fields": {"water_level": 2.0},
                  "time": t1}

        point2 = {"measurement": "h2o_feet",
                  "tags": {"location": "coyote_creek"},
                  "fields": {"water_level": 3.0},
                  "time": t2}

        points = [point1, point2]

        self.write_api.write(bucket=db_config["bucket"], org=db_config["org"], record=points)
        query = f'from(bucket:"{db_config["bucket"]}")\
        |> range(start: -10m)\
        |> filter(fn: (r) => r._measurement == "h2o_feet")\
        |> filter(fn: (r) => r.location == "coyote_creek")\
        |> filter(fn: (r) => r._field == "water_level" )'

        result = self.query_api.query(org=db_config["org"],
                                      query=query)

        results = []
        for table in result:
            for record in table.records:
                results.append((record.get_value(), record.get_field()))

        print(results)
Example No. 28
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--rabbitmq_host",
                        default=getenv("RABBITMQ_HOST", "localhost"))
    parser.add_argument("--rabbitmq_port",
                        default=getenv("RABBITMQ_PORT", "5672"),
                        type=int)
    parser.add_argument("--rabbitmq_username",
                        default=getenv("RABBITMQ_USERNAME", ""))
    parser.add_argument("--rabbitmq_password",
                        default=getenv("RABBITMQ_PASSWORD", ""))
    parser.add_argument("--rabbitmq_cacertfile",
                        default=getenv("RABBITMQ_CACERTFILE", ""))
    parser.add_argument("--rabbitmq_certfile",
                        default=getenv("RABBITMQ_CERTFILE", ""))
    parser.add_argument("--rabbitmq_keyfile",
                        default=getenv("RABBITMQ_KEYFILE", ""))
    parser.add_argument("--rabbitmq_exchange",
                        default=getenv("RABBITMQ_EXCHANGE", "waggle.msg"))
    parser.add_argument("--rabbitmq_queue",
                        default=getenv("RABBITMQ_QUEUE", "influx-messages"))
    parser.add_argument("--influxdb_url",
                        default=getenv("INFLUXDB_URL",
                                       "http://localhost:8086"))
    parser.add_argument("--influxdb_token", default=getenv("INFLUXDB_TOKEN"))
    parser.add_argument("--influxdb_bucket",
                        default=getenv("INFLUXDB_BUCKET", "waggle"))
    parser.add_argument("--influxdb_org",
                        default=getenv("INFLUXDB_ORG", "waggle"))

    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO,
                        format="%(asctime)s %(message)s",
                        datefmt="%Y/%m/%d %H:%M:%S")
    # pika logging is too verbose, so we turn it down.
    logging.getLogger("pika").setLevel(logging.CRITICAL)

    logging.info("connecting to influxdb at %s", args.influxdb_url)
    client = influxdb_client.InfluxDBClient(url=args.influxdb_url,
                                            token=args.influxdb_token,
                                            org=args.influxdb_org)
    logging.info("connected to influxdb")

    writer = client.write_api(write_options=SYNCHRONOUS)

    def message_handler(ch, method, properties, body):
        try:
            msg = message.load(body)
        except Exception:
            ch.basic_ack(method.delivery_tag)
            logging.warning("failed to parse message")
            return

        try:
            record = (influxdb_client.Point(msg.name).tag(
                "node",
                msg.meta["node"]).tag("plugin", msg.meta["plugin"]).field(
                    "value", msg.value).time(msg.timestamp, WritePrecision.NS))
        except KeyError as key:
            ch.basic_ack(method.delivery_tag)
            logging.warning("message missing meta %s", key)
            return

        writer.write(bucket=args.influxdb_bucket,
                     org=args.influxdb_org,
                     record=record)
        ch.basic_ack(method.delivery_tag)
        logging.debug("proccessed message %s", msg)

    if args.rabbitmq_username != "":
        credentials = pika.PlainCredentials(args.rabbitmq_username,
                                            args.rabbitmq_password)
    else:
        credentials = pika.credentials.ExternalCredentials()

    if args.rabbitmq_cacertfile != "":
        context = ssl.create_default_context(cafile=args.rabbitmq_cacertfile)
        # HACK this allows the host and baked in host to be configured independently
        context.check_hostname = False
        if args.rabbitmq_certfile != "":
            context.load_cert_chain(args.rabbitmq_certfile,
                                    args.rabbitmq_keyfile)
        ssl_options = pika.SSLOptions(context, args.rabbitmq_host)
    else:
        ssl_options = None

    params = pika.ConnectionParameters(host=args.rabbitmq_host,
                                       port=args.rabbitmq_port,
                                       credentials=credentials,
                                       ssl_options=ssl_options,
                                       retry_delay=60,
                                       socket_timeout=10.0)

    conn = pika.BlockingConnection(params)
    ch = conn.channel()
    ch.queue_declare(args.rabbitmq_queue, durable=True)
    ch.queue_bind(args.rabbitmq_queue, args.rabbitmq_exchange, "#")
    ch.basic_consume(args.rabbitmq_queue, message_handler)
    ch.start_consuming()
Example No. 29
effective_voltage = 100  # effective (RMS) voltage of the AC mains

# `pot.value` ranges from 0 to 1
pot = MCP3008(channel=0, max_voltage=max_voltage)

# The input is divided down to 2.5 V, so the mean of `pot.value` should be 0.5,
# but in practice it is slightly off; this variable holds that offset.
bias = 0.492533

# load the InfluxDB settings
bucket = os.environ["INFLUXDB_BUCKET"]
org = os.environ["INFLUXDB_ORGANIZATION"]
token = os.environ["INFLUXDB_TOKEN"]
url = os.environ["INFLUXDB_URL"]

client = influxdb_client.InfluxDBClient(url=url, token=token, org=org)
write_api = client.write_api(write_options=SYNCHRONOUS)


# Returns the power flowing through the clamp (AC, so negative values are possible).
def current():
    # Map `pot.value` from [0, +1] to [-0.5, +0.5], then multiply by the
    # maximum voltage (5.0 V) to convert it to a voltage.
    voltage = (pot.value - bias) * max_voltage
    # Use Ohm's law to compute the current flowing through the sensor.
    ampere = voltage / resist
    # Scale the CT-sensor current to the current of the monitored circuit.
    ampere_observe = ampere * rate
    # Power = current * effective (RMS) voltage.
    watt = ampere_observe * effective_voltage
    return watt
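The example sets up write_api but never writes the readings; a sketch of the missing sampling loop, assuming a measurement name of "power" and a 1-second interval:

import time

# Hypothetical sampling loop; the measurement name "power" and the 1 s
# interval are assumptions, not part of the original example.
while True:
    point = influxdb_client.Point("power").field("watt", current())
    write_api.write(bucket=bucket, org=org, record=point)
    time.sleep(1)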
Example No. 30
 def provide_influxdb_v2_client(
         self, config: InfluxDBConfigV2) -> influxdb_client.InfluxDBClient:
     return influxdb_client.InfluxDBClient(url=config.url,
                                           token=config.token,
                                           org=config.org)
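InfluxDBConfigV2 is not shown in the example; a minimal stand-in exposing the three attributes the provider reads, plus a usage line with placeholder values:

# Hypothetical stand-in for the InfluxDBConfigV2 type referenced above.
from dataclasses import dataclass

@dataclass
class InfluxDBConfigV2:
    url: str
    token: str
    org: str

config = InfluxDBConfigV2(url="http://localhost:8086",
                          token="my-token", org="my-org")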