Example #1
def __init__(self, consumer, deserialize):
    super().__init__(consumer)
    if self.enabled:
        self.deserialize = deserialize
        self._reset_stats()
        self.influxdb_client = InfluxDBClient(host=INFLUXDB_HOST,
                                              db=INFLUXDB_DB,
                                              loop=self.loop)
Example #2
async def main():
    client = InfluxDBClient(**influxdb_client_configuration)
    async for result in scan(5):
        measurement = RuuviTagMeasurementTest1(
            name=result.name,
            temperature=result.temperature,
            humidity=result.humidity,
            pressure=result.pressure,
            voltage=result.voltage,
        )
        asyncio.create_task(client.write(measurement))
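A note on the fire-and-forget `asyncio.create_task(...)` call above: the event loop holds only weak references to tasks, so an unreferenced write task can be garbage-collected before it finishes and any write error is silently lost. A minimal variation, assuming the same `scan`, `RuuviTagMeasurementTest1`, and `influxdb_client_configuration` as above, that keeps the references and surfaces errors:

import asyncio

async def main():
    client = InfluxDBClient(**influxdb_client_configuration)
    tasks = []
    async for result in scan(5):
        measurement = RuuviTagMeasurementTest1(
            name=result.name,
            temperature=result.temperature,
            humidity=result.humidity,
            pressure=result.pressure,
            voltage=result.voltage,
        )
        # Keep a reference so the task cannot be garbage-collected mid-write.
        tasks.append(asyncio.create_task(client.write(measurement)))
    # Await all pending writes so exceptions propagate instead of vanishing.
    await asyncio.gather(*tasks)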
Example #3
async def initialize_database(app):
    db_host = os.getenv("INFLUXDB_HOST", "localhost")
    db_port = int(os.getenv("INFLUXDB_PORT", 8086))  # getenv returns a str; keep the port numeric
    db_database = os.getenv("ENVIRONMENT", "aquarium")
    db = InfluxDBClient(host=db_host, port=db_port, db=db_database)
    app["influx-db"] = db
    await db.create_database(db=db_database)
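`initialize_database` has the signature of an aiohttp `on_startup` handler. A minimal sketch of wiring it into an application, with a matching cleanup hook that closes the client (aioinflux exposes an async `close()`); `create_app` and `close_database` are illustrative names, and `from aiohttp import web` is assumed:

async def close_database(app):
    # Release the HTTP session held by the InfluxDB client on shutdown.
    await app["influx-db"].close()

def create_app() -> web.Application:
    app = web.Application()
    app.on_startup.append(initialize_database)
    app.on_cleanup.append(close_database)
    return app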
Example #4
def df_client():
    if utils.pd is None:
        return
    with InfluxDBClient(db='df_client_test', mode='blocking', output='dataframe') as client:
        client.create_database()
        yield client
        client.drop_database()
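This reads like the body of a pytest fixture (it yields the client, then drops the database on teardown), with the `@pytest.fixture` decorator presumably stripped by whatever extracted the snippet. A hypothetical test consuming it under that assumption; `test_roundtrip` and the `m1` measurement are illustrative:

import pytest

df_client = pytest.fixture(df_client)  # restore the presumably stripped decorator

def test_roundtrip(df_client):
    # In blocking mode with output='dataframe', query() returns a pandas object.
    df_client.write({'measurement': 'm1', 'fields': {'value': 1.0}})
    result = df_client.query('SELECT * FROM m1')
    assert not result.empty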
Example #5
async def _init_nosql_dbs(app: web.Application) -> web.Application:
    """
    Wrapper over database initialization.

    :param app: an instance of aiohttp application.
    :return: an instance of aiohttp application with updates.
    """
    ###
    # Create connection to databases
    NoSQLDatabases = type(
        'NoSQLDatabases', (SimpleNamespace,), {'mongo': None, 'influx': None}
    )
    app.dbs = NoSQLDatabases()

    cfg = deep_get(app.shared.extras, 'dbs.mongo',  {})
    app.dbs.mongo  = MongosDBClient(**cfg, loop=app.loop)

    cfg = deep_get(app.shared.extras, 'dbs.influx', {})
    app.dbs.influx = InfluxDBClient(**cfg, loop=app.loop)

    aws: Tuple[
         Union[Awaitable[_Returns_co], Callable[..., _Returns_co]], ...] = (
            app.dbs.mongo.ping(), app.dbs.influx.ping(),
    )
    pings = await asyncio.gather(*aws, loop=app.loop, return_exceptions=True)

    ###
    # Attention! I apologize in advance: the code that was here had to be
    # removed so as not to violate an NDA.
    # ¯\_(ツ)_/¯
    # Thanks for understanding!

    return app
Example #6
async def configure_connection(settings: Settings):
    influx.client = InfluxDBClient(host=settings.influx_db_host,
                                   port=settings.influx_db_port,
                                   database=settings.influx_db_database,
                                   username=settings.influx_db_user,
                                   password=settings.influx_db_password)
    logger.info("Connection set up for InfluxDB")
Example #7
File: app.py Project: hramcovdv/watcher
async def main(config: Dict[str, Any]) -> None:
    """ Main worker
    """
    async with InfluxDBClient(host=config['INFLUXDB_HOST'],
                              port=int(config['INFLUXDB_PORT']),
                              username=config['INFLUXDB_USERNAME'],
                              password=config['INFLUXDB_PASSWORD'],
                              db=config['INFLUXDB_DB']) as client:
        icmp_redis_pool = await create_redis_pool(
            (config['REDIS_HOST'], int(config['REDIS_PORT'])),
            db=int(config['REDIS_DB']))

        snmp_redis_pool = await create_redis_pool(
            (config['REDIS_HOST'], int(config['REDIS_PORT'])),
            db=int(config['REDIS_DB']))

        icmp_queue = AioRedisQueue(redis=icmp_redis_pool,
                                   queue_name=config['ICMP_QUEUE_NAME'])

        snmp_queue = AioRedisQueue(redis=snmp_redis_pool,
                                   queue_name=config['SNMP_QUEUE_NAME'])

        tasks = []
        for _ in range(int(config['TASKS'])):
            icmp_task = asyncio.create_task(
                influx_callback(icmp_worker(icmp_queue), client))
            tasks.append(icmp_task)

        for _ in range(int(config['TASKS'])):
            snmp_task = asyncio.create_task(
                influx_callback(snmp_worker(snmp_queue), client))
            tasks.append(snmp_task)

        await asyncio.gather(*tasks)
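`influx_callback`, `icmp_worker`, and `snmp_worker` are project code that isn't shown; judging from the call sites, the callback drives a worker and forwards its output to InfluxDB. A purely hypothetical sketch of that shape, assuming each worker is an async generator yielding point dicts:

async def influx_callback(worker, client):
    # Hypothetical reconstruction; the real project may differ.
    async for point in worker:
        await client.write(point)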
Example #8
def get_influx(influx_config):
    assert influx_config
    return InfluxDBClient(
        host=influx_config['host'],
        port=influx_config['port'],
        username=influx_config['username'],
        password=influx_config['password'],
        db=influx_config['db'])
Example #9
async def iter_client():
    async with InfluxDBClient(db='iter_client_test',
                              mode='async',
                              output='iterable') as client:
        await client.create_database()
        await client.write([p for p in utils.cpu_load_generator(100)])
        await yield_(client)
        await client.drop_database()
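`await yield_(...)` comes from the `async_generator` backport package, which predates native async generators (Python 3.6). On modern Python the same fixture body can use a plain `yield`:

async def iter_client():
    async with InfluxDBClient(db='iter_client_test',
                              mode='async',
                              output='iterable') as client:
        await client.create_database()
        await client.write(list(utils.cpu_load_generator(100)))
        yield client  # native async generator syntax
        await client.drop_database()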
Example #10
File: log.py Project: Forevka/Emcd
async def log(data: dict):
    try:
        async with InfluxDBClient(host=InfluxDBParams.STATS_HOST,
                                  db=InfluxDBParams.STATS_DB,
                                  username=InfluxDBParams.STATS_USER,
                                  password=InfluxDBParams.STATS_PASS,
                                  timeout=INFLUX_WRITE_TIMEOUT_SEC) as client:
            await client.write(data)
    except InfluxDBWriteError as ex:
        logging.error(f"InfluxDB write error: {str(ex)}")
Example #11
    async def main_coroutine(self):
        """Main coroutine."""
        client = InfluxDBClient(host=self.db_server, db=self.db_name)
        try:
            await client.create_database(db=self.db_name)  # host is already bound to the client
        except aiohttp.client_exceptions.ClientConnectorError as e:
            log.error(e)
            return
        log_flag = True
        while self.run_flag:
            try:
                cur_key = self.c_params["l_rtt_key"]
                l_rtt = self.containers[cur_key]["rtt"]
                for key, attrs in self.containers.items():
                    query = f'select mean("value") from rtt where ("host" = \'{key}\') and time > now() - 3s fill(0) limit 1'
                    query_res = await client.query(query)
                    series = query_res["results"][0].get("series")
                    if series:
                        values = series[0].get("values")
                        point = float(values[0][-1])
                        self.containers[key]["rtt"] = point
                        # if current path is down, steer away
                        if point == 0.0:
                            if log_flag and key == cur_key:
                                log.info(
                                    "Current path is down! Steering away.")
                                log_flag = False
                            self.containers[key]["rtt"] = self.c_params[
                                "max_rtt"]
                await asyncio.sleep(self.frequency)
                # optimize
                log.debug(f"current_lowest {self.containers[cur_key]['rtt']}")

                # find lowest first
                for key, attrs in self.containers.items():
                    # if the latency is lower, update lowest rtt key
                    if attrs["rtt"] * 1.20 < l_rtt and attrs["rtt"] > 0:
                        cur_key = key
                        l_rtt = attrs["rtt"]
                if self.c_params["l_rtt_key"] != cur_key:
                    log_flag = True
                    evc_path = self.containers[cur_key]["evc_path"]
                    log.info(f"changing to lane #{evc_path}")
                    self.c_params["l_rtt_key"] = cur_key

                    async with aiohttp.ClientSession() as session:
                        my_str = f"http://{self.http_server}:{self.http_port}/{self.endpoint}/{evc_path}"
                        print(my_str)
                        data = await self.http_post(session, my_str)
                        log.info(data)

            except aiohttp.client_exceptions.ClientConnectorError as e:
                log.error(f"HTTP server {self.http_server} connection refused")
                return
Example #12
    def __init__(self,
                 db="weather",
                 host="localhost",
                 port=8086,
                 measurement='weather',
                 verbose=False):
        self.db = db
        self.measurement = measurement
        self.client = InfluxDBClient(db=db, host=host, port=port)

        self.verbose = verbose
Example #13
async def cache_client():
    opts = dict(db='cache_client_test',
                redis_opts=dict(
                    address='redis://localhost:6379/8',
                    timeout=5,
                ),
                cache_expiry=600)
    async with InfluxDBClient(**opts, mode='async') as client:
        assert await client.create_database()
        yield client
        await client.drop_database()
        await client._redis.flushdb()
Example #14
async def push_metric(measurement, tags: Dict, fields: Dict):
    try:
        point = {'measurement': measurement, 'tags': tags, 'fields': fields}
        async with InfluxDBClient(host=INFLUX_HOST,
                                  port=INFLUX_PORT,
                                  username=INFLUX_USERNAME,
                                  password=INFLUX_PASSWORD,
                                  db=INFLUX_DB,
                                  mode='async') as client:
            await client.write(point)
    except Exception as e:
        print(e)
Example #15
async def get_and_insert_datum_into_influx(verbose=False):
    point = {
        'measurement': 'airsensors',
        'tags': {
            'host': 'airsensors',
            'location': 'home'
        },
        'fields': await get_aggregate_sensor_datum()
    }
    async with InfluxDBClient(host=HOST, db=DB) as client:
        await client.create_database(db=DB)
        await client.write(point)
        if verbose:
            print(json.dumps(point['fields'], indent=2))
Example #16
async def efd_client():
    df = pd.read_hdf(PATH / 'efd_test.hdf')
    async with InfluxDBClient(db='client_test',
                              mode='async',
                              output='dataframe') as client:
        await client.create_database()
        await client.write(df, measurement='lsst.sal.fooSubSys.test')
        efd_client = EfdClient('test_efd',
                               db_name='client_test',
                               client=client)
        # Monkey patch the client to point to an existing schema registry
        # Note this is only available if on the NCSA VPN
        efd_client.schema_registry = 'https://lsst-schema-registry-efd.ncsa.illinois.edu'
        yield efd_client
        await client.drop_database()
Example #17
async def fetchMeasurements(loop, display):
    async with InfluxDBClient(db='testdb', output='iterable') as client:
        resp = await client.query('show measurements',
                                  chunked=True,
                                  chunk_size=128)
        async for chunk in resp.iterchunks():
            for result in chunk['results']:
                series = result['series']
                for s in series:
                    for measurement in s['values']:
                        display.AddMeasurement(measurement[0])
    try:
        loop.create_task(display.Refresh())
    except Exception as e:
        print(e)
Example #18
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.launchtime = datetime.datetime.utcnow()

        # COMMON ATTRIBUTES
        self.config: dict = json.load(open('config.json', 'r'))
        self.configs = {}
        self.overrides: dict = json.load(open('overrides.json', 'r'))
        self.override_save.start()
        self.tips = json.load(open('tips.json', 'r'))
        self.premiumGuilds = []
        self.db: asyncpg.pool.Pool = None
        self.realtime_members = True
        self.dev = kwargs.pop('dev', False)

        # CRAB
        self.crab = '🦀'

        # LOGGING
        logging.basicConfig(filename='bot.log', level=logging.INFO)
        self.logger = logging.getLogger('Fire')
        stdout = logging.StreamHandler(sys.stdout)
        stdout.setLevel(logging.INFO)
        COLOR_FORMAT = colorformat.formatter_message(
            "[$BOLD%(name)s$RESET][%(levelname)s] %(message)s $RESET($BOLD%(filename)s$RESET:%(lineno)d)"
        )
        stdout.setFormatter(colorformat.ColoredFormatter(COLOR_FORMAT))
        self.logger.addHandler(stdout)

        # SENTRY
        if 'sentry' in self.config:
            sentry_sdk.init(self.config['sentry'])

        # INFLUX
        if 'influx_user' in self.config and 'influx_pass' in self.config:
            self.influx = InfluxDBClient(db='firedev' if self.dev else 'fire',
                                         username=self.config['influx_user'],
                                         password=self.config['influx_pass'])

        # MODULES
        self.loadModules()

        # COMMANDS
        self.loadCommands()
        self.cmdresp = {}

        # EVENTS
        self.loadEvents()
Example #19
async def log(user_id: int, event: EventCommand):
    data = {
        "measurement": "bot_commands",
        "time": datetime.now(),
        "fields": {"event": 1},
        "tags": {
            "user": str(user_id),
            "command": event.value
        }
    }
    try:
        async with InfluxDBClient(host=DBParams.STATS_HOST, db=DBParams.STATS_DB,
                                  username=DBParams.STATS_USER, password=DBParams.STATS_PASS) as client:
            await client.write(data)
    except InfluxDBWriteError as ex:
        logging.error(f"InfluxDB write error: {str(ex)}")
Example #20
async def run_test(number_of_day, total_number, type_request):
    try:
        end_time = datetime.datetime(
            2019, 1, 1, 0, 0, 0) + datetime.timedelta(days=number_of_day)
        start_time = end_time - datetime.timedelta(days=1)
        query = QUERY_TYPE_LIST[type_request].replace(
            'START_TIME', '{}000000000'.format(
                int(time.mktime(start_time.timetuple())))).replace(
                    'END_TIME', '{}000000000'.format(
                        int(time.mktime(end_time.timetuple()))))
        async with InfluxDBClient(db='benchmarkdb') as client:
            try:
                with open(
                        '../data/influx/influx_1sec_{}d.dat'.format(
                            number_of_day), 'rt') as f:
                    network_setup = ''
                    if LATENCY_TYPE or PACKETLOSS_TYPE:
                        network_setup = '_{}_{}'.format(
                            LATENCY_TYPE, PACKETLOSS_TYPE)
                    with open(
                            '{}/{}/{}_{}_{}_{}{}.txt'.format(
                                ANALYSIS_DIRECTORY, BENCHMARK_TEST,
                                BENCHMARK_TEST, type_request, total_number,
                                number_of_day, network_setup), 'w') as fw:
                        for i in range(864):
                            bulk_data = []
                            for j in range(100):
                                l = f.readline()
                                bulk_data.append(l)

                            prev_time = time.time()
                            await client.write(bulk_data)
                            bulk_time = time.time()
                            await client.query(query, db='benchmarkdb')
                            curr_time = time.time()

                            # columns: iteration, total, write, and query durations
                            fw.write('{}\t{}\t{}\t{}\n'.format(
                                i, curr_time - prev_time,
                                bulk_time - prev_time, curr_time - bulk_time))

            except FileNotFoundError:
                print(
                    'You need to generate data first. Please use "data_generator.py" with influx data format.'
                )
    except Exception as e:
        print('Error: {}'.format(e))
Example #21
async def vmstat(loop, cmd, hostname, restart=True):
    proc = await asyncio.create_subprocess_shell(cmd,
                                                 stdout=asyncio.subprocess.PIPE,
                                                 stderr=asyncio.subprocess.PIPE)
    
    # procs -----------memory---------- ---swap-- -----io---- -system-- ------cpu----- -----timestamp-----
    # r  b   swpd   free   buff  cache   si   so    bi    bo   in   cs us sy id wa st                 UTC
    # 0  0   2048 286872 1318492 14039020    0    0     1     9    2    1  3  1 96  0  0 2019-01-11 23:11:04
    async with InfluxDBClient(db='testdb', output='iterable') as client:
        done = False
        while not done:
            line = await proc.stdout.readline()
            logging.debug(line)
            if not line:
                done = True
                continue
            
            l = line.strip().decode('ascii').split(' ')
            l = list(filter(None, l))
            if len(l) == 19:
                freeMem = float(l[3])
                us = float(l[12])
                dayStr = l[17]
                timeStr = l[18]
                points = []
                points.append({
                    'time': '%sT%sZ' % (dayStr, timeStr),
                    'measurement': 'cpu_load_short',
                    'tags': {'host': hostname },
                    'fields': {'value': us}
                })
                points.append({
                    'time': '%sT%sZ' % (dayStr, timeStr),
                    'measurement': 'free_mem',
                    'tags': {'host': hostname },
                    'fields': {'value': freeMem}
                })

                for point in points:
                    logging.debug(point)
                    await client.write(point)
            
        logging.info("%s: Return Code %s", cmd, proc.returncode)

        if restart:
            loop.create_task(vmstat(loop, cmd, hostname, restart))
Example #22
File: client.py Project: viniarck/dvel
def __init__(
    self,
    name: str,
    https_info: HTTPServerInfo,
    dbs_info: DBServerInfo,
    frequency: float = 0.001,
    timeout: int = 1,
) -> None:
    """Constructor of Client."""
    self.name = name
    self.h_info = https_info
    self.d_info = dbs_info
    self.influx_client = InfluxDBClient(
        host=dbs_info.addr, db=dbs_info.name, port=dbs_info.port
    )
    self.timeout = timeout
    self.frequency = frequency
Example #23
    async def _generate_connections(
            self, loop: asyncio.AbstractEventLoop
    ) -> AsyncIterator[InfluxDBClient]:
        """Iterator that keeps yielding new (connected) clients.

        It will only yield an influx client if it could successfully ping the remote.
        There is no limit to total number of clients yielded.
        """
        while True:
            try:
                async with InfluxDBClient(host=INFLUX_HOST,
                                          db=self._database,
                                          loop=loop) as client:
                    await client.ping()
                    LOGGER.info(f'Connected {self}')
                    yield client

            except ClientConnectionError as ex:
                # Sleep and try reconnect
                await asyncio.sleep(RECONNECT_INTERVAL_S)
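To make the intended lifecycle concrete: each `async for` step over this generator hands out a freshly pinged client, and advancing the iterator closes the old client and reconnects. A hypothetical consumer, where `_run` and `_flush_pending` are assumed names for the actual driver and write logic:

    async def _run(self, loop):
        async for client in self._generate_connections(loop):
            try:
                while True:
                    await self._flush_pending(client)  # assumed writer coroutine
            except ClientConnectionError:
                continue  # the generator reconnects and yields a new client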
Example #24
async def detect(id_user: int, command: Command):
    data = {
        "measurement": "bot_commands",
        "time": datetime.now(),
        "fields": {
            "event": 1
        },
        "tags": {
            "user": str(id_user),
            "command": command.value
        }
    }
    try:
        async with InfluxDBClient(host=EvnDB.STATS_HOST,
                                  db=EvnDB.STATS_DB,
                                  username=EvnDB.STATS_USER,
                                  password=EvnDB.STATS_PASS) as client:
            await client.write(data)
    except InfluxDBWriteError as e:
        logging.error(f"InfluxDB write error: {e}")
Example #25
async def main():
    async with InfluxDBClient(db='testdb', output='iterable') as client:

        #await client.create_database(db='testdb')
        for x in range(1,10):
            current_time = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')

            point = {
                'time': current_time,
                'measurement': 'cpu_load_short',
                'tags': {'host': 'server01',
                         'region': 'us-west'},
                'fields': {'value': 0.64+x}
            }

            await client.write(point)
            await asyncio.sleep(.1)
        resp = await client.query('SELECT value FROM cpu_load_short', chunked=True, chunk_size=128)
        async for chunk in resp.iterchunks():
            yield chunk
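Because this `main` yields chunks, it is an async generator rather than a coroutine, so it cannot be passed to `asyncio.run` directly. A minimal driver (names are illustrative):

import asyncio

async def run():
    async for chunk in main():
        print(chunk)

asyncio.run(run())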
Example #26
async def main(loop):
    async with InfluxDBClient(host=INFLUX_HOST, db=INFLUX_DB) as client:
        await client.create_database(db=INFLUX_DB)

    connection = await aio_pika.connect_robust("amqp://{}:{}@{}/".format(
        USER, PASSWORD, HOST),
                                               loop=loop)

    queue_name = "metrics"

    async with connection:
        channel = await connection.channel()
        queue = await channel.declare_queue(queue_name,
                                            auto_delete=False,
                                            durable=True,
                                            passive=True)
        async with queue.iterator() as queue_iter:
            async for message in queue_iter:
                async with message.process():
                    e = Metrics()
                    await e.send(message.body)
Example #27
    async def update_loop(self):
        await self.bot.wait_until_ready()
        async with InfluxDBClient(db='xenon') as client:
            await client.create_database(db='xenon')
            while True:
                await asyncio.sleep(60)
                await self.calculate_total_values()
                try:
                    for shard_id, stats in self.stats.items():
                        await client.write([{
                            "measurement": measurement,
                            "tags": {
                                "shard": str(shard_id)
                            },
                            "fields": {
                                "value": value
                            }
                        } for measurement, value in stats.items()])

                    self.build_scheme()
                except Exception:
                    traceback.print_exc()
Example #28
async def perfTask(loop, cmd, pid, hostname, restart=True):
    proc = await asyncio.create_subprocess_shell(cmd,
                                                 stdout=asyncio.subprocess.PIPE,
                                                 stderr=asyncio.subprocess.PIPE)
    
    async with InfluxDBClient(db='testdb', output='iterable') as client:
        done = False
        while not done:
            try:
                line = await proc.stdout.readline()

                if not line:
                    done = True
                    continue
                
                l = line.strip().decode('ascii').split('#')
                l = list(filter(None, l))
                logging.debug(l)
                current_time = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
                value = int(l[1])
                if value:
                    point = {
                        'time': current_time,
                        'measurement': 'perf_events',
                        'tags': { 'type': l[2], 'host': hostname, 'pid': pid },
                        'fields': {'value': int(l[1])}
                    }
                    logging.debug(point)
                
                    await client.write(point)
            except Exception as e:
                logging.error(e)
                done = True


        logging.info("%s: Return Code %s", cmd, proc.returncode)

        if restart:
            loop.create_task(perfTask(loop, cmd, pid, hostname, restart))
Example #29
async def periodic(timeout):
    """ Tasks runs every periods setting by the global Variable "tasks_runs_every_n_seconds"
    """
    global session, api_failure_count, api_count, token_start_time, influxdb_client
    looping_no = 0
    token_start_time = time.time()

    # If InfluxDB writes are enabled, create the client and target database
    if influxdb_write_enable:
        influxdb_client = InfluxDBClient(host=influxdb_client_host_ip, db=dnac["name"])
        await influxdb_client.create_database(db=dnac["name"])

    while True:
        looping_no += 1

        logconsole.info(f"looping {str(looping_no)} start")
        task_run_start_time = time.time()

        await get_token_refresh()
        await api_task(session, get_current_time())

        elapsed = time.time() - task_run_start_time
        logconsole.info(
            f"looping no.{str(looping_no)} took: {elapsed:0.2f}s, api failed/total: {api_failure_count}/{api_count}")

        # When configured to run infinitely, do NOT break out of the while loop
        if not runs_infinitely:
            if looping_no >= 3:
                break

        if timeout > elapsed:
            await asyncio.sleep(timeout - elapsed)

    if session:
        await session.close()

    return
Example #30
    async def run(self):
        """Overrides RepeaterFeature.run()"""
        write_interval = self.app['config']['write_interval']
        try:
            async with InfluxDBClient(host=INFLUX_HOST,
                                      db=self._database) as client:
                await client.ping()
                LOGGER.info(f'Connected {self}')
                await client.create_database(db=self._database)

                while True:
                    await asyncio.sleep(write_interval)

                    if not self._pending:
                        # Do a quick check whether the connection is still alive
                        await client.ping()
                        continue

                    points = self._pending.copy()
                    await client.write(points)
                    LOGGER.debug(f'Pushed {len(points)} points to database')
                    # Make sure to keep points that were inserted during the write
                    self._pending = self._pending[len(points):]

        except ClientConnectionError as ex:
            if self._last_ok:
                LOGGER.warning(f'Database connection failed {self} {ex}')
                self._last_ok = False
            await asyncio.sleep(RECONNECT_INTERVAL_S)
            self._avoid_overflow()

        except asyncio.CancelledError:
            raise

        except Exception:
            await asyncio.sleep(RECONNECT_INTERVAL_S)
            raise
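The handlers above sleep and then return or re-raise instead of looping, which only works if something outside re-enters `run()`; the `RepeaterFeature` named in the docstring presumably does exactly that. A hypothetical outer loop consistent with that contract; `repeat` is an assumed name:

    async def repeat(self):
        # Hypothetical driver: keep re-entering run() until cancelled.
        while True:
            try:
                await self.run()
            except asyncio.CancelledError:
                raise
            except Exception as ex:
                LOGGER.error(f'{self} repeater error: {ex}')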