コード例 #1
0
 def __init__(self, database_config: DataBaseConfig,
              client_config: ClientConfig) -> None:
     """Keep both configs and set up the JSON file logger."""
     # Configuration objects are stored as-is for later use.
     self.client_config = client_config
     self.database_config = database_config
     # Async JSON logger that writes to the server log file.
     self.logger: Logger = JsonLogger.with_default_handlers(name="log/server.log")
     # Flag recording whether the client has been called yet.
     self.status_call_client = False
コード例 #2
0
async def initialize(app: aiohttp.web.Application):
    """
    Initialize the application context.

    Creates the database engine and task provider, builds the RabbitMQ
    connection/channel pools, declares the task queue, and sets up the
    producer and the JSON logger.

    :return: None
    """

    global task_provider, _db_engine, _rmq_connection_pool, _rmq_channel_pool, \
        producer, was_initialized, logger

    # Database engine backing the task provider.
    _db_engine = await gino.create_engine(configs.DATABASE_URI)
    task_provider = TaskProvider(_db_engine)

    loop = asyncio.get_running_loop()
    _rmq_connection_pool = aio_pika.pool.Pool(_get_connection, loop=loop)
    _rmq_channel_pool = aio_pika.pool.Pool(_get_channel, loop=loop)

    # Durable queue survives broker restarts.
    async with _rmq_channel_pool.acquire() as channel:
        await channel.declare_queue(configs.TASK_Q_NAME, durable=True)

    producer = Producer(_rmq_channel_pool)

    logger = JsonLogger.with_default_handlers(
        level=logging.DEBUG,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, 'file_path', 'line_number'])

    # Mark the context as ready for use.
    was_initialized = True
コード例 #3
0
    async def test_extra_parameter_adds_content_to_root_of_all_messages(self):
        """Entries passed via ``extra`` must appear at the JSON root."""
        extra_logger = JsonLogger.with_default_handlers(
            level=10, extra={"dog": "Xablau"}
        )
        await extra_logger.info({"log_message": "Xena"})

        raw_line = await self.stream_reader.readline()
        record = json.loads(raw_line)

        self.assertEqual("Xena", record["msg"]["log_message"])
        self.assertEqual("Xablau", record["dog"])
コード例 #4
0
    async def test_extra_parameter_adds_content_to_root_of_all_messages(self):
        """Init-time ``extra`` entries end up at the root of every record."""
        logger = JsonLogger.with_default_handlers(
            level=10, extra={'dog': 'Xablau'}
        )
        await logger.info({'log_message': 'Xena'})

        record = json.loads(await self.stream_reader.readline())

        self.assertEqual(record['msg']['log_message'], 'Xena')
        self.assertEqual(record['dog'], 'Xablau')
コード例 #5
0
    async def setUp(self):
        """Redirect aiologger's stdout/stderr into a pipe we can read."""
        read_fd, write_fd = os.pipe()
        self.read_pipe = os.fdopen(read_fd, 'r')
        self.write_pipe = os.fdopen(write_fd, 'w')

        # Both streams point at the write end of the pipe.
        for target in ('aiologger.logger.sys.stdout',
                       'aiologger.logger.sys.stderr'):
            patch(target, self.write_pipe).start()

        (self.stream_reader,
         self.reader_transport) = await self._make_read_pipe_stream_reader()
        self.logger = JsonLogger.with_default_handlers(level=logging.DEBUG)
コード例 #6
0
    async def test_default_fields_are_excludeable(self):
        """Fields listed in ``exclude_fields`` must not be serialized."""
        excluded = (FUNCTION_NAME_FIELDNAME, LOG_LEVEL_FIELDNAME)
        logger = JsonLogger.with_default_handlers(
            level=10,
            exclude_fields=list(excluded),
        )

        await logger.info("Xablau")
        record = json.loads(await self.stream_reader.readline())

        for field in excluded:
            self.assertNotIn(field, record)
コード例 #7
0
    async def test_it_logs_time_at_desired_tz(self):
        """``logged_at`` honours the timezone passed at construction."""
        desired_tz = timezone(timedelta(hours=-1))
        expected_now = (datetime.now(tz=timezone.utc)
                        .astimezone(desired_tz)
                        .isoformat())

        logger = JsonLogger.with_default_handlers(level=logging.DEBUG,
                                                  tz=desired_tz)
        await logger.error("Batemos tambores, eles panela.")

        json_log = json.loads(await self.stream_reader.readline())
        self.assertEqual(expected_now, json_log['logged_at'])
コード例 #8
0
    async def test_extra_parameter_on_log_method_function_call_updates_extra_parameter_on_init(self):
        """Call-site ``extra`` is merged on top of the init-time ``extra``."""
        logger = JsonLogger.with_default_handlers(level=10,
                                                  extra={'dog': 'Xablau'})
        await logger.info({'log_message': 'Xena'}, extra={"ham": "eggs"})

        record = json.loads(await self.stream_reader.readline())

        self.assertEqual(record['msg']['log_message'], 'Xena')
        # Both the init-time and the call-time extras must be present.
        for key, value in (('dog', 'Xablau'), ('ham', 'eggs')):
            self.assertEqual(record[key], value)
コード例 #9
0
    async def setUp(self):
        """Capture aiologger output by patching stdout/stderr to a pipe."""
        read_fd, write_fd = os.pipe()
        self.read_pipe = os.fdopen(read_fd, "r")
        self.write_pipe = os.fdopen(write_fd, "w")

        for stream_path in ("aiologger.logger.sys.stdout",
                            "aiologger.logger.sys.stderr"):
            patch(stream_path, self.write_pipe).start()

        reader_pair = await make_read_pipe_stream_reader(self.loop,
                                                         self.read_pipe)
        self.stream_reader, self.reader_transport = reader_pair
        self.logger = JsonLogger.with_default_handlers(level=LogLevel.DEBUG)
コード例 #10
0
def get_logger() -> Logger:
    """Build the application JSON logger.

    Also aligns the uvicorn access logger with the configured level.
    """
    settings = LoggerSettings()
    # Keep uvicorn's access log at the same level as our own logger.
    logging.getLogger("uvicorn.access").setLevel(settings.log_level)
    # Fall back to DEBUG when the configured name is not a known LogLevel.
    level = getattr(LogLevel, settings.log_level, LogLevel.DEBUG)
    return JsonLogger.with_default_handlers(
        name="serengeti",
        level=level,
        serializer_kwargs={"indent": 4},
    )
コード例 #11
0
    async def test_extra_parameter_on_log_method_function_call_updates_extra_parameter_on_init(
            self):
        """Call-time ``extra`` entries are added on top of init-time ones."""
        logger = JsonLogger.with_default_handlers(
            level=10, extra={"dog": "Xablau"}
        )
        await logger.info({"log_message": "Xena"}, extra={"ham": "eggs"})

        record = json.loads(await self.stream_reader.readline())

        self.assertEqual("Xena", record["msg"]["log_message"])
        self.assertEqual("Xablau", record["dog"])
        self.assertEqual("eggs", record["ham"])
コード例 #12
0
ファイル: context.py プロジェクト: GoncharovArtyom/TaskQueue
async def initialize():
    """
    Initialize the module context.

    Creates the database engine and task provider, opens the RabbitMQ
    connection, declares the task queue, and configures the JSON logger.

    :return: None
    """

    global task_provider, queue, connection, was_initialized, logger

    # Database engine backing the task provider.
    _engine = await gino.create_engine(configs.DATABASE_URI)
    task_provider = TaskProvider(_engine)

    connection = await aio_pika.connect(configs.RABBITMQ_URI,
                                        loop=asyncio.get_running_loop())
    channel = await connection.channel()
    # Durable queue survives broker restarts.
    queue = await channel.declare_queue(configs.TASK_Q_NAME, durable=True)

    logger = JsonLogger.with_default_handlers(
        level=logging.DEBUG,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, 'file_path', 'line_number'])

    # Mark the context as ready for use.
    was_initialized = True
コード例 #13
0
import asyncio

import discord
from aiologger.loggers.json import JsonLogger
from discord.ext.commands import Bot, MissingRequiredArgument, context

from search import GoogleSearch
from storage import StorageManager

# Module-level async JSON logger named after this module.
logger = JsonLogger.with_default_handlers(name=__name__)


def initialize_bot(bot: Bot, storage_manager: StorageManager,
                   google_search: GoogleSearch):
    @bot.command()
    async def google(ctx: context.Context, arg):
        '''
        Bot command to let user perform a google search and get top 5 links
        Ex:
        !google node
        !google "game of thrones"
        '''
        # Don't need to wait for the delivery of message hence not using await
        asyncio.create_task(ctx.send(f'Fetching top 5 links for {arg}'))

        # This gives user typing symbol in UI
        with ctx.channel.typing():
            # Google search is not dependent on the storage hence not awaiting it
            asyncio.create_task(
                storage_manager.save_recent_search(ctx.author.id, arg))
コード例 #14
0
    # HTTP server bind address and port.
    HTTP_HOST: str = "127.0.0.1"
    HTTP_PORT: int = 8080

    # Bulk flush interval — presumably seconds, per
    # DefaultValues.BULK_FLUSH_INTERVAL; TODO confirm the unit.
    FLUSH_TIMEOUT: int = DefaultValues.BULK_FLUSH_INTERVAL

    # metrics
    METRICS_NAMESPACE: str = "asyncworker"
    METRICS_APPPREFIX: Optional[str]
    METRICS_ROUTE_PATH: str = "/metrics"
    METRICS_ROUTE_ENABLED: bool = True
    # Histogram bucket upper bounds, in milliseconds.
    METRICS_DEFAULT_HISTOGRAM_BUCKETS_IN_MS: List[float] = [
        10,
        50,
        100,
        200,
        500,
        1000,
        5000,
        INFINITY,
    ]

    class Config:
        # Settings are immutable and read from ASYNCWORKER_*-prefixed
        # environment variables.
        allow_mutation = False
        env_prefix = "ASYNCWORKER_"


settings = Settings()

# Resolve the configured level name; fall back to INFO if unrecognized.
loglevel = getattr(logging, settings.LOGLEVEL, logging.INFO)
logger = JsonLogger.with_default_handlers(level=loglevel, flatten=True)
コード例 #15
0
# -*- coding:utf-8 -*-
import sys
import getopt
import asyncio
import logging
from aiologger.loggers.json import JsonLogger
from amqp_con import AioPikaConsumer

# Module-level JSON logger with pretty-printed (indent=2) output.
# NOTE(review): asyncio.get_event_loop() at import time is deprecated on
# newer Python versions — confirm the target runtime before relying on it.
logger = JsonLogger.with_default_handlers(
    level=logging.DEBUG,
    serializer_kwargs={'indent': 2},
    name='my-test',
    formatter=logging.Formatter(
        '%(asctime)s %(filename)s[line:%(lineno)d] '
        '[%(levelname)s] [%(threadName)s] %(message)s'),
    loop=asyncio.get_event_loop())


async def msg_process_func(routing_key, data):
    """Log incoming messages, but only for the btcusdt symbol."""
    parts = routing_key.split(".")
    # The symbol is built from the 4th and 5th routing-key segments.
    symbol = parts[3] + parts[4]
    if symbol == 'btcusdt':
        await logger.debug("routing_key: %s, data: %s" % (routing_key, data))


if __name__ == "__main__":
    opts = []
    args = []
    try:
        # opts 获取参数元组(-m file_address),args 获取剩余非(-arg para)格式的参数
        opts, args = getopt.getopt(sys.argv[1:], "l:k:q:",
コード例 #16
0
    asyncio.create_task(consumer(pg_pool, queue)),
    data_streams = [
        asyncio.create_task(streamer(filename, queue)) for filename in ribs
    ]

    await asyncio.gather(*data_streams)

    await logger.shutdown()


if __name__ == "__main__":

    # Install uvloop BEFORE creating the loop, otherwise get_event_loop()
    # returns a plain asyncio loop and uvloop is never actually used.
    uvloop.install()
    loop = asyncio.get_event_loop()

    # Use an async logger
    logger = JsonLogger.with_default_handlers()

    try:
        loop.run_until_complete(main())
    except KeyboardInterrupt:
        # Cancel outstanding tasks, then run the loop again so the
        # cancellations actually propagate into the coroutines; without
        # this the tasks are marked cancelled but never unwound.
        tasks = asyncio.all_tasks(loop=loop)
        for task in tasks:
            task.cancel()
        loop.run_until_complete(
            asyncio.gather(*tasks, return_exceptions=True))
    finally:
        # Flush and close the async logger before tearing down the loop.
        loop.run_until_complete(logger.shutdown())
        loop.close()