async def scrape_channel(channel, start, file, limit=None):
    logger = JsonLogger()
    handler = AsyncFileHandler(filename=file)
    logger.add_handler(handler)
    # Only coerce limit when it is a digit string; otherwise fetch the full history
    limit = int(limit) if limit and limit.isdigit() else None
    async for m in channel.history(limit=limit):
        logger = await log_message(m, logger=logger)
    await logger.shutdown()
    return file, int(time.time() - start)
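# A minimal usage sketch for scrape_channel above; `channel` is assumed to be a
# discord.TextChannel, and the caller name and file path here are hypothetical.
import time

async def dump_history(channel):
    file, elapsed = await scrape_channel(
        channel,
        start=time.time(),
        file="./message_dump.json",
        limit="500",  # passed as a string on purpose: digit strings are coerced
    )
    print(f"Wrote {file} in {elapsed}s")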
def get_logger(nats_handler, service_type):
    """
    Creates and returns a JsonLogger wired to a NATS logging handler.

    Args:
        nats_handler: a nats_handler object
        service_type: type of service
    """
    handler = NatsLoggingHandler(nats_handler, service_type)
    formatter = ExtendedJsonFormatter(exclude_fields=["file_path"])
    handler.formatter = formatter
    logger = JsonLogger(name=nats_handler.sender_id)
    logger.add_handler(handler)
    return logger
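# A hedged usage sketch of get_logger above; the "scheduler" service type and
# the caller are hypothetical. The only assumption is what the function itself
# requires: nats_handler exposes a sender_id attribute.
async def emit_startup_event(nats_handler):
    logger = get_logger(nats_handler, service_type="scheduler")
    await logger.info({"event": "service_started"})
    await logger.shutdown()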
async def initialize(app: aiohttp.web.Application):
    """
    Initialize the application context.

    :return:
    """
    global task_provider, _db_engine, _rmq_connection_pool, _rmq_channel_pool, \
        producer, was_initialized, logger
    _db_engine = await gino.create_engine(configs.DATABASE_URI)
    task_provider = TaskProvider(_db_engine)
    loop = asyncio.get_running_loop()
    _rmq_connection_pool = aio_pika.pool.Pool(_get_connection, loop=loop)
    _rmq_channel_pool = aio_pika.pool.Pool(_get_channel, loop=loop)
    async with _rmq_channel_pool.acquire() as channel:
        await channel.declare_queue(configs.TASK_Q_NAME, durable=True)
    producer = Producer(_rmq_channel_pool)
    logger = JsonLogger.with_default_handlers(
        level=logging.DEBUG,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, 'file_path', 'line_number'])
    was_initialized = True
def __init__(self, database_config: DataBaseConfig, client_config: ClientConfig) -> None:
    self.database_config = database_config
    self.client_config = client_config
    self.logger: Logger = JsonLogger.with_default_handlers(name="log/server.log")
    self.status_call_client = False
async def setUp(self):
    r_fileno, w_fileno = os.pipe()
    self.read_pipe = os.fdopen(r_fileno, 'r')
    self.write_pipe = os.fdopen(w_fileno, 'w')
    patch('aiologger.logger.sys.stdout', self.write_pipe).start()
    patch('aiologger.logger.sys.stderr', self.write_pipe).start()
    self.stream_reader, self.reader_transport = await self._make_read_pipe_stream_reader()
    self.logger = JsonLogger.with_default_handlers(level=logging.DEBUG)
async def test_extra_parameter_adds_content_to_root_of_all_messages(self):
    logger = JsonLogger.with_default_handlers(level=10, extra={'dog': 'Xablau'})
    message = {'log_message': 'Xena'}
    await logger.info(message)
    logged_content = json.loads(await self.stream_reader.readline())
    self.assertEqual(logged_content['msg']['log_message'], 'Xena')
    self.assertEqual(logged_content['dog'], 'Xablau')
async def test_extra_parameter_adds_content_to_root_of_all_messages(self):
    logger = JsonLogger.with_default_handlers(level=10, extra={"dog": "Xablau"})
    message = {"log_message": "Xena"}
    await logger.info(message)
    logged_content = json.loads(await self.stream_reader.readline())
    self.assertEqual(logged_content["msg"]["log_message"], "Xena")
    self.assertEqual(logged_content["dog"], "Xablau")
def get_logger() -> Logger:
    """Instantiate app logger"""
    settings = LoggerSettings()
    # Configure uvicorn logger
    uvicorn_access_logger = logging.getLogger("uvicorn.access")
    uvicorn_access_logger.setLevel(settings.log_level)
    return JsonLogger.with_default_handlers(
        name="serengeti",
        level=getattr(LogLevel, settings.log_level, LogLevel.DEBUG),
        serializer_kwargs={"indent": 4},
    )
async def test_default_fields_are_excludeable(self):
    logger = JsonLogger.with_default_handlers(
        level=10,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, LOG_LEVEL_FIELDNAME],
    )
    await logger.info("Xablau")
    logged_content = json.loads(await self.stream_reader.readline())
    self.assertNotIn(FUNCTION_NAME_FIELDNAME, logged_content)
    self.assertNotIn(LOG_LEVEL_FIELDNAME, logged_content)
async def test_it_logs_time_at_desired_tz(self):
    desired_tz = timezone(timedelta(hours=-1))
    now = datetime.now(tz=timezone.utc).astimezone(desired_tz).isoformat()
    logger = JsonLogger.with_default_handlers(level=logging.DEBUG, tz=desired_tz)
    await logger.error("Batemos tambores, eles panela.")
    logged_content = await self.stream_reader.readline()
    json_log = json.loads(logged_content)
    self.assertEqual(json_log['logged_at'], now)
async def setUp(self):
    r_fileno, w_fileno = os.pipe()
    self.read_pipe = os.fdopen(r_fileno, "r")
    self.write_pipe = os.fdopen(w_fileno, "w")
    patch("aiologger.logger.sys.stdout", self.write_pipe).start()
    patch("aiologger.logger.sys.stderr", self.write_pipe).start()
    self.stream_reader, self.reader_transport = await make_read_pipe_stream_reader(
        self.loop, self.read_pipe
    )
    self.logger = JsonLogger.with_default_handlers(level=LogLevel.DEBUG)
async def test_extra_parameter_on_log_method_function_call_updates_extra_parameter_on_init(self):
    logger = JsonLogger.with_default_handlers(level=10, extra={'dog': 'Xablau'})
    message = {'log_message': 'Xena'}
    await logger.info(message, extra={"ham": "eggs"})
    logged_content = json.loads(await self.stream_reader.readline())
    self.assertEqual(logged_content['msg']['log_message'], 'Xena')
    self.assertEqual(logged_content['dog'], 'Xablau')
    self.assertEqual(logged_content['ham'], 'eggs')
async def test_extra_parameter_on_log_method_function_call_updates_extra_parameter_on_init(self):
    logger = JsonLogger.with_default_handlers(level=10, extra={"dog": "Xablau"})
    message = {"log_message": "Xena"}
    await logger.info(message, extra={"ham": "eggs"})
    logged_content = json.loads(await self.stream_reader.readline())
    self.assertEqual(logged_content["msg"]["log_message"], "Xena")
    self.assertEqual(logged_content["dog"], "Xablau")
    self.assertEqual(logged_content["ham"], "eggs")
async def initialize():
    """
    Initialize the application context.

    :return:
    """
    global task_provider, queue, connection, was_initialized, logger
    _engine = await gino.create_engine(configs.DATABASE_URI)
    task_provider = TaskProvider(_engine)
    connection = await aio_pika.connect(configs.RABBITMQ_URI,
                                        loop=asyncio.get_running_loop())
    channel = await connection.channel()
    queue = await channel.declare_queue(configs.TASK_Q_NAME, durable=True)
    logger = JsonLogger.with_default_handlers(
        level=logging.DEBUG,
        exclude_fields=[FUNCTION_NAME_FIELDNAME, 'file_path', 'line_number'])
    was_initialized = True
import json
import asyncio

import discord
from aiologger.loggers.json import JsonLogger
from aiologger.handlers.files import AsyncFileHandler

logger = JsonLogger()
handler = AsyncFileHandler(filename="./epic/import/history/message_dump.json")
logger.add_handler(handler)


def get_author(author):
    return ({
        "name": author.name,
        "discriminator": author.discriminator,
        "bot": author.bot,
        "icon_url": getattr(author, "icon_url", None),
        "guild": {
            "name": author.guild.name,
            "id": author.guild.id,
        } if getattr(author, "guild", None) else None,
    } if author else None)


def get_field(field):
    return {
        "name": getattr(field, "name", None),
        "value": getattr(field, "value", None),
        "inline": getattr(field, "inline", False),
    }
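# A hypothetical log_message built on the helpers above; the real function used
# by scrape_channel is not shown here, so the field names are illustrative only.
async def log_message(m, logger):
    await logger.info({
        "id": m.id,
        "created_at": str(m.created_at),
        "author": get_author(m.author),
        "content": m.content,
    })
    return logger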
import os
import json
import time
import random
import logging

from aiologger.loggers.json import JsonLogger
from aiologger.handlers.files import AsyncTimedRotatingFileHandler
from aiologger.handlers.files import RolloverInterval

__all__ = ['build_sample_middleware']

MODEL_NAME = os.environ.get('model_name', None)
MODEL_VERSION = os.environ.get('model_version', None)
# Environment variables arrive as strings, so coerce the backup count to int
SAMPLE_LOG_BACKUP_COUNT = int(os.environ.get('model_sample_log_backup_count', 7))

sample_logger = JsonLogger(name='jcake', flatten=True)
api_logger = logging.getLogger("demo.predict")

handler = AsyncTimedRotatingFileHandler(filename="sample/sample",
                                        backup_count=SAMPLE_LOG_BACKUP_COUNT,
                                        when=RolloverInterval.DAYS,
                                        encoding='utf-8')
sample_logger.add_handler(handler)


def build_sample_info(request_ms, request, response, model_name, model_version):
    """
    Build the sampling payload. This sampling implementation currently only
    supports POST requests.
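# A hedged sketch of how the rotating sampler above might emit a record; the
# body of build_sample_info is truncated, so this payload is illustrative only.
async def log_sample(request_ms, request, response):
    await sample_logger.info({
        "model_name": MODEL_NAME,
        "model_version": MODEL_VERSION,
        "request_ms": request_ms,
    })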
# -*- coding:utf-8 -*-
import sys
import getopt
import asyncio
import logging

from aiologger.loggers.json import JsonLogger
from amqp_con import AioPikaConsumer

logger = JsonLogger.with_default_handlers(
    level=logging.DEBUG,
    serializer_kwargs={'indent': 2},
    name='my-test',
    formatter=logging.Formatter(
        '%(asctime)s %(filename)s[line:%(lineno)d] '
        '[%(levelname)s] [%(threadName)s] %(message)s'),
    loop=asyncio.get_event_loop())


async def msg_process_func(routing_key, data):
    routing_key_words = routing_key.split(".")
    symbol = routing_key_words[3] + routing_key_words[4]
    if 'btcusdt' == symbol:
        await logger.debug("routing_key: %s, data: %s" % (routing_key, data))


if __name__ == "__main__":
    opts = []
    args = []
    try:
        # opts collects the (-m file_address) option tuples; args collects the
        # remaining arguments that are not in (-arg para) form
        opts, args = getopt.getopt(sys.argv[1:], "l:k:q:",
import asyncio

import discord
from aiologger.loggers.json import JsonLogger
from discord.ext.commands import Bot, MissingRequiredArgument, context

from search import GoogleSearch
from storage import StorageManager

logger = JsonLogger.with_default_handlers(name=__name__)


def initialize_bot(bot: Bot, storage_manager: StorageManager, google_search: GoogleSearch):
    @bot.command()
    async def google(ctx: context.Context, arg):
        '''
        Bot command to let the user perform a Google search and get the top 5 links
        Ex: !google node
            !google "game of thrones"
        '''
        # No need to wait for the message delivery, hence no await
        asyncio.create_task(ctx.send(f'Fetching top 5 links for {arg}'))
        # This shows the user a typing indicator in the UI
        with ctx.channel.typing():
            # The Google search does not depend on the storage, hence not awaited
            asyncio.create_task(
                storage_manager.save_recent_search(ctx.author.id, arg))
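# One caveat with the fire-and-forget calls above: the event loop keeps only
# weak references to tasks, so an unreferenced task can be garbage-collected
# mid-flight. A common workaround sketch (names here are hypothetical):
_background_tasks = set()

def fire_and_forget(coro):
    task = asyncio.create_task(coro)
    _background_tasks.add(task)
    task.add_done_callback(_background_tasks.discard)
    return task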
    asyncio.create_task(consumer(pg_pool, queue)),
    data_streams = [
        asyncio.create_task(streamer(filename, queue)) for filename in ribs
    ]
    await asyncio.gather(*data_streams)
    await logger.shutdown()


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    # Use an async logger
    logger = JsonLogger.with_default_handlers()
    # uvloop is a faster event loop implementation
    uvloop.install()
    # This try/except block does not shut down coroutines properly. Needs fixing.
    try:
        loop.run_until_complete(main())
    except KeyboardInterrupt:
        tasks = asyncio.all_tasks()
        for task in tasks:
            task.cancel()
    finally:
        loop.run_until_complete(logger.shutdown())
        loop.close()
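# One possible fix for the shutdown caveat flagged above: let asyncio.run()
# own the loop and task cancellation, and close the logger in a finally block.
# This is a sketch under those assumptions, not the project's actual entrypoint.
import asyncio
import uvloop

async def run():
    try:
        await main()
    finally:
        await logger.shutdown()

if __name__ == "__main__":
    uvloop.install()
    asyncio.run(run())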
    HTTP_HOST: str = "127.0.0.1"
    HTTP_PORT: int = 8080
    FLUSH_TIMEOUT: int = DefaultValues.BULK_FLUSH_INTERVAL

    # metrics
    METRICS_NAMESPACE: str = "asyncworker"
    METRICS_APPPREFIX: Optional[str]
    METRICS_ROUTE_PATH: str = "/metrics"
    METRICS_ROUTE_ENABLED: bool = True
    METRICS_DEFAULT_HISTOGRAM_BUCKETS_IN_MS: List[float] = [
        10,
        50,
        100,
        200,
        500,
        1000,
        5000,
        INFINITY,
    ]

    class Config:
        allow_mutation = False
        env_prefix = "ASYNCWORKER_"


settings = Settings()

loglevel = getattr(logging, settings.LOGLEVEL, logging.INFO)
logger = JsonLogger.with_default_handlers(level=loglevel, flatten=True)
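# With env_prefix = "ASYNCWORKER_", pydantic maps environment variables onto
# the Settings fields above. A hypothetical override, shown as shell comments:
#
#   $ export ASYNCWORKER_HTTP_PORT=9000
#   $ export ASYNCWORKER_LOGLEVEL=DEBUG
#   $ python app.py   # Settings().HTTP_PORT == 9000, logger at DEBUG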