async def test_parallel_binds(self, event_loop):
    """
    Variables bound inside a task are merged into that task's result
    without leaking into a sibling task's context.
    """
    first_bound = asyncio.Event()
    second_bound = asyncio.Event()

    # Bound before the tasks start, so both inherit it.
    bind_contextvars(c=3)

    async def coro1():
        bind_contextvars(a=1)
        first_bound.set()
        await second_bound.wait()
        return merge_contextvars(None, None, {"b": 2})

    async def coro2():
        bind_contextvars(a=2)
        await first_bound.wait()
        second_bound.set()
        return merge_contextvars(None, None, {"b": 2})

    task1 = event_loop.create_task(coro1())
    task2 = event_loop.create_task(coro2())

    # Each task sees its own ``a`` plus the shared ``c`` and event dict.
    assert {"a": 1, "b": 2, "c": 3} == await task1
    assert {"a": 2, "b": 2, "c": 3} == await task2
async def coro2():
    """Bind, rendezvous with the sibling task, then merge the context."""
    bind_contextvars(a=2)
    # Let coro1 bind first, then release it before merging.
    await coro1_bind.wait()
    coro2_bind.set()
    return merge_contextvars(None, None, {"b": 2})
def logging_config():
    """
    Route INFO/WARNING records to stdout and ERROR records to stderr,
    then configure structlog on top of the stdlib logger so messages
    are rendered as JSON.
    """
    stderr_handler = logging.StreamHandler(sys.stderr)
    stderr_handler.setLevel(logging.ERROR)

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setLevel(logging.INFO)
    # Cap stdout at WARNING so errors only appear on stderr.
    stdout_handler.addFilter(_MaxLevelFilter(logging.WARNING))

    logging.basicConfig(
        format='%(message)s',
        level=os.getenv('LOGGING_LEVEL', 'INFO'),
        handlers=[stdout_handler, stderr_handler],
    )

    configure(
        logger_factory=LoggerFactory(),
        processors=[
            structlog.stdlib.add_log_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.PositionalArgumentsFormatter(),
            merge_contextvars,
            structlog.processors.JSONRenderer(),
        ],
    )

    bind_contextvars(app='SDX-Deliver')
async def _message_loop(self):
    """
    Consume websocket messages forever, validating each one and
    dispatching its ``msg`` event to the registered handlers.
    """
    async for message in self.websocket:
        # Each message starts with a clean logging context.
        clear_contextvars()
        bind_contextvars(message=message)

        if message.type == aiohttp.WSMsgType.ERROR:
            await self.websocket_message_error(message)
            continue

        try:
            data = json.loads(message.data)
        except JSONDecodeError as exception:
            await self.websocket_message_not_parsable(message, exception)
            continue

        # Parsed successfully; the raw frame is no longer interesting.
        unbind_contextvars("message")
        # bind_contextvars(data=data)

        if "msg" not in data:
            await self.websocket_message_missing_data_msg(data)
            continue

        fired = self.websocket_event_system.fire_event(
            event=data["msg"], data=data)
        if fired == 0:
            await self.missing_message_handler(data)
def bind_client_name_to_logger(name: str):  # type:ignore
    """
    Generator-based context manager: tag log entries with ``client``
    while the managed block runs, removing the tag afterwards.
    """
    if name:
        bind_contextvars(client=name)
    try:
        yield
    finally:
        # Unbind unconditionally (even for a falsy name), matching the
        # original control flow; missing keys are tolerated by structlog.
        unbind_contextvars("client")
def _bind_context(**kwargs):
    """Context manager to bind logging contextvars.

    ``None`` values are filtered out before binding; every key that was
    bound is unbound again when the managed block exits.
    """
    # Filter None values before binding vars
    filtered_kwargs = {
        key: value for (key, value) in kwargs.items() if value is not None
    }
    bind_contextvars(**filtered_kwargs)
    try:
        yield
    finally:
        # Without try/finally, an exception inside the managed block
        # would skip the unbind and leak these keys into every later
        # log entry on this context (cf. the sibling helpers that
        # already use try/finally).
        unbind_contextvars(*filtered_kwargs.keys())
def bind_remote_address_to_logger(sock: socket.socket):  # type:ignore
    """
    Generator context manager that tags log entries with the peer
    address of *sock* for the duration of the managed block.
    """
    try:
        bind_contextvars(remote=sock.getpeername())
    except socket.error:
        # Unconnected or already-closed socket: proceed without the tag.
        pass
    try:
        yield
    finally:
        unbind_contextvars("remote")
def apply_task_context(task, **kwargs):
    """This clears the current structured logging context and readies
    it for a new task from `servicelayer.jobs`."""
    # Drop whatever the previous task left behind, then bind the new
    # task's identifiers plus any caller-supplied extras.
    clear_contextvars()
    bind_contextvars(
        job_id=task.job.id,
        stage=task.stage.stage,
        dataset=task.job.dataset.name,
        start_time=time.time(),
        trace_id=str(uuid.uuid4()),
        **kwargs,
    )
async def add_context_logging(request: Request, call_next):
    """
    Middleware: bind a fresh ``request_id`` to the logging context for
    the duration of the request and echo it back in ``X-Request-ID``.
    """
    clear_contextvars()
    request_id = str(uuid.uuid4())
    bind_contextvars(request_id=request_id)
    try:
        response = await call_next(request)
    finally:
        # Clear even when the handler raises; otherwise the id would
        # leak into log entries of unrelated work on this context.
        clear_contextvars()
    response.headers["X-Request-ID"] = request_id
    return response
async def inject_request_meta(request: Request, call_next) -> Response:
    """
    Generate a request id, expose it via the contextvar, the request
    state, and the structlog context, then continue the chain.
    """
    import uuid

    rid = str(uuid.uuid4())
    _request_id_ctx_var.set(rid)
    request.state.request_id = rid
    bind_contextvars(
        request_id=rid,
        url=request.url.path,
        method=request.method,
    )
    return await call_next(request)
def fire_event(self, event, data):
    """
    Schedule every handler registered for *event* on the loop and
    return the number of handlers scheduled.
    """
    handlers = self.handlers[event]
    bind_contextvars(handler_event=event)
    for register_id, callback in handlers.items():
        bind_contextvars(register_id=register_id)
        self.logger.debug("Scheduled handler")
        # callback/data are bound as lambda parameters so every
        # scheduled call keeps its own pair (no late-binding closure).
        self.loop.call_soon(
            lambda cb, payload: asyncio.ensure_future(cb(payload)),
            callback,
            data,
        )
    return len(handlers)
def test_on_response_streaming_http_response():
    # Verifies that on_response resets the request-id context var even
    # for streaming responses.
    class MockRequest:
        method = 'GET'
        url = 'http://localhost'
    req = MockRequest()
    resp = StreamingHTTPResponse(None)
    contextvars.bind_contextvars(request_id='test-request', )
    # NOTE(review): reaches into structlog's private _CONTEXT_VARS
    # registry — may break across structlog versions; confirm.
    ctx = contextvars._CONTEXT_VARS
    assert 'test-request' == ctx['structlog_request_id'].get()
    app.on_response(req, resp)
    # After the hook runs, the var is reset to its Ellipsis sentinel.
    ctx = contextvars._CONTEXT_VARS
    assert ctx['structlog_request_id'].get() is Ellipsis
def process(meta_data: MetaWrapper, data_bytes: bytes) -> str:
    """
    Binds submission data to logger and begins deliver process.

    :param meta_data: wrapper describing the submission being delivered
    :param data_bytes: raw submission payload
    :return: a JSON success response, or an error response on failure
    """
    # NOTE(review): jsonify returns a Response object, so the ``-> str``
    # annotation looks wrong (cf. the sibling version annotated
    # ``-> Response``) — kept unchanged here to avoid touching callers.
    try:
        # One consolidated bind instead of five separate calls, matching
        # the sibling implementation; currentThread().getName() is
        # deprecated in favour of current_thread().name.
        bind_contextvars(
            app="SDX-Deliver",
            tx_id=meta_data.tx_id,
            survey_id=meta_data.survey_id,
            output_type=meta_data.output_type,
            thread=threading.current_thread().name,
        )
        logger.info("Processing request")
        deliver(meta_data, data_bytes)
        logger.info("Process completed successfully")
        return jsonify(success=True)
    except Exception as e:
        return server_error(e)
def on_request(request: Request) -> None:
    """Middleware function that runs prior to processing a request via Sanic."""
    # A fresh short id identifies this request in every log line emitted
    # while it is handled.
    req_id = shortuuid.uuid()
    request.ctx.uuid = req_id

    contextvars.clear_contextvars()
    contextvars.bind_contextvars(request_id=req_id)

    logger.debug(
        'processing HTTP request',
        method=request.method,
        ip=request.ip,
        path=request.path,
        headers=dict(request.headers),
        args=request.args,
    )
def generate_request_log(resp, took):
    """Collect data about the request for analytical purposes."""
    payload = {
        "time": datetime.utcnow().isoformat(),
        "took": took,
        "status": resp.status_code,
    }

    # Merge URL view args with any tags attached during handling, but
    # never overwrite a field already present in the payload.
    tags = dict(request.view_args or ())
    if hasattr(request, "_log_tags"):
        tags.update(request._log_tags)
    for tag, value in tags.items():
        if value is not None and tag not in payload:
            payload[tag] = value

    bind_contextvars(**payload)
    # Guarantees one analytics log entry per request, carrying the
    # context bound above.
    log.info("Request handled", request_logging=True)
def setup_logging_context(request):
    """
    Set up context variables for structured logging; the context is
    included with every log entry produced by this particular request.
    """
    role_id = request.authz.id if hasattr(request, "authz") else None

    clear_contextvars()
    bind_contextvars(
        v=__version__,
        method=request.method,
        endpoint=request.endpoint,
        referrer=request.referrer,
        ip=_get_remote_ip(),
        ua=str(request.user_agent),
        begin_time=datetime.utcfromtimestamp(request._begin_time).isoformat(),
        role_id=role_id,
        session_id=getattr(request, "_session_id", None),
        locale=getattr(request, "_app_locale", None),
        url=request.url,
        path=request.full_path,
        trace_id=request._trace_id,
    )
def process(meta_data: MetaWrapper, data_bytes: bytes) -> Response:
    """
    Binds submission data to logger and begins deliver process
    """
    try:
        bind_contextvars(
            tx_id=meta_data.tx_id,
            survey_id=meta_data.survey_id,
            output_type=meta_data.output_type,
            thread=threading.currentThread().getName(),
        )
        logger.info("Processing request")
        deliver(meta_data, data_bytes)
        # WE USE THE BELOW LOG MESSAGE "logger.info("Process completed successfully")"
        # TO CREATE "LOG-BASED" CUSTOM METRICS. DO NOT CHANGE THIS STATEMENT.
        logger.info("Process completed successfully")
        return jsonify(success=True)
    except Exception as e:
        return server_error(e)
    finally:
        # Context keys are dropped no matter how the request ends.
        unbind_contextvars('tx_id', 'survey_id', 'output_type', 'thread')
def bind(self):
    """Attach this object's dataset name and run id to the log context."""
    bind_contextvars(
        dataset=self.dataset.name,
        run_id=self.run_id,
    )
async def coro():
    """Bind then clear; only the event-dict keys should survive the merge."""
    bind_contextvars(a=1)
    clear_contextvars()
    event_dict = {"b": 2}
    return merge_contextvars(None, None, event_dict)
async def coro():
    """Merge where the event dict carries a key that shadows a bound one."""
    bind_contextvars(a=1)
    event_dict = {"a": 111, "b": 2}
    return merge_contextvars(None, None, event_dict)
async def nested_coro():
    """Bind inside this coroutine, then merge with a fresh event dict."""
    bind_contextvars(c=3)
    event_dict = {"b": 2}
    return merge_contextvars(None, None, event_dict)
async def coro():
    """Bind in the parent, then delegate to a task running nested_coro."""
    bind_contextvars(a=1)
    nested_task = event_loop.create_task(nested_coro())
    return await nested_task
async def coro():
    """A later bind overrides earlier values and can introduce new keys."""
    bind_contextvars(a=1, c=3)
    bind_contextvars(c=333, d=4)
    event_dict = {"b": 2}
    return merge_contextvars(None, None, event_dict)
def __enter__(self):
    """Snapshot the currently bound vars, then overlay this instance's."""
    # Merging with an empty event dict returns a copy of whatever is
    # currently bound. NOTE(review): presumably restored by __exit__,
    # which is not visible here — confirm.
    self._existing_vars = contextvars.merge_contextvars(
        logger=None, method_name=None, event_dict={}
    )
    contextvars.bind_contextvars(**self._context)
    return self
def _bind(request_id):
    """
    Generator context manager: bind ``request_id`` to the logging
    context for the managed block, then reset the context.
    """
    bind_contextvars(request_id=request_id)
    try:
        yield
    finally:
        # try/finally guarantees the context is cleared even when the
        # managed block raises; otherwise the id would leak into log
        # entries of unrelated later work.
        clear_contextvars()
async def coro():
    """A key that is bound and then unbound must not appear in the merge."""
    bind_contextvars(a=1)
    unbind_contextvars("a")
    event_dict = {"b": 2}
    return merge_contextvars(None, None, event_dict)
def bind_logger(tracker: Tracker):
    """Reset the logging context and tag it with the tracker's sender id."""
    clear_contextvars()
    bind_contextvars(sender_id=tracker.sender_id)
async def nested_coro():
    """Bind a value from within a child coroutine."""
    bind_contextvars(c=3)
async def coro():
    """Run a child task that binds its own value, then merge in the parent."""
    bind_contextvars(a=1)
    child = event_loop.create_task(nested_coro())
    await child
    return merge_contextvars_context(None, None, {"b": 2})
def bind(self) -> None:
    """Attach this object's dataset name to the logging context."""
    bind_contextvars(dataset=self.dataset.name)