def run_coroutine(self):
    self.json.put({'coroutine_before_yield': request_context.get_handler_name()})

    yield self.post_url(self.request.host, self.request.uri)

    self.json.put({'coroutine_after_yield': request_context.get_handler_name()})
def format(self, record):
    handler_name = request_context.get_handler_name()
    request_id = request_context.get_request_id()

    record.name = '.'.join(filter(None, [record.name, handler_name, request_id]))

    if not record.msg:
        # no explicit message -- build one from the structured extra fields
        record.msg = ', '.join(f'{k}={v}' for k, v in getattr(record, CUSTOM_JSON_EXTRA, {}).items())

    return super().format(record)
def get_mdc():
    mdc = {'thread': MDC.pid, 'role': MDC.role}

    handler_name = request_context.get_handler_name()
    if handler_name:
        mdc['controller'] = handler_name

    request_id = request_context.get_request_id()
    if request_id:
        mdc['rid'] = request_id

    return mdc
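# Hypothetical usage sketch, not part of the original source: one way the MDC
# dict returned by get_mdc() above could be folded into a JSON log line using
# only the standard library. MDCJsonFormatter is an illustrative name.
import json
import logging


class MDCJsonFormatter(logging.Formatter):
    def format(self, record):
        payload = {'lvl': record.levelname, 'msg': record.getMessage()}
        payload.update(get_mdc())  # adds 'thread'/'role', plus 'controller' and 'rid' when set
        return json.dumps(payload)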
def check(self, request):
    if self.outer_timeout_ms:
        already_spent_time_ms = self.time_since_outer_request_start_sec_supplier() * 1000
        expected_timeout_ms = self.outer_timeout_ms - already_spent_time_ms
        request_timeout_ms = request.request_time_left * 1000
        diff = request_timeout_ms - expected_timeout_ms
        if diff > self.threshold_ms:
            # the request is allowed more time than is left of the outer caller's
            # budget -- report the mismatch
            data = LoggingData(
                self.outer_caller, self.outer_timeout_ms,
                request.upstream.name if request.upstream else None,
                get_handler_name(), request_timeout_ms
            )
            _sender.send_data(data, already_spent_time_ms)
def get_mdc(self):
    mdc = {'thread': self.PID}

    handler_name = request_context.get_handler_name()
    if handler_name:
        mdc['controller'] = handler_name

    request_id = request_context.get_request_id()
    if request_id:
        mdc['rid'] = request_id

    return mdc
def get_page(self):
    def _waited_callback(name):
        return self.finish_group.add(partial(_callback, name, self))

    self.json.put({'page': request_context.get_handler_name()})

    # the handler name must stay visible from every execution context:
    # plain callbacks, a thread pool executor, coroutines and futures
    self.add_callback(_waited_callback('callback'))

    ThreadPoolExecutor(1).submit(_waited_callback('executor'))

    self.add_future(self.run_coroutine(), self.finish_group.add_notification())

    future = self.post_url(self.request.host, self.request.uri)
    self.add_future(future, _waited_callback('future'))
def flush_stages(self, status_code):
    """Writes available stages, total value and status code"""
    self._statsd_client.stack()

    for s in self._stages:
        self._statsd_client.time(f'handler.stages.{s.name}.time', int(s.delta))

    self._statsd_client.flush()

    stages_str = ' '.join('{s.name}={s.delta:.2f}'.format(s=s) for s in self._stages)
    total = sum(s.delta for s in self._stages)

    stages_logger.info(
        'timings for %(page)s : %(stages)s',
        {
            'page': request_context.get_handler_name(),
            'stages': '{0} total={1:.2f} code={2}'.format(stages_str, total, status_code)
        },
    )
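# Hypothetical sketch, not part of the original source: a minimal stage record
# compatible with flush_stages above -- each stage only needs a `name` and a
# `delta` (elapsed time in milliseconds). The field values are made up.
from collections import namedtuple

Stage = namedtuple('Stage', ('name', 'delta'))
stages = [Stage('prepare', 1.2), Stage('page', 15.7), Stage('postprocess', 3.1)]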
def log_request(self, handler):
    if not options.log_json:
        super().log_request(handler)
        return

    request_time = int(1000.0 * handler.request.request_time())
    extra = {
        'ip': handler.request.remote_ip,
        'rid': request_context.get_request_id(),
        'status': handler.get_status(),
        'time': request_time,
        'method': handler.request.method,
        'uri': handler.request.uri,
    }

    handler_name = request_context.get_handler_name()
    if handler_name:
        extra['controller'] = handler_name

    JSON_REQUESTS_LOGGER.info('', extra={CUSTOM_JSON_EXTRA: extra})
def filter(self, record):
    handler_name = request_context.get_handler_name()
    request_id = request_context.get_request_id()
    record.name = '.'.join(filter(None, [record.name, handler_name, request_id]))
    return True
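# Hypothetical usage sketch, not part of the original source: wiring a filter
# like the one above into stdlib logging so that every record's logger name is
# suffixed with the current handler name and request id. RequestContextFilter
# and the logger name 'frontik.handler' are illustrative assumptions.
import logging

handler_logger = logging.getLogger('frontik.handler')
handler_logger.addFilter(RequestContextFilter())
handler_logger.info('page processed')  # record.name -> 'frontik.handler.<handler_name>.<request_id>'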
def _callback(name, handler, *args):
    handler.json.put({name: request_context.get_handler_name()})
def produce_debug_body(self, finishing):
    if not finishing:
        return b''

    start_time = time.time()

    debug_log_data = request_context.get_log_handler().produce_all()
    debug_log_data.set('code', str(int(self.status_code)))
    debug_log_data.set('handler-name', request_context.get_handler_name())
    debug_log_data.set('started', _format_number(self.request._start_time))
    debug_log_data.set('request-id', str(self.request.request_id))
    debug_log_data.set('stages-total', _format_number((time.time() - self.request._start_time) * 1000))

    try:
        debug_log_data.append(E.versions(
            _pretty_print_xml(
                frontik.app.get_frontik_and_apps_versions(self.application)
            )
        ))
    except Exception:
        debug_log.exception('cannot add version information')
        debug_log_data.append(E.versions('failed to get version information'))

    try:
        debug_log_data.append(E.status(
            _pretty_print_json(self.application.get_current_status())
        ))
    except Exception:
        debug_log.exception('cannot add status information')
        debug_log_data.append(E.status('failed to get status information'))

    debug_log_data.append(E.request(
        E.method(self.request.method),
        _params_to_xml(self.request.uri),
        _headers_to_xml(self.request.headers),
        _cookies_to_xml(self.request.headers)
    ))

    debug_log_data.append(E.response(
        _headers_to_xml(self.headers),
        _cookies_to_xml(self.headers)
    ))

    response_buffer = b''.join(self.chunks)
    original_response = {
        'buffer': base64.b64encode(response_buffer),
        'headers': dict(self.headers),
        'code': int(self.status_code)
    }

    debug_log_data.append(frontik.xml_util.dict_to_xml(original_response, 'original-response'))
    debug_log_data.set('response-size', str(len(response_buffer)))
    debug_log_data.set('generate-time', _format_number((time.time() - start_time) * 1000))

    for upstream in debug_log_data.xpath('//meta-info/upstream'):
        upstream.set('color', _string_to_color(upstream.get('name')))

    # render the collected debug data through the XSL template unless this debug
    # page is inherited from an upstream request; fall back to raw XML on failure
    if not getattr(self.request, '_debug_inherited', False):
        try:
            transform = etree.XSLT(etree.parse(DEBUG_XSL))
            log_document = utf8(str(transform(debug_log_data)))
        except Exception:
            debug_log.exception('XSLT debug file error')

            try:
                debug_log.error('XSL error log entries:\n' + '\n'.join(
                    '{0.filename}:{0.line}:{0.column}\n\t{0.message}'.format(m)
                    for m in transform.error_log
                ))
            except Exception:
                pass

            log_document = etree.tostring(debug_log_data, encoding='UTF-8', xml_declaration=True)
    else:
        log_document = etree.tostring(debug_log_data, encoding='UTF-8', xml_declaration=True)

    return log_document