def test_exception_in_first(self):
    def callback1():
        raise Exception('callback1 error')

    def callback2():
        self.fail('callback2 should not be called')

    def finish_callback():
        self.fail('finish_callback should not be called')

    ag = AsyncGroup(finish_callback, name='test_group')
    cb1 = ag.add(callback1)
    cb2 = ag.add(callback2)

    self.assertRaises(Exception, cb1)
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)

    with ExpectLog(async_logger, '.*test_group group: ignoring response because of already finished group'):
        cb2()

    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)
def test_exception_in_first(self):
    log = []

    def callback1():
        raise Exception('callback1 error')

    def callback2():
        self.fail('callback2 should not be called')

    def finish_callback():
        self.fail('finish_callback should not be called')

    ag = AsyncGroup(finish_callback, log=lambda msg, *args: log.append(msg % args), name='test_group')
    cb1 = ag.add(callback1)
    cb2 = ag.add(callback2)

    self.assertRaises(Exception, cb1)
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)

    cb2()

    self.assertEqual(log[-1], 'test_group group: ignoring response because of already finished group')
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)
def prepare(self):
    self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
    self.debug = PageHandlerDebug(self)

    self.json_producer = frontik.producers.json_producer.JsonProducer(
        self, self._app_globals.json, getattr(self, 'json_encoder', None))
    self.json = self.json_producer.json

    self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self._app_globals.xml)
    self.xml = self.xml_producer  # deprecated synonym
    self.doc = self.xml_producer.doc

    if self.get_argument('nopost', None) is not None:
        self.require_debug_access()
        self.apply_postprocessor = False
        self.log.debug('apply_postprocessor = False due to "nopost" argument')
    else:
        self.apply_postprocessor = True

    if tornado.options.options.long_request_timeout:
        # add long requests timeout
        self.finish_timeout_handle = IOLoop.instance().add_timeout(
            time.time() + tornado.options.options.long_request_timeout, self.__handle_long_request)

    self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', log=self.log.info)
    self._prepared = True
def test_callbacks(self):
    data = []

    def callback2():
        data.append(2)

    def finish_callback():
        self.assertEqual(data, [1, 2])
        data.append(3)

    ag = AsyncGroup(finish_callback)
    cb1 = ag.add(partial(data.append, 1))
    cb2 = ag.add(callback2)

    self.assertEqual(ag._finish_cb_called, False)

    ag.try_finish()
    self.assertEqual(ag._finish_cb_called, False)

    cb1()
    self.assertEqual(ag._finish_cb_called, False)

    cb2()
    self.assertEqual(ag._finish_cb_called, True)
    self.assertEqual(ag._aborted, False)
    self.assertEqual(data, [1, 2, 3])
def test_exception_in_first(self):
    log = []

    def callback1():
        raise Exception('callback1 error')

    def callback2():
        self.fail('callback2 should not be called')

    def finish_callback():
        self.fail('finish_callback should not be called')

    ag = AsyncGroup(finish_callback, log=lambda msg, *args: log.append(msg % args), name='test_group')
    cb1 = ag.add(callback1)
    cb2 = ag.add(callback2)

    self.assertRaises(Exception, cb1)
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)

    cb2()

    self.assertEqual(log[-1], 'test_group group: Ignoring response because of already finished group')
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)
def group(self, futures, callback=None, name=None):
    if callable(callback):
        results_holder = {}
        group_callback = self.handler.finish_group.add(partial(callback, results_holder))

        def delay_cb():
            IOLoop.instance().add_callback(self.handler.check_finished(group_callback))

        async_group = AsyncGroup(delay_cb, logger=self.handler.log, name=name)

        def future_callback(name, future):
            results_holder[name] = future.result()

        for name, future in iteritems(futures):
            if future.done():
                future_callback(name, future)
            else:
                self.handler.add_future(future, async_group.add(partial(future_callback, name)))

        async_group.try_finish()

    return futures
def test_finish(self):
    f = Future()
    ag = AsyncGroup(partial(f.set_result, True))

    self.assertEqual(ag._finish_cb_called, False)

    ag.add_notification()
    ag.finish()

    self.assertEqual(ag._finish_cb_called, True)
    self.assertEqual(ag._aborted, False)
    self.assertEqual(f.result(), True)
def prepare(self):
    self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
    self.debug_mode = DebugMode(self)
    self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish')

    self.json_producer = self.application.json.get_producer(self)
    self.json = self.json_producer.json

    self.xml_producer = self.application.xml.get_producer(self)
    self.xml = self.xml_producer  # deprecated synonym
    self.doc = self.xml_producer.doc

    self._prepared = True

    super(BaseHandler, self).prepare()
def test_notifications(self):
    f = Future()
    ag = AsyncGroup(partial(f.set_result, True))

    not1 = ag.add_notification()
    not2 = ag.add_notification()

    self.assertEqual(ag._finish_cb_called, False)

    not1()
    self.assertEqual(ag._finish_cb_called, False)

    not2('params', are='ignored')
    self.assertEqual(ag._finish_cb_called, True)
    self.assertEqual(ag._aborted, False)
    self.assertEqual(f.result(), True)
def test_exception_in_final(self):
    def finish_callback():
        raise Exception('callback1 error')

    ag = AsyncGroup(finish_callback)

    self.assertRaises(Exception, ag.try_finish)
    self.assertEqual(ag._finish_cb_called, True)
    self.assertEqual(ag._aborted, False)
def prepare(self):
    self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
    self.debug = PageHandlerDebug(self)

    self.json_producer = frontik.producers.json_producer.JsonProducer(
        self, self.application.json, getattr(self, 'json_encoder', None))
    self.json = self.json_producer.json

    self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self.application.xml)
    self.xml = self.xml_producer  # deprecated synonym
    self.doc = self.xml_producer.doc

    self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', logger=self.log)
    self._prepared = True
def group(self, futures, callback=None, name=None):
    if callable(callback):
        results_holder = {}
        group_callback = self.handler.finish_group.add(partial(callback, results_holder))

        def delay_cb():
            IOLoop.instance().add_callback(self.handler.check_finished(group_callback))

        async_group = AsyncGroup(delay_cb, logger=self.handler.log, name=name)

        def callback(future_name, future):
            results_holder[future_name] = future.result()

        for name, future in futures.iteritems():
            future.add_done_callback(async_group.add(partial(callback, name)))

        async_group.try_finish()

    return futures
def _function_under_test(handler):
    def finished():
        res = lxml.etree.Element("result")
        res.text = str(handler.result)
        handler.doc.put(res)
        handler.set_header('X-Foo', 'Bar')
        handler.set_status(400)

    handler.result = 0
    ag = AsyncGroup(finished)

    def accumulate(xml, response):
        if response.code >= 400:
            raise HTTPError(503, "remote server returned error with code =" + str(response.code))
        if xml is None:
            raise HTTPError(503)
        handler.result += int(xml.findtext("a"))

    handler.get_url(handler.config.serviceHost + 'vacancy/1234', callback=ag.add(accumulate))
    handler.get_url(handler.config.serviceHost + 'employer/1234', callback=ag.add(accumulate))
def test_exception_in_last(self):
    log = []

    def callback2():
        raise Exception('callback1 error')

    def finish_callback():
        self.fail('finish_callback should not be called')

    ag = AsyncGroup(finish_callback, log=lambda msg, *args: log.append(msg % args), name='test_group')
    cb1 = ag.add(lambda: None)
    cb2 = ag.add(callback2)

    cb1()

    self.assertRaises(Exception, cb2)
    self.assertEqual(log[-2], 'test_group group: aborting async group due to unhandled exception in callback')
    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)
def get_page(self):
    def finished():
        res = etree.Element('result')
        res.text = str(self.result)
        self.doc.put(res)
        self.set_header('X-Foo', 'Bar')
        self.set_status(400)

    self.result = 0
    ag = AsyncGroup(finished)

    def accumulate(xml, response):
        if response.code >= 400:
            raise HTTPError(503, 'remote server returned error with code {}'.format(response.code))
        if xml is None:
            raise HTTPError(503)
        self.result += int(xml.findtext('a'))

    self.get_url(self.config.serviceHost + 'vacancy/1234', callback=ag.add(accumulate))
    self.get_url(self.config.serviceHost + 'employer/1234', callback=ag.add(accumulate))
def _function_under_test(handler):
    def finished():
        res = lxml.etree.Element('result')
        res.text = str(handler.result)
        handler.doc.put(res)
        handler.set_header('X-Foo', 'Bar')
        handler.set_status(400)

    handler.result = 0
    ag = AsyncGroup(finished)

    def accumulate(xml, response):
        if response.code >= 400:
            raise HTTPError(503, 'remote server returned error with code {}'.format(response.code))
        if xml is None:
            raise HTTPError(503)
        handler.result += int(xml.findtext('a'))

    handler.get_url(handler.config.serviceHost + 'vacancy/1234', callback=ag.add(accumulate))
    handler.get_url(handler.config.serviceHost + 'employer/1234', callback=ag.add(accumulate))
def test_exception_in_last(self):
    def callback2():
        raise Exception('callback1 error')

    def finish_callback():
        self.fail('finish_callback should not be called')

    ag = AsyncGroup(finish_callback, name='test_group')
    cb1 = ag.add(lambda: None)
    cb2 = ag.add(callback2)

    cb1()

    with ExpectLog(async_logger, '.*test_group group: aborting async group due to unhandled exception in callback'):
        self.assertRaises(Exception, cb2)

    self.assertEqual(ag._finish_cb_called, False)
    self.assertEqual(ag._aborted, True)
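# For orientation only: a minimal sketch of the semantics the tests above exercise.
# This is NOT frontik's actual implementation -- only the names that the tests use
# (add, add_notification, try_finish, finish, abort, _finish_cb_called, _aborted)
# and the two log messages are taken from the snippets; everything else, including
# the counter-based bookkeeping, is an assumption. The real class also logs extra
# diagnostics not reproduced here.
import logging


class AsyncGroupSketch(object):
    def __init__(self, finish_cb, log=logging.getLogger(__name__).debug, name=None):
        self._finish_cb = finish_cb
        self._log = log
        self._name = name
        self._pending = 0
        self._finish_cb_called = False
        self._aborted = False

    def abort(self):
        self._aborted = True

    def finish(self):
        # Run the finish callback unconditionally (used to force completion).
        if not self._finish_cb_called:
            self._finish_cb_called = True
            self._finish_cb()

    def try_finish(self):
        # Finish only once every callback produced by add() has fired.
        if self._pending == 0 and not self._aborted:
            self.finish()

    def add(self, intermediate_cb):
        self._pending += 1

        def group_cb(*args, **kwargs):
            if self._finish_cb_called or self._aborted:
                self._log('%s group: ignoring response because of already finished group', self._name)
                return
            try:
                intermediate_cb(*args, **kwargs)
            except Exception:
                self._log('%s group: aborting async group due to unhandled exception in callback', self._name)
                self.abort()
                raise
            self._pending -= 1
            self.try_finish()

        return group_cb

    def add_notification(self):
        # A callback that ignores its arguments -- useful as a plain completion signal.
        return self.add(lambda *args, **kwargs: None)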
def group(self, futures, callback=None, name=None):
    if callable(callback):
        results_holder = {}
        group_callback = self.handler.finish_group.add(self.handler.check_finished(callback, results_holder))

        async_group = AsyncGroup(group_callback, name=name)

        def future_callback(name, future):
            results_holder[name] = future.result()

        for name, future in iteritems(futures):
            if future.done():
                future_callback(name, future)
            else:
                self.handler.add_future(future, async_group.add(partial(future_callback, name)))

        async_group.try_finish_async()

    return futures
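# Hypothetical usage of a futures-based group() helper like the one above, from a
# page handler that delegates self.group() to the HTTP client. It assumes that
# get_url() returns a future in this version of the API (as the iteration over
# `futures` implies); the URL, keys and 'page_data' name are illustrative only.
def get_page(self):
    def done_cb(results):
        # `results` maps each key of the futures dict to that future's result
        self.json.put(results)

    self.group(
        {
            'vacancy': self.get_url(self.config.serviceHost + 'vacancy/1234'),
            'employer': self.get_url(self.config.serviceHost + 'employer/1234'),
        },
        callback=done_cb,
        name='page_data',
    )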
def prepare(self):
    self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
    self.debug = PageHandlerDebug(self)
    self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', logger=self.log)

    self.json_producer = self.application.json.get_producer(self)
    self.json = self.json_producer.json

    self.xml_producer = self.application.xml.get_producer(self)
    self.xml = self.xml_producer  # deprecated synonym
    self.doc = self.xml_producer.doc

    self._prepared = True
def get_page(self):
    n = int(self.get_argument('n'))
    self_uri = self.request.host + self.request.path
    self.add_header('Content-Type', 'text/plain')

    if n < 2:
        self.text = '1'
        return

    self.acc = 0

    def intermediate_cb(text, response):
        self.acc += int(text)

    def final_cb():
        self.text = str(self.acc)

    grp = AsyncGroup(final_cb, name='acc')

    self.get_url(self_uri, {'n': str(n - 1)}, callback=grp.add(intermediate_cb))
    self.get_url(self_uri, {'n': str(n - 2)}, callback=grp.add(intermediate_cb))
def group(self, futures, callback=None, name=None):
    if callable(callback):
        results_holder = {}
        group_callback = self.handler.finish_group.add(partial(callback, results_holder))

        def delay_cb():
            IOLoop.instance().add_callback(self.handler.check_finished(group_callback))

        async_group = AsyncGroup(delay_cb, log=self.handler.log.debug, name=name)

        def callback(future_name, future):
            results_holder[future_name] = future.result().get()

        for name, future in futures.iteritems():
            future.add_done_callback(async_group.add(partial(callback, name)))

        async_group.try_finish()

    return futures
def prepare(self):
    self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
    self.debug = PageHandlerDebug(self)

    self.json_producer = frontik.producers.json_producer.JsonProducer(
        self, self.application.json, getattr(self, 'json_encoder', None))
    self.json = self.json_producer.json

    self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self.application.xml)
    self.xml = self.xml_producer  # deprecated synonym
    self.doc = self.xml_producer.doc

    if frontik.util.get_cookie_or_url_param_value(self, 'nopost') is not None:
        self.require_debug_access()
        self.apply_postprocessor = False
        self.log.debug('apply_postprocessor = False due to "nopost" argument')
    else:
        self.apply_postprocessor = True

    self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', logger=self.log)
    self._prepared = True
class PageHandler(tornado.web.RequestHandler):

    preprocessors = ()  # to restore tornado.web.RequestHandler compatibility

    def __init__(self, application, request, app_globals=None, **kwargs):
        self.handler_started = time.time()
        self._prepared = False

        if app_globals is None:
            raise Exception('{0} need to have app_globals'.format(PageHandler))

        self.name = self.__class__.__name__
        self.request_id = request.headers.get('X-Request-Id', str(global_stats.next_request_id()))
        logger_name = '.'.join(filter(None, [self.request_id, getattr(app_globals.config, 'app_name', None)]))
        self.log = PageLogger(self, logger_name, request.path or request.uri)

        self.config = app_globals.config

        super(PageHandler, self).__init__(application, request, logger=self.log, **kwargs)

        self._app_globals = app_globals
        self._debug_access = None

        # This wrapper is needed in case someone replaces self.fetch_request in runtime,
        # as happens in client legacy code.
        def fetch_request_wrapper(*args, **kwargs):
            return self.fetch_request(*args, **kwargs)

        self._http_client = HttpClient(self, app_globals.curl_http_client, fetch_request_wrapper)

        self._template_postprocessors = []
        self._early_postprocessors = []
        self._late_postprocessors = []

        # this is deprecated
        if hasattr(self.config, 'postprocessor'):
            self.add_template_postprocessor(self.config.postprocessor)

        self.text = None

    def __repr__(self):
        return '.'.join([self.__module__, self.__class__.__name__])

    def initialize(self, logger=None, **kwargs):
        # Hides logger keyword argument from incompatible tornado versions
        super(PageHandler, self).initialize(**kwargs)

    def prepare(self):
        self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
        self.debug = PageHandlerDebug(self)

        self.json_producer = frontik.producers.json_producer.JsonProducer(
            self, self._app_globals.json, getattr(self, 'json_encoder', None))
        self.json = self.json_producer.json

        self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self._app_globals.xml)
        self.xml = self.xml_producer  # deprecated synonym
        self.doc = self.xml_producer.doc

        if self.get_argument('nopost', None) is not None:
            self.require_debug_access()
            self.apply_postprocessor = False
            self.log.debug('apply_postprocessor = False due to "nopost" argument')
        else:
            self.apply_postprocessor = True

        if tornado.options.options.long_request_timeout:
            # add long requests timeout
            self.finish_timeout_handle = IOLoop.instance().add_timeout(
                time.time() + tornado.options.options.long_request_timeout, self.__handle_long_request)

        self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', log=self.log.debug)
        self._prepared = True

    def require_debug_access(self, login=None, passwd=None):
        if self._debug_access is None:
            if tornado.options.options.debug:
                self._debug_access = True
            else:
                check_login = login if login is not None else tornado.options.options.debug_login
                check_passwd = passwd if passwd is not None else tornado.options.options.debug_password
                self._debug_access = frontik.auth.passed_basic_auth(self, check_login, check_passwd)

            if not self._debug_access:
                raise HTTPError(401, headers={'WWW-Authenticate': 'Basic realm="Secure Area"'})

    def decode_argument(self, value, name=None):
        try:
            return super(PageHandler, self).decode_argument(value, name)
        except (UnicodeError, tornado.web.HTTPError):
            self.log.warn('Cannot decode utf-8 query parameter, trying other charsets')

        try:
            return frontik.util.decode_string_from_charset(value)
        except UnicodeError:
            self.log.exception('Cannot decode argument, ignoring invalid chars')
            return value.decode('utf-8', 'ignore')

    def check_finished(self, callback, *args, **kwargs):
        original_callback = callback
        if args or kwargs:
            callback = partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            if self._finished:
                self.log.warn('Page was already finished, {0} ignored'.format(original_callback))
            else:
                callback(*args, **kwargs)

        return wrapper

    def set_status(self, status_code):
        if status_code not in httplib.responses:
            status_code = 503
        super(PageHandler, self).set_status(status_code)

    @staticmethod
    def add_callback(callback):
        IOLoop.instance().add_callback(callback)

    @staticmethod
    def add_timeout(deadline, callback):
        IOLoop.instance().add_timeout(deadline, callback)

    # Requests handling

    @tornado.web.asynchronous
    def post(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.post_page))
        self._finish_page()

    @tornado.web.asynchronous
    def get(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def head(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def delete(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.delete_page))
        self._finish_page()

    @tornado.web.asynchronous
    def put(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.put_page))
        self._finish_page()

    def options(self, *args, **kwargs):
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def _wrap_method(self, handler_method):
        return handler_method

    def get_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def post_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def put_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def delete_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def __get_allowed_methods(self):
        return [name for name in ('get', 'post', 'put', 'delete') if '{0}_page'.format(name) in vars(self.__class__)]

    # HTTP client methods

    def group(self, futures, callback=None, name=None):
        return self._http_client.group(futures, callback, name)

    def get_url(self, url, data=None, headers=None, connect_timeout=None, request_timeout=None, callback=None,
                follow_redirects=True, labels=None, add_to_finish_group=True,
                parse_response=True, parse_on_error=False):
        return self._http_client.get_url(
            url, data, headers, connect_timeout, request_timeout, callback, follow_redirects,
            labels, add_to_finish_group, parse_response, parse_on_error
        )

    def post_url(self, url, data='', headers=None, files=None, connect_timeout=None, request_timeout=None,
                 callback=None, follow_redirects=True, content_type=None, labels=None,
                 add_to_finish_group=True, parse_response=True, parse_on_error=False):
        return self._http_client.post_url(
            url, data, headers, files, connect_timeout, request_timeout, callback, follow_redirects,
            content_type, labels, add_to_finish_group, parse_response, parse_on_error
        )

    def put_url(self, url, data='', headers=None, connect_timeout=None, request_timeout=None, callback=None,
                content_type=None, labels=None, add_to_finish_group=True,
                parse_response=True, parse_on_error=False):
        return self._http_client.put_url(
            url, data, headers, connect_timeout, request_timeout, callback, content_type,
            labels, add_to_finish_group, parse_response, parse_on_error
        )

    def delete_url(self, url, data='', headers=None, connect_timeout=None, request_timeout=None, callback=None,
                   content_type=None, labels=None, add_to_finish_group=True,
                   parse_response=True, parse_on_error=False):
        return self._http_client.delete_url(
            url, data, headers, connect_timeout, request_timeout, callback, content_type,
            labels, add_to_finish_group, parse_response, parse_on_error
        )

    def fetch_request(self, request, callback, add_to_finish_group=True):
        return self._http_client.fetch_request(request, callback, add_to_finish_group)

    # Finish page

    def _finish_page(self):
        self.finish_group.try_finish()

    def _force_finish(self):
        self.finish_group.finish()

    finish_with_postprocessors = _force_finish  # this is the official way now, use it instead of _force_finish

    def _finish_page_cb(self):
        if not self._finished:
            self.log.stage_tag('page')

            def _callback():
                if self.text is not None:
                    producer = self._generic_producer
                elif not self.json.is_empty():
                    producer = self.json_producer
                else:
                    producer = self.xml_producer

                self.log.debug('Using {0} producer'.format(producer))

                if self.apply_postprocessor:
                    producer(partial(self._call_postprocessors, self._template_postprocessors, self.finish))
                else:
                    producer(self.finish)

            self._call_postprocessors(self._early_postprocessors, _callback)
        else:
            self.log.warn('trying to finish already finished page, probably bug in a workflow, ignoring')

    def __handle_long_request(self):
        self.log.warning("long request detected (uri: {0})".format(self.request.uri))
        if tornado.options.options.kill_long_requests:
            self.send_error()

    # headers kwarg is deprecated, remove after all usages are gone
    def send_error(self, status_code=500, headers=None, **kwargs):
        if self._headers_written:
            super(PageHandler, self).send_error(status_code, **kwargs)

        self.clear()
        self.set_status(status_code)

        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            self._logger.error("Uncaught exception in write_error", exc_info=True)

    def write_error(self, status_code=500, **kwargs):
        # write_error in Frontik must be asynchronous when handling custom errors (due to XSLT)
        # e.g. raise HTTPError(503) is syncronous and generates a standard Tornado error page,
        # whereas raise HTTPError(503, xml=...) will call finish_with_postprocessors()
        # the solution is to move self.finish() from send_error to write_error
        # so any write_error override must call either finish() or finish_with_postprocessors() in the end
        # in Tornado 3 it may be better to rewrite this mechanism with futures

        exception = kwargs.get('exception', None)
        headers = getattr(exception, 'headers', None)
        override_content = any(getattr(exception, x, None) is not None for x in ('text', 'xml', 'json'))
        finish_with_exception = exception is not None and (
            199 < status_code < 400 or  # raise HTTPError(200) to finish page immediately
            override_content
        )

        if headers:
            for (name, value) in headers.iteritems():
                self.set_header(name, value)

        if finish_with_exception:
            self.json.clear()

            if getattr(exception, 'text', None) is not None:
                self.doc.clear()
                self.text = exception.text
            elif getattr(exception, 'json', None) is not None:
                self.text = None
                self.doc.clear()
                self.json.put(exception.json)
            elif getattr(exception, 'xml', None) is not None:
                self.text = None
                # cannot clear self.doc due to backwards compatibility, a bug actually
                self.doc.put(exception.xml)

            self.finish_with_postprocessors()
            return

        return super(PageHandler, self).write_error(status_code, **kwargs)

    def finish(self, chunk=None):
        if hasattr(self, 'finish_timeout_handle'):
            IOLoop.instance().remove_timeout(self.finish_timeout_handle)

        def _finish_with_async_hook():
            self.log.stage_tag('postprocess')

            if hasattr(self, 'active_limit'):
                self.active_limit.release()

            super(PageHandler, self).finish(chunk)

            IOLoop.instance().add_timeout(
                time.time() + 0.1,
                partial(self.log.request_finish_hook, self._status_code, self.request.method, self.request.uri))

        try:
            self._call_postprocessors(self._late_postprocessors, _finish_with_async_hook)
        except:
            self.log.exception('Error during late postprocessing stage, finishing with an exception')
            self._status_code = 500
            _finish_with_async_hook()

    def flush(self, include_footers=False, **kwargs):
        self.log.stage_tag('finish')
        self.log.log_stages()

        if self._prepared and (self.debug.debug_mode.enabled or self.debug.debug_mode.error_debug):
            try:
                self._response_size = sum(map(len, self._write_buffer))
                original_headers = {'Content-Length': str(self._response_size)}
                response_headers = dict(self._headers, **original_headers)
                original_response = {
                    'buffer': base64.encodestring(''.join(self._write_buffer)),
                    'headers': response_headers,
                    'code': self._status_code
                }

                res = self.debug.get_debug_page(self._status_code, response_headers, original_response)

                if self.debug.debug_mode.enabled:
                    # change status code only if debug was explicitly requested
                    self._status_code = 200

                if self.debug.debug_mode.inherited:
                    self.set_header(PageHandlerDebug.DEBUG_HEADER_NAME, True)

                self.set_header('Content-disposition', '')
                self.set_header('Content-Length', str(len(res)))
                self._write_buffer = [res]
            except Exception:
                self.log.exception('Cannot write debug info')

        super(PageHandler, self).flush(include_footers=False, **kwargs)

    def _log(self):
        super(PageHandler, self)._log()
        self.log.stage_tag('flush')
        self.log.finish_stages(self._status_code)

    # Preprocessors and postprocessors

    def _call_preprocessors(self, preprocessors, callback):
        self._chain_functions(list(preprocessors), callback)

    def _call_postprocessors(self, postprocessors, callback, *args):
        self._chain_functions(list(postprocessors), callback, *args)

    def _chain_functions(self, functions, callback, *args):
        if functions:
            func = functions.pop(0)
            self.log.debug('Started "%r"', func)
            start_time = time.time()

            def _callback(*args):
                time_delta = (time.time() - start_time) * 1000
                self.log.debug('Finished "%r" in %.2fms', func, time_delta)
                self._chain_functions(functions, callback, *args)

            func(self, *(args + (_callback,)))
        else:
            callback(*args)

    @staticmethod
    def add_preprocessor(*preprocessors_list):
        def _method_wrapper(fn):
            def _method(self, *args, **kwargs):
                self._call_preprocessors(preprocessors_list, partial(fn, self, *args, **kwargs))
            return _method
        return _method_wrapper

    def add_template_postprocessor(self, postprocessor):
        self._template_postprocessors.append(postprocessor)

    def add_early_postprocessor(self, postprocessor):
        self._early_postprocessors.append(postprocessor)

    def add_late_postprocessor(self, postprocessor):
        self._late_postprocessors.append(postprocessor)

    # Producers

    def _generic_producer(self, callback):
        self.log.debug('finishing plaintext')
        callback(self.text)

    def set_plaintext_response(self, text):
        self.text = text

    def xml_from_file(self, filename):
        return self.xml_producer.xml_from_file(filename)

    def set_xsl(self, filename):
        return self.xml_producer.set_xsl(filename)

    def set_template(self, filename):
        return self.json_producer.set_template(filename)

    # TODO: Will be removed
    def check_xsrf_cookie(self):
        pass
class BaseHandler(tornado.web.RequestHandler):

    preprocessors = ()  # to restore tornado.web.RequestHandler compatibility

    def __init__(self, application, request, logger, request_id=None, app_globals=None, **kwargs):
        self._prepared = False

        if request_id is None:
            raise Exception('no request_id for {} provided'.format(self.__class__))

        if app_globals is None:
            raise Exception('{} need to have app_globals'.format(self.__class__))

        self.name = self.__class__.__name__
        self.request_id = request_id
        self.log = logger
        self.log.register_handler(self)
        self.config = app_globals.config

        super(BaseHandler, self).__init__(application, request, logger=self.log, **kwargs)

        self._app_globals = app_globals
        self._debug_access = None
        self._template_postprocessors = []
        self._early_postprocessors = []
        self._late_postprocessors = []
        self._returned_methods = set()

        self._http_client = HttpClient(self, self._app_globals.curl_http_client, self.modify_http_client_request)

        # this is deprecated
        if hasattr(self.config, 'postprocessor'):
            self.add_template_postprocessor(self.config.postprocessor)

        self.text = None

    def __repr__(self):
        return '.'.join([self.__module__, self.__class__.__name__])

    def initialize(self, logger=None, **kwargs):
        # Hides logger keyword argument from incompatible tornado versions
        super(BaseHandler, self).initialize(**kwargs)

    def prepare(self):
        self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
        self.debug = PageHandlerDebug(self)

        self.json_producer = frontik.producers.json_producer.JsonProducer(
            self, self._app_globals.json, getattr(self, 'json_encoder', None))
        self.json = self.json_producer.json

        self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self._app_globals.xml)
        self.xml = self.xml_producer  # deprecated synonym
        self.doc = self.xml_producer.doc

        if self.get_argument('nopost', None) is not None:
            self.require_debug_access()
            self.apply_postprocessor = False
            self.log.debug('apply_postprocessor = False due to "nopost" argument')
        else:
            self.apply_postprocessor = True

        if tornado.options.options.long_request_timeout:
            # add long requests timeout
            self.finish_timeout_handle = IOLoop.instance().add_timeout(
                time.time() + tornado.options.options.long_request_timeout, self.__handle_long_request)

        self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', log=self.log.info)
        self._prepared = True

    def require_debug_access(self, login=None, passwd=None):
        if self._debug_access is None:
            if tornado.options.options.debug:
                self._debug_access = True
            else:
                check_login = login if login is not None else tornado.options.options.debug_login
                check_passwd = passwd if passwd is not None else tornado.options.options.debug_password
                self._debug_access = frontik.auth.passed_basic_auth(self, check_login, check_passwd)

            if not self._debug_access:
                raise HTTPError(401, headers={'WWW-Authenticate': 'Basic realm="Secure Area"'})

    def set_default_headers(self):
        self._headers = tornado.httputil.HTTPHeaders({
            'Server': 'Frontik/{0}'.format(frontik.version)
        })

    def decode_argument(self, value, name=None):
        try:
            return super(BaseHandler, self).decode_argument(value, name)
        except (UnicodeError, tornado.web.HTTPError):
            self.log.warning('cannot decode utf-8 query parameter, trying other charsets')

        try:
            return frontik.util.decode_string_from_charset(value)
        except UnicodeError:
            self.log.exception('cannot decode argument, ignoring invalid chars')
            return value.decode('utf-8', 'ignore')

    # TODO: change signature after Tornado 3 migration
    def set_status(self, status_code, **kwargs):
        if status_code not in httplib.responses:
            status_code = 503
        super(BaseHandler, self).set_status(status_code, **kwargs)

    @staticmethod
    def add_callback(callback):
        IOLoop.instance().add_callback(callback)

    @staticmethod
    def add_timeout(deadline, callback):
        IOLoop.instance().add_timeout(deadline, callback)

    # Requests handling

    @tornado.web.asynchronous
    def get(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def post(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.post_page))
        self._finish_page()

    @tornado.web.asynchronous
    def head(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def delete(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.delete_page))
        self._finish_page()

    @tornado.web.asynchronous
    def put(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.put_page))
        self._finish_page()

    def options(self, *args, **kwargs):
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def _save_return_value(self, handler_method, *args, **kwargs):
        def is_handler_method(function_name):
            return function_name in {'get_page', 'post_page', 'put_page', 'delete_page'}

        return_value = handler_method(*args, **kwargs)

        if hasattr(self, 'handle_return_value'):
            method_name = handler_method.__name__
            if is_handler_method(method_name) and method_name not in self._returned_methods:
                self._returned_methods.add(method_name)
                self.handle_return_value(method_name, return_value)

    def get_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def post_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def put_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def delete_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def __get_allowed_methods(self):
        return [name for name in ('get', 'post', 'put', 'delete') if '{0}_page'.format(name) in vars(self.__class__)]

    # HTTP client methods

    def modify_http_client_request(self, request):
        return request

    # Finish page

    def check_finished(self, callback, *args, **kwargs):
        original_callback = callback
        if args or kwargs:
            callback = partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            if self._finished:
                self.log.warn('page was already finished, {0} ignored'.format(original_callback))
            else:
                callback(*args, **kwargs)

        return wrapper

    def _finish_page(self):
        self.finish_group.try_finish()

    def finish_with_postprocessors(self):
        self.finish_group.finish()

    def _finish_page_cb(self):
        if not self._finished:
            def _callback():
                self.log.stage_tag('page')

                if self.text is not None:
                    producer = self._generic_producer
                elif not self.json.is_empty():
                    producer = self.json_producer
                else:
                    producer = self.xml_producer

                self.log.info('using %s producer', producer)

                if self.apply_postprocessor:
                    producer(partial(self._call_postprocessors, self._template_postprocessors, self.finish))
                else:
                    producer(self.finish)

            self._call_postprocessors(self._early_postprocessors, _callback)
        else:
            self.log.warning('trying to finish already finished page, probably bug in a workflow, ignoring')

    def __handle_long_request(self):
        self.log.warning("long request detected (uri: {0})".format(self.request.uri))
        if tornado.options.options.kill_long_requests:
            self.send_error()

    def on_connection_close(self):
        self.finish_group.abort()
        self.log.stage_tag('page')
        self.log.log_stages(408)
        raise HTTPError(408, 'Client closed the connection: aborting request')

    def send_error(self, status_code=500, **kwargs):
        self.log.stage_tag('page')

        if self._headers_written:
            super(BaseHandler, self).send_error(status_code, **kwargs)

        self.clear()

        set_status_kwargs = {}
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                set_status_kwargs['reason'] = exception.reason

        self.set_status(status_code, **set_status_kwargs)  # TODO: add explicit reason kwarg after Tornado 3 migration

        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            self.log.exception('Uncaught exception in write_error')
            if not self._finished:
                self.finish()

    def write_error(self, status_code=500, **kwargs):
        # write_error in Frontik must be asynchronous when handling custom errors (due to XSLT)
        # e.g. raise HTTPError(503) is syncronous and generates a standard Tornado error page,
        # whereas raise HTTPError(503, xml=...) will call finish_with_postprocessors()
        # the solution is to move self.finish() from send_error to write_error
        # so any write_error override must call either finish() or finish_with_postprocessors() in the end
        # in Tornado 3 it may be better to rewrite this mechanism with futures

        if 'exception' in kwargs:
            exception = kwargs['exception']  # Old Tornado
        elif 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
        else:
            exception = None

        headers = getattr(exception, 'headers', None)
        override_content = any(getattr(exception, x, None) is not None for x in ('text', 'xml', 'json'))
        finish_with_exception = exception is not None and (
            199 < status_code < 400 or  # raise HTTPError(200) to finish page immediately
            override_content
        )

        if headers:
            for (name, value) in headers.iteritems():
                self.set_header(name, value)

        if finish_with_exception:
            self.json.clear()

            if getattr(exception, 'text', None) is not None:
                self.doc.clear()
                self.text = exception.text
            elif getattr(exception, 'json', None) is not None:
                self.text = None
                self.doc.clear()
                self.json.put(exception.json)
            elif getattr(exception, 'xml', None) is not None:
                self.text = None
                # cannot clear self.doc due to backwards compatibility, a bug actually
                self.doc.put(exception.xml)

            self.finish_with_postprocessors()
            return

        self.set_header('Content-Type', 'text/html; charset=UTF-8')
        return super(BaseHandler, self).write_error(status_code, **kwargs)

    def finish(self, chunk=None):
        if hasattr(self, 'finish_timeout_handle'):
            IOLoop.instance().remove_timeout(self.finish_timeout_handle)

        def _finish_with_async_hook():
            self.log.stage_tag('postprocess')

            if hasattr(self, 'active_limit'):
                self.active_limit.release()

            super(BaseHandler, self).finish(chunk)

            IOLoop.instance().add_timeout(
                time.time() + 0.1,
                partial(self.log.request_finish_hook, self._status_code, self.request.method, self.request.uri)
            )

        try:
            self._call_postprocessors(self._late_postprocessors, _finish_with_async_hook)
        except:
            self.log.exception('error during late postprocessing stage, finishing with an exception')
            self._status_code = 500
            _finish_with_async_hook()

    def flush(self, include_footers=False, **kwargs):
        self.log.stage_tag('finish')
        self.log.info('finished handler %r', self)

        if self._prepared and (self.debug.debug_mode.enabled or self.debug.debug_mode.error_debug):
            try:
                self._response_size = sum(map(len, self._write_buffer))
                original_headers = {'Content-Length': str(self._response_size)}
                response_headers = dict(
                    getattr(self, '_DEFAULT_HEADERS', {}).items() + self._headers.items(), **original_headers
                )
                original_response = {
                    'buffer': base64.encodestring(''.join(self._write_buffer)),
                    'headers': response_headers,
                    'code': self._status_code
                }

                res = self.debug.get_debug_page(
                    self._status_code, response_headers, original_response, self.log.get_current_total()
                )

                if self.debug.debug_mode.enabled:
                    # change status code only if debug was explicitly requested
                    self._status_code = 200

                if self.debug.debug_mode.inherited:
                    self.set_header(PageHandlerDebug.DEBUG_HEADER_NAME, True)

                self.set_header('Content-disposition', '')
                self.set_header('Content-Length', str(len(res)))
                self._write_buffer = [res]
            except Exception:
                self.log.exception('cannot write debug info')

        super(BaseHandler, self).flush(include_footers=False, **kwargs)

    def _log(self):
        super(BaseHandler, self)._log()
        self.log.stage_tag('flush')
        self.log.log_stages(self._status_code)

    # Preprocessors and postprocessors

    def _call_preprocessors(self, preprocessors, callback):
        self._chain_functions(iter(preprocessors), callback, 'preprocessor')

    def _call_postprocessors(self, postprocessors, callback, *args):
        self._chain_functions(iter(postprocessors), callback, 'postprocessor', *args)

    def _chain_functions(self, functions, callback, chain_type, *args):
        try:
            func = next(functions)
            start_time = time.time()

            def _callback(*args):
                time_delta = (time.time() - start_time) * 1000
                self.log.info('finished %s "%r" in %.2fms', chain_type, func, time_delta)
                self._chain_functions(functions, callback, chain_type, *args)

            func(self, *(args + (_callback,)))
        except StopIteration:
            callback(*args)

    @staticmethod
    def add_preprocessor(*preprocessors_list):
        def _method_wrapper(fn):
            def _method(self, *args, **kwargs):
                callback = partial(self._save_return_value, fn, self, *args, **kwargs)
                return self._call_preprocessors(preprocessors_list, callback)
            return _method
        return _method_wrapper

    def add_template_postprocessor(self, postprocessor):
        self._template_postprocessors.append(postprocessor)

    def add_early_postprocessor(self, postprocessor):
        self._early_postprocessors.append(postprocessor)

    def add_late_postprocessor(self, postprocessor):
        self._late_postprocessors.append(postprocessor)

    # Producers

    def _generic_producer(self, callback):
        self.log.debug('finishing plaintext')
        callback(self.text)

    # Deprecated, use self.text directly
    def set_plaintext_response(self, text):
        self.text = text

    def xml_from_file(self, filename):
        return self.xml_producer.xml_from_file(filename)

    def set_xsl(self, filename):
        return self.xml_producer.set_xsl(filename)

    def set_template(self, filename):
        return self.json_producer.set_template(filename)
class BaseHandler(tornado.web.RequestHandler):

    preprocessors = ()

    def __init__(self, application, request, **kwargs):
        self._prepared = False
        self.name = self.__class__.__name__
        self.request_id = request.request_id = RequestContext.get('request_id')
        self.config = application.config
        self.log = RequestLogger(request)
        self.text = None

        self._exception_hooks = []

        for initializer in application.loggers_initializers:
            initializer(self)

        super(BaseHandler, self).__init__(application, request, **kwargs)

        self._debug_access = None
        self._template_postprocessors = []
        self._early_postprocessors = []
        self._returned_methods = set()

        self._http_client = HttpClient(self, self.application.curl_http_client, self.modify_http_client_request)

    def __repr__(self):
        return '.'.join([self.__module__, self.__class__.__name__])

    def initialize(self, logger=None, **kwargs):
        super(BaseHandler, self).initialize(**kwargs)

    def prepare(self):
        self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
        self.debug_mode = DebugMode(self)
        self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish')

        self.json_producer = self.application.json.get_producer(self)
        self.json = self.json_producer.json

        self.xml_producer = self.application.xml.get_producer(self)
        self.xml = self.xml_producer  # deprecated synonym
        self.doc = self.xml_producer.doc

        self._prepared = True

        super(BaseHandler, self).prepare()

    def require_debug_access(self, login=None, passwd=None):
        if self._debug_access is None:
            if tornado.options.options.debug:
                debug_access = True
            else:
                check_login = login if login is not None else tornado.options.options.debug_login
                check_passwd = passwd if passwd is not None else tornado.options.options.debug_password
                error = frontik.auth.check_debug_auth(self, check_login, check_passwd)
                debug_access = (error is None)

            if not debug_access:
                code, headers = error
                raise HTTPError(code, headers=headers)

            self._debug_access = debug_access

    def set_default_headers(self):
        self._headers = tornado.httputil.HTTPHeaders({
            'Server': 'Frontik/{0}'.format(frontik.version),
            'X-Request-Id': self.request_id,
        })

    def decode_argument(self, value, name=None):
        try:
            return super(BaseHandler, self).decode_argument(value, name)
        except (UnicodeError, tornado.web.HTTPError):
            self.log.warning('cannot decode utf-8 query parameter, trying other charsets')

        try:
            return frontik.util.decode_string_from_charset(value)
        except UnicodeError:
            self.log.exception('cannot decode argument, ignoring invalid chars')
            return value.decode('utf-8', 'ignore')

    def set_status(self, status_code, reason=None):
        status_code, reason = process_status_code(status_code, reason)
        super(BaseHandler, self).set_status(status_code, reason=reason)

    def redirect(self, url, *args, **kwargs):
        self.log.info('redirecting to: %s', url)
        return super(BaseHandler, self).redirect(url, *args, **kwargs)

    def reverse_url(self, name, *args, **kwargs):
        return self.application.reverse_url(name, *args, **kwargs)

    @staticmethod
    def add_callback(callback):
        IOLoop.current().add_callback(callback)

    @staticmethod
    def add_timeout(deadline, callback):
        return IOLoop.current().add_timeout(deadline, callback)

    @staticmethod
    def remove_timeout(timeout):
        IOLoop.current().remove_timeout(timeout)

    @staticmethod
    def add_future(future, callback):
        IOLoop.current().add_future(future, callback)

    # Requests handling

    def _execute(self, transforms, *args, **kwargs):
        RequestContext.set('handler_name', repr(self))
        return super(BaseHandler, self)._execute(transforms, *args, **kwargs)

    @tornado.web.asynchronous
    def get(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def post(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.post_page))
        self._finish_page()

    @tornado.web.asynchronous
    def head(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def delete(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.delete_page))
        self._finish_page()

    @tornado.web.asynchronous
    def put(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.put_page))
        self._finish_page()

    def options(self, *args, **kwargs):
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def _save_return_value(self, handler_method, *args, **kwargs):
        def is_handler_method(function_name):
            return function_name in {'get_page', 'post_page', 'put_page', 'delete_page'}

        return_value = handler_method(*args, **kwargs)

        if hasattr(self, 'handle_return_value'):
            method_name = handler_method.__name__
            if is_handler_method(method_name) and method_name not in self._returned_methods:
                self._returned_methods.add(method_name)
                self.handle_return_value(method_name, return_value)

    def get_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def post_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def put_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def delete_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def __get_allowed_methods(self):
        return [name for name in ('get', 'post', 'put', 'delete') if '{0}_page'.format(name) in vars(self.__class__)]

    # HTTP client methods

    def modify_http_client_request(self, request):
        return request

    # Finish page

    def check_finished(self, callback, *args, **kwargs):
        original_callback = callback
        if args or kwargs:
            callback = partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            if self._finished:
                self.log.warning('page was already finished, {0} ignored'.format(original_callback))
            else:
                callback(*args, **kwargs)

        return wrapper

    def _finish_page(self):
        self.finish_group.try_finish()

    def finish_with_postprocessors(self):
        self.finish_group.finish()

    def _finish_page_cb(self):
        if not self._finished:
            def _callback():
                self.log.stage_tag('page')

                if self.text is not None:
                    producer = self._generic_producer
                elif not self.json.is_empty():
                    producer = self.json_producer
                else:
                    producer = self.xml_producer

                self.log.debug('using %s producer', producer)
                producer(partial(self._call_postprocessors, self._template_postprocessors, self.finish))

            self._call_postprocessors(self._early_postprocessors, _callback)
        else:
            self.log.warning('trying to finish already finished page, probably bug in a workflow, ignoring')

    def on_connection_close(self):
        self.finish_group.abort()
        self.log.stage_tag('page')
        self.log.log_stages(408)
        self.cleanup()

    def register_exception_hook(self, exception_hook):
        """
        Adds a function to the list of hooks, which are executed when `log_exception` is called.
        `exception_hook` must have the same signature as `log_exception`
        """
        self._exception_hooks.append(exception_hook)

    def log_exception(self, typ, value, tb):
        super(BaseHandler, self).log_exception(typ, value, tb)

        for exception_hook in self._exception_hooks:
            exception_hook(typ, value, tb)

    def send_error(self, status_code=500, **kwargs):
        """`send_error` is adapted to support `write_error` that can call `finish` asynchronously.
        """
        self.log.stage_tag('page')

        if self._headers_written:
            super(BaseHandler, self).send_error(status_code, **kwargs)

        self.clear()

        reason = None
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason

        self.set_status(status_code, reason=reason)

        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            self.log.exception('Uncaught exception in write_error')
            if not self._finished:
                self.finish()

    def write_error(self, status_code=500, **kwargs):
        """`write_error` can call `finish` asynchronously.
        This allows, for example, asynchronous templating on error pages.
        """
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
        else:
            exception = None

        headers = getattr(exception, 'headers', None)
        override_content = any(getattr(exception, x, None) is not None for x in ('text', 'xml', 'json'))
        finish_with_exception = isinstance(exception, HTTPError) and override_content

        if headers:
            for (name, value) in iteritems(headers):
                self.set_header(name, value)

        if finish_with_exception:
            self.json.clear()

            if getattr(exception, 'text', None) is not None:
                self.doc.clear()
                self.text = exception.text
            elif getattr(exception, 'json', None) is not None:
                self.text = None
                self.doc.clear()
                self.json.put(exception.json)
            elif getattr(exception, 'xml', None) is not None:
                self.text = None
                # cannot clear self.doc due to backwards compatibility, a bug actually
                self.doc.put(exception.xml)

            self.finish_with_postprocessors()
            return

        self.set_header('Content-Type', 'text/html; charset=UTF-8')
        return super(BaseHandler, self).write_error(status_code, **kwargs)

    def cleanup(self):
        if hasattr(self, 'active_limit'):
            self.active_limit.release()

    def finish(self, chunk=None):
        self.log.stage_tag('postprocess')
        super(BaseHandler, self).finish(chunk)
        self.cleanup()

    # Preprocessors and postprocessors

    def _call_preprocessors(self, preprocessors, callback):
        self._chain_functions(iter(preprocessors), callback, 'preprocessor')

    def _call_postprocessors(self, postprocessors, callback, *args):
        self._chain_functions(iter(postprocessors), callback, 'postprocessor', *args)

    def _chain_functions(self, functions, callback, chain_type, *args):
        try:
            func = next(functions)
            start_time = time.time()

            def _callback(*args):
                time_delta = (time.time() - start_time) * 1000
                self.log.debug('finished %s "%r" in %.2fms', chain_type, func, time_delta)
                self._chain_functions(functions, callback, chain_type, *args)

            func(self, *(args + (_callback,)))
        except StopIteration:
            callback(*args)

    @staticmethod
    def add_preprocessor(*preprocessors_list):
        def _method_wrapper(fn):
            def _method(self, *args, **kwargs):
                callback = partial(self._save_return_value, fn, self, *args, **kwargs)
                return self._call_preprocessors(preprocessors_list, callback)
            return _method
        return _method_wrapper

    def add_template_postprocessor(self, postprocessor):
        self._template_postprocessors.append(postprocessor)

    def add_early_postprocessor(self, postprocessor):
        self._early_postprocessors.append(postprocessor)

    # Producers

    def _generic_producer(self, callback):
        self.log.debug('finishing plaintext')
        callback(self.text)

    def xml_from_file(self, filename):
        return self.xml_producer.xml_from_file(filename)

    def set_xsl(self, filename):
        return self.xml_producer.set_xsl(filename)

    def set_template(self, filename):
        return self.json_producer.set_template(filename)
class PageHandler(tornado.web.RequestHandler):
    preprocessors = ()  # to restore tornado.web.RequestHandler compatibility

    def __init__(self, application, request, app_globals=None, **kwargs):
        self.handler_started = time.time()
        self._prepared = False

        if app_globals is None:
            raise Exception('{0} need to have app_globals'.format(PageHandler))

        self.name = self.__class__.__name__
        self.request_id = request.headers.get('X-Request-Id', str(global_stats.next_request_id()))
        logger_name = '.'.join(filter(None, [self.request_id, getattr(app_globals.config, 'app_name', None)]))
        self.log = PageLogger(self, logger_name, request.path or request.uri)

        self.config = app_globals.config

        super(PageHandler, self).__init__(application, request, logger=self.log, **kwargs)

        self._app_globals = app_globals
        self._debug_access = None

        # This wrapper is needed in case someone replaces self.fetch_request in runtime,
        # as happens in client legacy code.
        def fetch_request_wrapper(*args, **kwargs):
            return self.fetch_request(*args, **kwargs)

        self._http_client = HttpClient(self, app_globals.curl_http_client, fetch_request_wrapper)

        self._template_postprocessors = []
        self._early_postprocessors = []
        self._late_postprocessors = []

        # this is deprecated
        if hasattr(self.config, 'postprocessor'):
            self.add_template_postprocessor(self.config.postprocessor)

        self.text = None

    def __repr__(self):
        return '.'.join([self.__module__, self.__class__.__name__])

    def initialize(self, logger=None, **kwargs):
        # Hides logger keyword argument from incompatible tornado versions
        super(PageHandler, self).initialize(**kwargs)

    def prepare(self):
        self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
        self.debug = PageHandlerDebug(self)

        self.json_producer = frontik.producers.json_producer.JsonProducer(
            self, self._app_globals.json, getattr(self, 'json_encoder', None))
        self.json = self.json_producer.json

        self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self._app_globals.xml)
        self.xml = self.xml_producer  # deprecated synonym
        self.doc = self.xml_producer.doc

        if self.get_argument('nopost', None) is not None:
            self.require_debug_access()
            self.apply_postprocessor = False
            self.log.debug('apply_postprocessor = False due to "nopost" argument')
        else:
            self.apply_postprocessor = True

        if tornado.options.options.long_request_timeout:
            # add long requests timeout
            self.finish_timeout_handle = IOLoop.instance().add_timeout(
                time.time() + tornado.options.options.long_request_timeout, self.__handle_long_request)

        self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', log=self.log.debug)
        self._prepared = True

    def require_debug_access(self, login=None, passwd=None):
        if self._debug_access is None:
            if tornado.options.options.debug:
                self._debug_access = True
            else:
                check_login = login if login is not None else tornado.options.options.debug_login
                check_passwd = passwd if passwd is not None else tornado.options.options.debug_password
                self._debug_access = frontik.auth.passed_basic_auth(self, check_login, check_passwd)

            if not self._debug_access:
                raise HTTPError(401, headers={'WWW-Authenticate': 'Basic realm="Secure Area"'})

    def decode_argument(self, value, name=None):
        try:
            return super(PageHandler, self).decode_argument(value, name)
        except (UnicodeError, tornado.web.HTTPError):
            self.log.warn('Cannot decode utf-8 query parameter, trying other charsets')

        try:
            return frontik.util.decode_string_from_charset(value)
        except UnicodeError:
            self.log.exception('Cannot decode argument, ignoring invalid chars')
            return value.decode('utf-8', 'ignore')

    def check_finished(self, callback, *args, **kwargs):
        original_callback = callback
        if args or kwargs:
            callback = partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            if self._finished:
                self.log.warn('Page was already finished, {0} ignored'.format(original_callback))
            else:
                callback(*args, **kwargs)

        return wrapper

    def set_status(self, status_code):
        if status_code not in httplib.responses:
            status_code = 503
        super(PageHandler, self).set_status(status_code)

    @staticmethod
    def add_callback(callback):
        IOLoop.instance().add_callback(callback)

    @staticmethod
    def add_timeout(deadline, callback):
        IOLoop.instance().add_timeout(deadline, callback)

    # Requests handling

    @tornado.web.asynchronous
    def post(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.post_page))
        self._finish_page()

    @tornado.web.asynchronous
    def get(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def head(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def delete(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.delete_page))
        self._finish_page()

    @tornado.web.asynchronous
    def put(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, self._wrap_method(self.put_page))
        self._finish_page()

    def options(self, *args, **kwargs):
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def _wrap_method(self, handler_method):
        return handler_method

    def get_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def post_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def put_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def delete_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def __get_allowed_methods(self):
        return [name for name in ('get', 'post', 'put', 'delete') if '{0}_page'.format(name) in vars(self.__class__)]

    # HTTP client methods

    def group(self, futures, callback=None, name=None):
        return self._http_client.group(futures, callback, name)

    def get_url(self, url, data=None, headers=None, connect_timeout=None, request_timeout=None, callback=None,
                follow_redirects=True, labels=None, add_to_finish_group=True,
                parse_response=True, parse_on_error=False):
        return self._http_client.get_url(
            url, data, headers, connect_timeout, request_timeout, callback, follow_redirects, labels,
            add_to_finish_group, parse_response, parse_on_error
        )

    def post_url(self, url, data='', headers=None, files=None, connect_timeout=None, request_timeout=None,
                 callback=None, follow_redirects=True, content_type=None, labels=None,
                 add_to_finish_group=True, parse_response=True, parse_on_error=False):
        return self._http_client.post_url(
            url, data, headers, files, connect_timeout, request_timeout, callback, follow_redirects, content_type,
            labels, add_to_finish_group, parse_response, parse_on_error
        )

    def put_url(self, url, data='', headers=None, connect_timeout=None, request_timeout=None, callback=None,
                content_type=None, labels=None, add_to_finish_group=True, parse_response=True, parse_on_error=False):
        return self._http_client.put_url(
            url, data, headers, connect_timeout, request_timeout, callback, content_type, labels,
            add_to_finish_group, parse_response, parse_on_error
        )

    def delete_url(self, url, data='', headers=None, connect_timeout=None, request_timeout=None, callback=None,
                   content_type=None, labels=None, add_to_finish_group=True,
                   parse_response=True, parse_on_error=False):
        return self._http_client.delete_url(
            url, data, headers, connect_timeout, request_timeout, callback, content_type, labels,
            add_to_finish_group, parse_response, parse_on_error
        )

    def fetch_request(self, request, callback, add_to_finish_group=True):
        return self._http_client.fetch_request(request, callback, add_to_finish_group)

    # Finish page

    def _finish_page(self):
        self.finish_group.try_finish()

    def _force_finish(self):
        self.finish_group.finish()

    # this is the official way now, use it instead of _force_finish
    finish_with_postprocessors = _force_finish

    def _finish_page_cb(self):
        if not self._finished:
            self.log.stage_tag('page')

            def _callback():
                if self.text is not None:
                    producer = self._generic_producer
                elif not self.json.is_empty():
                    producer = self.json_producer
                else:
                    producer = self.xml_producer

                self.log.debug('Using {0} producer'.format(producer))

                if self.apply_postprocessor:
                    producer(partial(self._call_postprocessors, self._template_postprocessors, self.finish))
                else:
                    producer(self.finish)

            self._call_postprocessors(self._early_postprocessors, _callback)
        else:
            self.log.warn('trying to finish already finished page, probably bug in a workflow, ignoring')

    def __handle_long_request(self):
        self.log.warning("long request detected (uri: {0})".format(self.request.uri))
        if tornado.options.options.kill_long_requests:
            self.send_error()

    # headers kwarg is deprecated, remove after all usages are gone
    def send_error(self, status_code=500, headers=None, **kwargs):
        if self._headers_written:
            super(PageHandler, self).send_error(status_code, **kwargs)

        self.clear()
        self.set_status(status_code)

        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            self._logger.error("Uncaught exception in write_error", exc_info=True)

    def write_error(self, status_code=500, **kwargs):
        # write_error in Frontik must be asynchronous when handling custom errors (due to XSLT):
        # e.g. raise HTTPError(503) is synchronous and generates a standard Tornado error page,
        # whereas raise HTTPError(503, xml=...) will call finish_with_postprocessors().
        # The solution is to move self.finish() from send_error to write_error,
        # so any write_error override must call either finish() or finish_with_postprocessors() in the end.
        # In Tornado 3 it may be better to rewrite this mechanism with futures.
        exception = kwargs.get('exception', None)
        headers = getattr(exception, 'headers', None)
        override_content = any(getattr(exception, x, None) is not None for x in ('text', 'xml', 'json'))

        finish_with_exception = exception is not None and (
            199 < status_code < 400 or  # raise HTTPError(200) to finish page immediately
            override_content
        )

        if headers:
            for (name, value) in headers.iteritems():
                self.set_header(name, value)

        if finish_with_exception:
            self.json.clear()

            if getattr(exception, 'text', None) is not None:
                self.doc.clear()
                self.text = exception.text
            elif getattr(exception, 'json', None) is not None:
                self.text = None
                self.doc.clear()
                self.json.put(exception.json)
            elif getattr(exception, 'xml', None) is not None:
                self.text = None
                # cannot clear self.doc due to backwards compatibility, a bug actually
                self.doc.put(exception.xml)

            self.finish_with_postprocessors()
            return

        return super(PageHandler, self).write_error(status_code, **kwargs)

    def finish(self, chunk=None):
        if hasattr(self, 'finish_timeout_handle'):
            IOLoop.instance().remove_timeout(self.finish_timeout_handle)

        def _finish_with_async_hook():
            self.log.stage_tag('postprocess')

            if hasattr(self, 'active_limit'):
                self.active_limit.release()

            super(PageHandler, self).finish(chunk)

            IOLoop.instance().add_timeout(
                time.time() + 0.1,
                partial(self.log.request_finish_hook, self._status_code, self.request.method, self.request.uri)
            )

        try:
            self._call_postprocessors(self._late_postprocessors, _finish_with_async_hook)
        except:
            self.log.exception('Error during late postprocessing stage, finishing with an exception')
            self._status_code = 500
            _finish_with_async_hook()

    def flush(self, include_footers=False, **kwargs):
        self.log.stage_tag('finish')
        self.log.log_stages()

        if self._prepared and (self.debug.debug_mode.enabled or self.debug.debug_mode.error_debug):
            try:
                self._response_size = sum(map(len, self._write_buffer))
                original_headers = {'Content-Length': str(self._response_size)}
                response_headers = dict(self._headers, **original_headers)
                original_response = {
                    'buffer': base64.encodestring(''.join(self._write_buffer)),
                    'headers': response_headers,
                    'code': self._status_code
                }

                res = self.debug.get_debug_page(self._status_code, response_headers, original_response)

                if self.debug.debug_mode.enabled:
                    # change status code only if debug was explicitly requested
                    self._status_code = 200

                if self.debug.debug_mode.inherited:
                    self.set_header(PageHandlerDebug.DEBUG_HEADER_NAME, True)

                self.set_header('Content-disposition', '')
                self.set_header('Content-Length', str(len(res)))
                self._write_buffer = [res]
            except Exception:
                self.log.exception('Cannot write debug info')

        super(PageHandler, self).flush(include_footers=False, **kwargs)

    def _log(self):
        super(PageHandler, self)._log()
        self.log.stage_tag('flush')
        self.log.finish_stages(self._status_code)

    # Preprocessors and postprocessors

    def _call_preprocessors(self, preprocessors, callback):
        self._chain_functions(list(preprocessors), callback)

    def _call_postprocessors(self, postprocessors, callback, *args):
        self._chain_functions(list(postprocessors), callback, *args)

    def _chain_functions(self, functions, callback, *args):
        if functions:
            func = functions.pop(0)
            self.log.debug('Started "%r"', func)
            start_time = time.time()

            def _callback(*args):
                time_delta = (time.time() - start_time) * 1000
                self.log.debug('Finished "%r" in %.2fms', func, time_delta)
                self._chain_functions(functions, callback, *args)

            func(self, *(args + (_callback,)))
        else:
            callback(*args)

    @staticmethod
    def add_preprocessor(*preprocessors_list):
        def _method_wrapper(fn):
            def _method(self, *args, **kwargs):
                self._call_preprocessors(preprocessors_list, partial(fn, self, *args, **kwargs))
            return _method
        return _method_wrapper

    def add_template_postprocessor(self, postprocessor):
        self._template_postprocessors.append(postprocessor)

    def add_early_postprocessor(self, postprocessor):
        self._early_postprocessors.append(postprocessor)

    def add_late_postprocessor(self, postprocessor):
        self._late_postprocessors.append(postprocessor)

    # Producers

    def _generic_producer(self, callback):
        self.log.debug('finishing plaintext')
        callback(self.text)

    def set_plaintext_response(self, text):
        self.text = text

    def xml_from_file(self, filename):
        return self.xml_producer.xml_from_file(filename)

    def set_xsl(self, filename):
        return self.xml_producer.set_xsl(filename)

    def set_template(self, filename):
        return self.json_producer.set_template(filename)

    # TODO: Will be removed
    def check_xsrf_cookie(self):
        pass
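
# Usage sketch (illustrative only, not part of the framework). The page class, backend URL,
# XSL template name and the callback signature below are assumptions, shown only to make the
# control flow above concrete: get_url() registers its callback in finish_group, and once every
# registered callback has fired, the group runs _finish_page_cb(), which picks the plaintext,
# JSON or XML producer depending on which of self.text, self.json or self.doc was filled.

class _ExamplePage(PageHandler):
    def get_page(self):
        # assumed callback signature for parsed responses: (parsed body, response)
        def _on_content(xml, response):
            self.doc.put(xml)  # filling self.doc selects the XML producer

        self.set_xsl('example.xsl')  # hypothetical XSL template name
        self.get_url('http://backend.example/content', callback=_on_content)
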
class BaseHandler(tornado.web.RequestHandler):
    preprocessors = ()  # to restore tornado.web.RequestHandler compatibility

    def __init__(self, application, request, logger, request_id=None, **kwargs):
        self._prepared = False

        if request_id is None:
            raise Exception('no request_id for {} provided'.format(self.__class__))

        self.name = self.__class__.__name__
        self.request_id = request_id
        self.config = application.config
        self.log = logger
        self.log.register_handler(self)

        self._exception_hooks = []

        for initializer in application.loggers_initializers:
            initializer(self)

        super(BaseHandler, self).__init__(application, request, logger=self.log, **kwargs)

        self._debug_access = None
        self._template_postprocessors = []
        self._early_postprocessors = []
        self._late_postprocessors = []
        self._returned_methods = set()

        self._http_client = HttpClient(self, self.application.curl_http_client, self.modify_http_client_request)

        self.text = None

    def __repr__(self):
        return '.'.join([self.__module__, self.__class__.__name__])

    def initialize(self, logger=None, **kwargs):
        # Hides logger keyword argument from incompatible tornado versions
        super(BaseHandler, self).initialize(**kwargs)

    def prepare(self):
        self.active_limit = frontik.handler_active_limit.PageHandlerActiveLimit(self)
        self.debug = PageHandlerDebug(self)

        self.json_producer = frontik.producers.json_producer.JsonProducer(
            self, self.application.json, getattr(self, 'json_encoder', None))
        self.json = self.json_producer.json

        self.xml_producer = frontik.producers.xml_producer.XmlProducer(self, self.application.xml)
        self.xml = self.xml_producer  # deprecated synonym
        self.doc = self.xml_producer.doc

        self.finish_group = AsyncGroup(self.check_finished(self._finish_page_cb), name='finish', logger=self.log)
        self._prepared = True

    def require_debug_access(self, login=None, passwd=None):
        if self._debug_access is None:
            if tornado.options.options.debug:
                debug_access = True
            else:
                check_login = login if login is not None else tornado.options.options.debug_login
                check_passwd = passwd if passwd is not None else tornado.options.options.debug_password
                error = frontik.auth.check_debug_auth(self, check_login, check_passwd)
                debug_access = (error is None)

                if not debug_access:
                    code, headers = error
                    raise HTTPError(code, headers=headers)

            self._debug_access = debug_access

    def set_default_headers(self):
        self._headers = tornado.httputil.HTTPHeaders({'Server': 'Frontik/{0}'.format(frontik.version)})

    def decode_argument(self, value, name=None):
        try:
            return super(BaseHandler, self).decode_argument(value, name)
        except (UnicodeError, tornado.web.HTTPError):
            self.log.warning('cannot decode utf-8 query parameter, trying other charsets')

        try:
            return frontik.util.decode_string_from_charset(value)
        except UnicodeError:
            self.log.exception('cannot decode argument, ignoring invalid chars')
            return value.decode('utf-8', 'ignore')

    def set_status(self, status_code, reason=None):
        status_code, reason = process_status_code(status_code, reason)
        super(BaseHandler, self).set_status(status_code, reason=reason)

    @staticmethod
    def add_callback(callback):
        IOLoop.instance().add_callback(callback)

    @staticmethod
    def add_timeout(deadline, callback):
        IOLoop.instance().add_timeout(deadline, callback)

    @staticmethod
    def add_future(future, callback):
        IOLoop.instance().add_future(future, callback)

    # Requests handling

    @tornado.web.asynchronous
    def get(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def post(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.post_page))
        self._finish_page()

    @tornado.web.asynchronous
    def head(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.get_page))
        self._finish_page()

    @tornado.web.asynchronous
    def delete(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.delete_page))
        self._finish_page()

    @tornado.web.asynchronous
    def put(self, *args, **kwargs):
        self.log.stage_tag('prepare')
        self._call_preprocessors(self.preprocessors, partial(self._save_return_value, self.put_page))
        self._finish_page()

    def options(self, *args, **kwargs):
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def _save_return_value(self, handler_method, *args, **kwargs):
        def is_handler_method(function_name):
            return function_name in {'get_page', 'post_page', 'put_page', 'delete_page'}

        return_value = handler_method(*args, **kwargs)

        if hasattr(self, 'handle_return_value'):
            method_name = handler_method.__name__
            if is_handler_method(method_name) and method_name not in self._returned_methods:
                self._returned_methods.add(method_name)
                self.handle_return_value(method_name, return_value)

    def get_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def post_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def put_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def delete_page(self):
        """ This method can be implemented in the subclass """
        raise HTTPError(405, headers={'Allow': ', '.join(self.__get_allowed_methods())})

    def __get_allowed_methods(self):
        return [name for name in ('get', 'post', 'put', 'delete') if '{0}_page'.format(name) in vars(self.__class__)]

    # HTTP client methods

    def modify_http_client_request(self, request):
        return request

    # Finish page

    def check_finished(self, callback, *args, **kwargs):
        original_callback = callback
        if args or kwargs:
            callback = partial(callback, *args, **kwargs)

        def wrapper(*args, **kwargs):
            if self._finished:
                self.log.warn('page was already finished, {0} ignored'.format(original_callback))
            else:
                callback(*args, **kwargs)

        return wrapper

    def _finish_page(self):
        self.finish_group.try_finish()

    def finish_with_postprocessors(self):
        self.finish_group.finish()

    def _finish_page_cb(self):
        if not self._finished:
            def _callback():
                self.log.stage_tag('page')

                if self.text is not None:
                    producer = self._generic_producer
                elif not self.json.is_empty():
                    producer = self.json_producer
                else:
                    producer = self.xml_producer

                self.log.debug('using %s producer', producer)
                producer(partial(self._call_postprocessors, self._template_postprocessors, self.finish))

            self._call_postprocessors(self._early_postprocessors, _callback)
        else:
            self.log.warning('trying to finish already finished page, probably bug in a workflow, ignoring')

    def on_connection_close(self):
        self.finish_group.abort()
        self.log.stage_tag('page')
        self.log.log_stages(408)
        self.cleanup()

    def register_exception_hook(self, exception_hook):
        """
        Adds a function to the list of hooks, which are executed when `log_exception` is called.
        `exception_hook` must have the same signature as `log_exception`.
        """
        self._exception_hooks.append(exception_hook)

    def log_exception(self, typ, value, tb):
        super(BaseHandler, self).log_exception(typ, value, tb)

        for exception_hook in self._exception_hooks:
            exception_hook(typ, value, tb)

    def send_error(self, status_code=500, **kwargs):
        self.log.stage_tag('page')

        if self._headers_written:
            super(BaseHandler, self).send_error(status_code, **kwargs)

        self.clear()

        reason = None
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
            if isinstance(exception, HTTPError) and exception.reason:
                reason = exception.reason

        self.set_status(status_code, reason=reason)

        try:
            self.write_error(status_code, **kwargs)
        except Exception:
            self.log.exception('Uncaught exception in write_error')
            if not self._finished:
                self.finish()

    def write_error(self, status_code=500, **kwargs):
        # write_error in Frontik must be asynchronous when handling custom errors (due to XSLT):
        # e.g. raise HTTPError(503) is synchronous and generates a standard Tornado error page,
        # whereas raise HTTPError(503, xml=...) will call finish_with_postprocessors().
        # The solution is to move self.finish() from send_error to write_error,
        # so any write_error override must call either finish() or finish_with_postprocessors() in the end.
        # In Tornado 3 it may be better to rewrite this mechanism with futures.
        if 'exc_info' in kwargs:
            exception = kwargs['exc_info'][1]
        else:
            exception = None

        headers = getattr(exception, 'headers', None)
        override_content = any(getattr(exception, x, None) is not None for x in ('text', 'xml', 'json'))

        finish_with_exception = exception is not None and (
            199 < status_code < 400 or  # raise HTTPError(200) to finish page immediately
            override_content
        )

        if headers:
            for (name, value) in headers.iteritems():
                self.set_header(name, value)

        if finish_with_exception:
            self.json.clear()

            if getattr(exception, 'text', None) is not None:
                self.doc.clear()
                self.text = exception.text
            elif getattr(exception, 'json', None) is not None:
                self.text = None
                self.doc.clear()
                self.json.put(exception.json)
            elif getattr(exception, 'xml', None) is not None:
                self.text = None
                # cannot clear self.doc due to backwards compatibility, a bug actually
                self.doc.put(exception.xml)

            self.finish_with_postprocessors()
            return

        self.set_header('Content-Type', 'text/html; charset=UTF-8')
        return super(BaseHandler, self).write_error(status_code, **kwargs)

    def cleanup(self):
        if hasattr(self, 'active_limit'):
            self.active_limit.release()

    def finish(self, chunk=None):
        def _finish_with_async_hook():
            self.log.stage_tag('postprocess')

            super(BaseHandler, self).finish(chunk)
            self.cleanup()

            IOLoop.instance().add_timeout(
                time.time() + 0.1,
                partial(self.log.request_finish_hook, self._status_code, self.request.method, self.request.uri)
            )

        try:
            self._call_postprocessors(self._late_postprocessors, _finish_with_async_hook)
        except:
            self.log.exception('error during late postprocessing stage, finishing with an exception')
            self._status_code = 500
            _finish_with_async_hook()

    def flush(self, include_footers=False, **kwargs):
        self.log.stage_tag('finish')
        self.log.info('finished handler %r', self)

        if self._prepared and self.debug.debug_mode.enabled:
            try:
                self._response_size = sum(map(len, self._write_buffer))
                original_headers = {'Content-Length': str(self._response_size)}
                response_headers = dict(
                    getattr(self, '_DEFAULT_HEADERS', {}).items() + self._headers.items(), **original_headers)
                original_response = {
                    'buffer': base64.encodestring(''.join(self._write_buffer)),
                    'headers': response_headers,
                    'code': self._status_code
                }

                res = self.debug.get_debug_page(
                    self._status_code, response_headers, original_response, self.log.get_current_total())

                if self.debug.debug_mode.inherited:
                    self.set_header(PageHandlerDebug.DEBUG_HEADER_NAME, True)

                self.set_header('Content-disposition', '')
                self.set_header('Content-Length', str(len(res)))
                self._write_buffer = [res]
                self._status_code = 200
            except Exception:
                self.log.exception('cannot write debug info')

        super(BaseHandler, self).flush(include_footers=False, **kwargs)

    def _log(self):
        super(BaseHandler, self)._log()
        self.log.stage_tag('flush')
        self.log.log_stages(self._status_code)

    # Preprocessors and postprocessors

    def _call_preprocessors(self, preprocessors, callback):
        self._chain_functions(iter(preprocessors), callback, 'preprocessor')

    def _call_postprocessors(self, postprocessors, callback, *args):
        self._chain_functions(iter(postprocessors), callback, 'postprocessor', *args)

    def _chain_functions(self, functions, callback, chain_type, *args):
        try:
            func = next(functions)
            start_time = time.time()

            def _callback(*args):
                time_delta = (time.time() - start_time) * 1000
                self.log.debug('finished %s "%r" in %.2fms', chain_type, func, time_delta)
                self._chain_functions(functions, callback, chain_type, *args)

            func(self, *(args + (_callback,)))
        except StopIteration:
            callback(*args)

    @staticmethod
    def add_preprocessor(*preprocessors_list):
        def _method_wrapper(fn):
            def _method(self, *args, **kwargs):
                callback = partial(self._save_return_value, fn, self, *args, **kwargs)
                return self._call_preprocessors(preprocessors_list, callback)
            return _method
        return _method_wrapper

    def add_template_postprocessor(self, postprocessor):
        self._template_postprocessors.append(postprocessor)

    def add_early_postprocessor(self, postprocessor):
        self._early_postprocessors.append(postprocessor)

    def add_late_postprocessor(self, postprocessor):
        self._late_postprocessors.append(postprocessor)

    # Producers

    def _generic_producer(self, callback):
        self.log.debug('finishing plaintext')
        callback(self.text)

    # Deprecated, use self.text directly
    def set_plaintext_response(self, text):
        self.text = text

    def xml_from_file(self, filename):
        return self.xml_producer.xml_from_file(filename)

    def set_xsl(self, filename):
        return self.xml_producer.set_xsl(filename)

    def set_template(self, filename):
        return self.json_producer.set_template(filename)
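
# Error-page sketch (illustrative only, not part of the framework). It demonstrates the
# write_error() contract documented in the comments above: attaching a json (or text / xml)
# payload to an HTTPError makes write_error() render it through finish_with_postprocessors()
# and the normal producer chain instead of the default Tornado error page. The handler name
# and payload are hypothetical; since it is not shown here whether this HTTPError accepts the
# payload as a constructor kwarg, the attribute is set explicitly.

class _FailingPage(BaseHandler):
    def get_page(self):
        error = HTTPError(503)
        error.json = {'error': 'backend unavailable'}  # picked up by write_error() via getattr()
        raise error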