def __call__(self, env, start_response):
    """WSGI entry point: record an ActionLog entry around matching requests.

    Requests whose method is in ``self.methods_to_analyze`` and whose
    path matches a known URL pattern get an ``ActionLog`` row saved
    (actor id, UTC start/end timestamps, action name/group/type,
    cluster id and additional info). All other requests are passed
    straight through to the wrapped application.

    :param env: WSGI environ dict
    :param start_response: WSGI start_response callable
    :returns: response iterable (a tee'd copy for matched requests)
    """
    # Guard clauses: anything we don't analyze goes straight through.
    if env['REQUEST_METHOD'] not in self.methods_to_analyze:
        return self.app(env, start_response)

    url_matcher = self._get_url_matcher(url=env['PATH_INFO'])
    if not url_matcher:
        return self.app(env, start_response)

    # Capture the request body up front: the wrapped app will consume
    # wsgi.input, so it must be read before calling self.app.
    request_body = utils.get_body_from_env(env)

    def save_headers_start_response(status, headers, *args):
        """Hook for saving response status for further processing"""
        self.status = status
        return start_response(status, headers, *args)

    # Prepare arguments for ActionLog instance creation
    create_kwargs = {}

    actor_id = self._get_actor_id(env)
    create_kwargs['actor_id'] = actor_id
    # save actor_id in env for further processing
    env['fuel.action.actor_id'] = actor_id

    # Use UTC (not naive local time) so recorded timestamps are
    # unambiguous regardless of the server's timezone.
    create_kwargs['start_timestamp'] = datetime.datetime.utcnow()
    response = self.app(env, save_headers_start_response)
    create_kwargs['end_timestamp'] = datetime.datetime.utcnow()

    # since response is an iterator, to avoid its exhaustion in the
    # analysing process we make two copies of it: one to be
    # processed in stats collection logic and the other to
    # propagate further on middleware stack
    response_to_analyse, response_to_propagate = \
        itertools.tee(response)

    create_kwargs['action_name'] = \
        compiled_urls_actions_mapping[url_matcher]['action_name']
    create_kwargs['action_group'] = \
        compiled_urls_actions_mapping[url_matcher]['action_group']
    create_kwargs['action_type'] = \
        consts.ACTION_TYPES.http_request
    create_kwargs['additional_info'] = \
        self._get_additional_info(env,
                                  request_body,
                                  response_to_analyse)

    # get cluster_id from url
    cluster_id = utils.get_group_from_matcher(url_matcher,
                                              env['PATH_INFO'],
                                              'cluster_id')
    if cluster_id:
        cluster_id = int(cluster_id)
    create_kwargs['cluster_id'] = cluster_id

    db.add(ActionLog(**create_kwargs))
    db.commit()

    return response_to_propagate
def test_get_body_from_env(self):
    """Body extraction leaves wsgi.input readable; falsy CONTENT_LENGTH
    yields an empty body.
    """
    payload = "Hi! I'm test body"
    payload_len = len(payload)
    env = {
        "CONTENT_LENGTH": str(payload_len),
        "wsgi.input": six.StringIO(payload),
    }

    extracted = utils.get_body_from_env(env)

    # wsgi.input must still be present and fully readable afterwards
    self.assertTrue(env.get("wsgi.input"))
    remaining = env["wsgi.input"].read(int(env["CONTENT_LENGTH"]))
    self.assertEqual(len(remaining), payload_len)
    self.assertEqual(extracted, payload)

    # every falsy CONTENT_LENGTH value produces an empty body
    for falsy_length in (None, '', 0):
        env["CONTENT_LENGTH"] = falsy_length
        self.assertEqual(utils.get_body_from_env(env), '')
def __call__(self, env, start_response):
    """WSGI entry point: record an ActionLog entry around matching requests.

    Requests whose method is in ``self.methods_to_analyze`` and whose
    path matches a known URL pattern get an ``ActionLog`` row saved
    (actor id, UTC start/end timestamps, action name/group/type,
    cluster id and additional info). All other requests are passed
    straight through to the wrapped application.

    :param env: WSGI environ dict
    :param start_response: WSGI start_response callable
    :returns: response iterable (a tee'd copy for matched requests)
    """
    if env["REQUEST_METHOD"] in self.methods_to_analyze:
        url_matcher = self._get_url_matcher(url=env["PATH_INFO"])
        if url_matcher:
            # Read the body before self.app consumes wsgi.input.
            request_body = utils.get_body_from_env(env)

            def save_headers_start_response(status, headers, *args):
                """Hook for saving response status for further processing."""
                # NOTE(review): stashing status on self assumes one
                # in-flight request per middleware instance -- confirm.
                self.status = status
                return start_response(status, headers, *args)

            # Prepare arguments for ActionLog instance creation
            create_kwargs = {}
            actor_id = self._get_actor_id(env)
            create_kwargs["actor_id"] = actor_id
            # save actor_id in env for further processing
            env["fuel.action.actor_id"] = actor_id
            # UTC timestamps bracket the wrapped application call.
            create_kwargs["start_timestamp"] = datetime.datetime.utcnow()
            response = self.app(env, save_headers_start_response)
            create_kwargs["end_timestamp"] = datetime.datetime.utcnow()
            # since response is an iterator, to avoid its exhaustion in
            # the analysing process we make two copies of it: one to be
            # processed in stats collection logic and the other to
            # propagate further on middleware stack
            response_to_analyse, response_to_propagate = itertools.tee(response)
            create_kwargs["action_name"] = compiled_urls_actions_mapping[url_matcher]["action_name"]
            create_kwargs["action_group"] = compiled_urls_actions_mapping[url_matcher]["action_group"]
            create_kwargs["action_type"] = consts.ACTION_TYPES.http_request
            create_kwargs["additional_info"] = self._get_additional_info(env, request_body, response_to_analyse)
            # get cluster_id from url
            cluster_id = utils.get_group_from_matcher(url_matcher, env["PATH_INFO"], "cluster_id")
            if cluster_id:
                cluster_id = int(cluster_id)
            create_kwargs["cluster_id"] = cluster_id
            db.add(ActionLog(**create_kwargs))
            db.commit()
            # Return the untouched tee so downstream middleware can
            # still iterate the full response.
            return response_to_propagate
    return self.app(env, start_response)