def request(self, req_and_resp, opt=None, headers=None):
    """Send the prepared request via Tornado's AsyncHTTPClient and map the response back."""
    # make sure all prepared state is clean before processing
    req, resp = req_and_resp
    req.reset()
    resp.reset()

    opt = opt or {}
    req, resp = super(TornadoClient, self).request((req, resp), opt)

    req.prepare(scheme=self.prepare_schemes(req), handle_files=True)
    req._patch(opt)

    composed_headers = self.compose_headers(req, headers, opt)
    tornado_headers = HTTPHeaders()
    for h in composed_headers:
        tornado_headers.add(*h)

    url = url_concat(req.url, req.query)
    rq = HTTPRequest(url=url, method=req.method.upper(),
                     headers=tornado_headers, body=req.data)
    rs = yield self.__client.fetch(rq)

    resp.apply_with(status=rs.code, raw=rs.body)
    for k, v in rs.headers.get_all():
        resp.apply_with(header={k: v})

    raise gen.Return(resp)
def test_invalid_content_type_raises_value_error(
        self, StreamingFormDataParserDelegateMock):
    delegate = StreamingFormDataParserDelegateMock()
    headers = HTTPHeaders()
    headers.add("Content-type", "application/json; charset=UTF-8")
    with self.assertRaises(ValueError):
        StreamingFormDataParser(delegate, headers)
def http_client(url, method, cookie, body=None, time_out=None):
    client = AsyncHTTPClient()
    headers = HTTPHeaders()
    headers.add('Content-Type', 'application/json')
    if cookie:
        headers.add('Cookie', cookie)
    if body:
        body = json_encode(body)
    if time_out:
        request = HTTPRequest(url, headers=headers, method=method,
                              validate_cert=False, body=body,
                              request_timeout=time_out)
    else:
        request = HTTPRequest(url, headers=headers, method=method,
                              validate_cert=False, body=body)
    try:
        response = yield client.fetch(request)
    except HTTPError as error:
        raise error
    else:
        raise Return(response)
def test_file_upload_multiline(self, StreamingFormDataParserDelegateMock):
    delegate = StreamingFormDataParserDelegateMock()
    headers = HTTPHeaders()
    headers.add("Content-Type", "multipart/form-data; boundary=1234")
    parser = StreamingFormDataParser(delegate, headers)
    data = b"""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
Bar
--1234--
""".replace(b"\n", b"\r\n")
    parser.data_received(data)
    self.assertTrue(delegate.start_file.called)
    self.assertTrue(delegate.finish_file.called)
    self.assertTrue(delegate.file_data_received.called)
    expected_headers = HTTPHeaders()
    expected_headers.add("Content-Disposition",
                         'form-data; name="files"; filename="ab.txt"')
    delegate.start_file.assert_called_with(expected_headers, {
        "name": "files",
        "filename": "ab.txt"
    })
    delegate.file_data_received.assert_called_with(b"Foo\r\nBar")
async def github_get_oauth2_token(
    self,
    client_id: str,
    client_secret: str,
    code: str,
) -> dict:
    """
    Get a GitHub OAuth2 token.

    Returns:
        Dictionary with keys access_token, scope, token_type.
        See https://git.io/J1ON4

    Raises:
        HTTPError: if the request fails
        JSONDecodeError: if parsing the response fails
    """
    http = self.get_auth_http_client()
    args = {
        "code": code,
        "client_id": client_id,
        "client_secret": client_secret,
    }
    headers = HTTPHeaders()
    headers.add('Accept', 'application/json')
    response = await http.fetch(
        self._OAUTH_ACCESS_TOKEN_URL,
        raise_error=True,
        method='POST',
        body=urllib.parse.urlencode(args),
        headers=headers,
    )
    ret = json_decode(response.body)
    return ret
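# A hedged usage sketch for github_get_oauth2_token above. It assumes the
# method lives on a RequestHandler that also mixes in the OAuth2 helpers
# (get_auth_http_client, _OAUTH_ACCESS_TOKEN_URL); the handler name and the
# placeholder credentials are illustrative, not from the source.
import tornado.web


class GithubOAuth2CallbackHandler(tornado.web.RequestHandler):
    async def get(self):
        code = self.get_argument("code")
        # Exchange the temporary authorization code for an access token.
        token = await self.github_get_oauth2_token(
            client_id="<client-id>",          # placeholder
            client_secret="<client-secret>",  # placeholder
            code=code,
        )
        self.write({"token_type": token.get("token_type")})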
def _clean_headers(self):
    """
    Drop headers that must not be forwarded and rewrite values as needed.

    :return:
    """
    headers = self.request.headers
    # Rewrite the Host header to the host of the backend site
    headers['Host'] = self.client.request.endpoint['netloc']
    new_headers = HTTPHeaders()
    # Mixing str and unicode header values can trigger a 422 error,
    # so every name/value is coerced through text_type below.
    for name, value in headers.get_all():
        l_name = name.lower()
        # These headers must be passed through to the backend
        required_headers = ['x-api-user-json', 'x-api-access-key']
        if l_name.startswith('x-api-') and l_name not in required_headers:
            # Other x-api-* headers are only meant for the api-gateway
            pass
        elif l_name == 'content-length':
            # Do not forward Content-Length; it is recomputed automatically.
            # An incorrect Content-Length breaks the backend request:
            # too large causes timeouts, too small truncates the body.
            pass
        else:
            new_headers.add(text_type(name), text_type(value))

    return new_headers
def post(self, param):
    targetURL = self.get_argument('url')
    if DEBUG:
        print "target URL: " + targetURL
    try:
        serverURL = self.request.protocol + '://' + self.request.host
        http_client = AsyncHTTPClient()
        sub = yield http_client.fetch(targetURL, validate_cert=False)
        sub_filename = targetURL[targetURL.rfind('/'):]
        # TODO - the URL doesn't have to end with a filename,
        # is it worth keeping?
        sub_filename = "fornow"
        files = []
        files.append((sub_filename, sub_filename, sub.body))
        fields = []
        fields.append(("_xsrf", self.xsrf_token))
        content_type, body = encode_multipart_formdata(fields, files)
        headers = HTTPHeaders({"Content-Type": content_type,
                               'content-length': str(len(body))})
        headers.add("Cookie", "_xsrf=" + self.xsrf_token)
        request = HTTPRequest(serverURL + "/import/", "POST",
                              headers=headers, body=body,
                              validate_cert=False)
        response = yield http_client.fetch(request)
        self.write(response.body)
    except Exception, e:
        print 'Failed to upload from URL (DocumentWrapperHandler)', e
        self.write("Failed to upload from '" + targetURL + "'")
    self.finish()
    self.flush()
def test_string(self):
    headers = HTTPHeaders()
    headers.add("Foo", "1")
    headers.add("Foo", "2")
    headers.add("Foo", "3")
    headers2 = HTTPHeaders.parse(str(headers))
    self.assertEquals(headers, headers2)
def weibo_request(self, path, callback, access_token=None,
                  expires_in=None, post_args=None, **args):
    url = "https://api.weibo.com/2/" + path + ".json"
    all_args = {}
    if access_token:
        all_args['access_token'] = access_token
    all_args.update(args)
    all_args.update(post_args or {})
    header = HTTPHeaders({'Authorization': 'OAuth2 %s' % access_token})
    callback = self.async_callback(self._on_weibo_request, callback)
    http = httpclient.AsyncHTTPClient()
    if post_args is not None:
        has_file = False
        for key, value in post_args.iteritems():
            if hasattr(value, "read"):
                has_file = True
        if has_file:
            post_args, boundary = encode_multipart(post_args)
            header.add('Content-Type',
                       'multipart/form-data; boundary=%s' % boundary)
            header.add('Content-Length', len(post_args))
            http.fetch(url, method="POST", body=post_args,
                       callback=callback, headers=header)
        else:
            http.fetch(url, method="POST", body=urllib.urlencode(all_args),
                       callback=callback, headers=header)
    else:
        if all_args:
            url += "?" + urllib.urlencode(all_args)
        http.fetch(url, callback=callback, headers=header)
def _prepare_request(self, messages):
    # Determine the URL for the messages
    url = self.url
    if self._append_message_type and len(messages) == 1 \
            and messages[0].channel.is_meta():
        message_type = '/'.join(messages[0].channel.parts()[1:])
        if not url.endswith('/'):
            url += '/'
        url += message_type

    # Get the headers for the request
    headers = HTTPHeaders()
    for header, values in self.get_headers().iteritems():
        for value in values:
            headers.add(header, value)
    for header, value in headers.get_all():
        self.log.debug('Request header %s: %s' % (header, value))

    # Get the body for the request
    body = Message.to_json(messages, encoding='utf8')
    self.log.debug('Request body (length: %d): %s' % (len(body), body))

    # Get the timeout (in seconds)
    timeout = self.get_timeout(messages) / 1000.0
    self.log.debug('Request timeout: %ss' % timeout)

    # Build and return the request
    return HTTPRequest(url, method='POST', headers=headers, body=body,
                       connect_timeout=timeout, request_timeout=timeout)
def _prepare_request(self, messages):
    # Determine the URL for the messages
    url = self.url
    if self._append_message_type and len(messages) == 1 \
            and messages[0].channel.is_meta():
        message_type = '/'.join(messages[0].channel.parts()[1:])
        if not url.endswith('/'):
            url += '/'
        url += message_type

    # Get the headers for the request
    headers = HTTPHeaders()
    for header, values in self.get_headers().items():
        for value in values:
            headers.add(header, value)
    for header, value in headers.get_all():
        self.log.debug('Request header %s: %s' % (header, value))

    # Get the body for the request
    body = Message.to_json(messages, encoding='utf8')
    self.log.debug('Request body (length: %d): %s' % (len(body), body))

    # Get the timeout (in seconds)
    timeout = self.get_timeout(messages) / 1000.0
    self.log.debug('Request timeout: %ss' % timeout)

    # Build and return the request
    return HTTPRequest(url, method='POST', headers=headers, body=body,
                       connect_timeout=timeout, request_timeout=timeout)
def get_qiku_balance(params, sandbox=False):
    method = 'GET'
    url_path = '/mpay/get_balance_m'
    sig = hmac_sha1_sig(method, url_path, params, APP_KEY)
    params['sig'] = sig
    if sandbox:
        url = '%s%s?%s' % (QIKU_SANDBOX_VERIFY_RECEIPTS_URL, url_path,
                           urllib.urlencode(params))
    else:
        url = '%s%s?%s' % (QIKU_VERIFY_RECEIPTS_URL, url_path,
                           urllib.urlencode(params))
    cookies = '; '.join(['session_id=openid', 'session_type=kp_actoken',
                         'org_loc=%s' % url_path])
    headers = HTTPHeaders()
    headers.add("Cookie", cookies)
    # http = HTTPClient()
    # request = HTTPRequest(url, headers=headers)
    # response = http.fetch(request, validate_cert=False)
    # rc, data = response.code, response.body
    rc, data = http.get(url, headers=headers)
    if sandbox:
        path = os.path.join(settings.BASE_ROOT, 'logs',
                            'pay_%s_%s.txt' % ('qk_balance',
                                               time.strftime('%F-%T')))
        f = open(path, 'w')
        f.write(repr({'headers': headers, 'url': url, 'response': data}))
        f.close()
    data = json.loads(data)
    return data['ret'], data
def send_object(cls, object_url):
    """
    Sends an OpenSlides object to all connected clients (waiters).

    First, retrieve the object from the OpenSlides REST api using the
    given object_url.
    """
    # Join network location with object URL.
    # TODO: Use host and port as given in the start script
    wsgi_network_location = settings.OPENSLIDES_WSGI_NETWORK_LOCATION \
        or 'http://localhost:8000'
    url = ''.join((wsgi_network_location, object_url))

    # Send out internal HTTP request to get data from the REST api.
    for waiter in cls.waiters:
        # Read waiter's former cookies and parse session cookie to new
        # header object.
        headers = HTTPHeaders()
        try:
            session_cookie = waiter.connection_info.cookies[
                settings.SESSION_COOKIE_NAME]
        except KeyError:
            # There is no session cookie
            pass
        else:
            headers.add('Cookie', '%s=%s' % (settings.SESSION_COOKIE_NAME,
                                             session_cookie.value))
        # Setup uncompressed request.
        request = HTTPRequest(url=url, headers=headers,
                              decompress_response=False)
        # Setup non-blocking HTTP client
        http_client = AsyncHTTPClient()
        # Executes the request, asynchronously returning an HTTPResponse
        # and calling waiter's forward_rest_response() method.
        http_client.fetch(request, waiter.forward_rest_response)
def http_cookie(url):
    task_user_name = 'task_user'
    task_user_password = '******'
    url = '%s/api/async/v1/user/login' % url
    try:
        client = AsyncHTTPClient()
        post_data = {
            "user_name": task_user_name,
            "user_password": task_user_password
        }
        headers = HTTPHeaders()
        headers.add('Content-Type', 'application/json')
        headers.add('User-Name', task_user_name)
        response = yield client.fetch(url, method='POST', headers=headers,
                                      body=json_encode(post_data),
                                      validate_cert=False,
                                      request_timeout=100)
        cookie = response.headers["Set-cookie"]
        raise Return(cookie)
    except HTTPError as error:
        raise error
def execute(self):
    url = self._make_url('/images/{0}/push'.format(self.name))
    registry, name = resolve_repository_name(self.name)

    headers = HTTPHeaders()
    headers.add(REGISTRY_AUTH_HEADER, self._prepare_auth_header_value())
    body = ''

    log.info('Pushing "%s" into "%s"... ', name, registry)
    log.debug('Pushing url: %s', url)
    request = HTTPRequest(url, method='POST', headers=headers, body=body,
                          allow_ipv6=True, request_timeout=self.timeout,
                          streaming_callback=self._on_body)
    try:
        result = yield self._http_client.fetch(request)
        if self._lasterr is not None:
            raise self._lasterr
        log.info('OK')
    except Exception as err:
        log.error('FAIL - %s', err)
        raise err
    raise gen.Return(result)
def test_get_headers() -> None:
    tornado_headers = HTTPHeaders({"content-type": "text/html"})
    tornado_headers.add("Set-Cookie", "A=B")
    tornado_headers.add("Set-Cookie", "C=D")
    assert get_headers(tornado_headers) == {
        "content-type": ["text/html"],
        "set-cookie": ["A=B", "C=D"]
    }
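# A minimal sketch of the get_headers helper exercised by test_get_headers
# above: it flattens a Tornado HTTPHeaders object into a dict mapping
# lowercased names to lists of values. The helper itself is not shown in
# the source, so this reconstruction is an assumption based on the assertion.
from typing import Dict, List

from tornado.httputil import HTTPHeaders


def get_headers(headers: HTTPHeaders) -> Dict[str, List[str]]:
    result: Dict[str, List[str]] = {}
    # get_all() yields one (name, value) pair per added header,
    # preserving repeated names such as Set-Cookie.
    for name, value in headers.get_all():
        result.setdefault(name.lower(), []).append(value)
    return result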
def test_pickle_roundtrip(self):
    headers = HTTPHeaders()
    headers.add('Set-Cookie', 'a=b')
    headers.add('Set-Cookie', 'c=d')
    headers.add('Content-Type', 'text/html')
    pickled = pickle.dumps(headers)
    unpickled = pickle.loads(pickled)
    self.assertEqual(sorted(headers.get_all()),
                     sorted(unpickled.get_all()))
    self.assertEqual(sorted(headers.items()), sorted(unpickled.items()))
def populate_cookie_header(self, headers, username='******'):
    cookie_name, cookie_value = 'user', username
    secure_cookie = create_signed_value(
        self.get_app().settings["cookie_secret"],
        cookie_name, cookie_value)
    headers = HTTPHeaders(headers)
    headers.add(
        'Cookie',
        b'='.join((ensure_binary(cookie_name),
                   ensure_binary(secure_cookie))))
    return headers
def _parse_headers(self):
    frame = self._header_frames[0]
    data = b''.join(f.data for f in self._header_frames)
    self._header_frames = []
    if frame.flags & constants.FrameFlag.PRIORITY:
        # TODO: support PRIORITY and PADDING.
        # This is just enough to cover an error case tested in h2spec.
        stream_dep, weight = struct.unpack('>ib', data[:5])
        data = data[5:]
        # strip off the "exclusive" bit
        stream_dep = stream_dep & 0x7fffffff
        if stream_dep == frame.stream_id:
            raise ConnectionError(constants.ErrorCode.PROTOCOL_ERROR,
                                  "stream cannot depend on itself")
    pseudo_headers = {}
    headers = HTTPHeaders()
    try:
        # Pseudo-headers must come before any regular headers,
        # and only in the first HEADERS phase.
        has_regular_header = bool(
            self._phase == constants.HTTPPhase.TRAILERS)
        for k, v, idx in self.conn.hpack_decoder.decode(bytearray(data)):
            if k != k.lower():
                # RFC section 8.1.2
                raise StreamError(self.stream_id,
                                  constants.ErrorCode.PROTOCOL_ERROR)
            if k.startswith(b':'):
                if self.conn.is_client:
                    valid_pseudo_headers = (b':status',)
                else:
                    valid_pseudo_headers = (b':method', b':scheme',
                                            b':authority', b':path')
                if (has_regular_header or
                        k not in valid_pseudo_headers or
                        native_str(k) in pseudo_headers):
                    raise StreamError(self.stream_id,
                                      constants.ErrorCode.PROTOCOL_ERROR)
                pseudo_headers[native_str(k)] = native_str(v)
                if k == b":authority":
                    headers.add("Host", native_str(v))
            else:
                headers.add(native_str(k), native_str(v))
                has_regular_header = True
    except HpackError:
        raise ConnectionError(constants.ErrorCode.COMPRESSION_ERROR)
    if self._phase == constants.HTTPPhase.HEADERS:
        self._start_request(pseudo_headers, headers)
    elif self._phase == constants.HTTPPhase.TRAILERS:
        # TODO: support trailers
        pass
    if (not self._maybe_end_stream(frame.flags) and
            self._phase == constants.HTTPPhase.TRAILERS):
        # The frame that finishes the trailers must also finish
        # the stream.
        raise StreamError(self.stream_id,
                          constants.ErrorCode.PROTOCOL_ERROR)
def test_del_input_header(self):
    headers = HTTPHeaders()
    headers.add("Header-Name", "header value1")
    request = HTTPServerRequest(method='GET', uri='/', headers=headers)
    exchange = HTTPExchange(request)
    actions = Actions(del_input_header="Header-Name")
    actions.execute_input_actions(exchange)
    keys = list(exchange.request.headers.keys())
    self.assertEquals(len(keys), 0)
    actions = Actions(del_input_header="Header-Name2")
    actions.execute_input_actions(exchange)
def test_add_input_header(self):
    headers = HTTPHeaders()
    headers.add("Header-Name", "header value1")
    request = HTTPServerRequest(method='GET', uri='/', headers=headers)
    exchange = HTTPExchange(request)
    actions = Actions(add_input_header=("Header-Name", "header value2"))
    actions.execute_input_actions(exchange)
    values = exchange.request.headers.get_list('Header-Name')
    self.assertEquals(len(values), 2)
    self.assertEquals(values[0], "header value1")
    self.assertEquals(values[1], "header value2")
def headers_received(
    self,
    start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
    headers: httputil.HTTPHeaders,
) -> Optional[Awaitable[None]]:
    if headers.get("Content-Encoding") == "gzip":
        self._decompressor = GzipDecompressor()
        # Downstream delegates will only see uncompressed data,
        # so rename the content-encoding header.
        # (but note that curl_httpclient doesn't do this).
        headers.add("X-Consumed-Content-Encoding",
                    headers["Content-Encoding"])
        del headers["Content-Encoding"]
    return self._delegate.headers_received(start_line, headers)
def test_read_invalid_correlation_id(self):
    headers = HTTPHeaders()
    headers.add(CORRELATION_ID_HEADER, 'invalid_header_value')

    with self.assertRaises(tornado.web.HTTPError) as context:
        read_tracking_id_headers(headers)

    raised_exception = context.exception
    self.assertEqual(raised_exception.status_code, 400)
    self.assertEqual(
        raised_exception.log_message,
        "Invalid X-Correlation-ID header. Should be an UUIDv4 matching "
        "regex '^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-"
        "[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$'")
def test_read_valid_correlation_id(self):
    with self.subTest("Lower case UUID"):
        headers = HTTPHeaders()
        headers.add(CORRELATION_ID_HEADER, FIXED_UUID_LOWER_CASE)
        read_tracking_id_headers(headers)
        self.assertEqual(mdc.correlation_id.get(), FIXED_UUID_LOWER_CASE)

    mdc.correlation_id.set('')

    with self.subTest("Upper case UUID"):
        headers = HTTPHeaders()
        headers.add(CORRELATION_ID_HEADER, FIXED_UUID_UPPER_CASE)
        read_tracking_id_headers(headers)
        self.assertEqual(mdc.correlation_id.get(), FIXED_UUID_UPPER_CASE)
def _clean_headers(self):
    """
    Remove unneeded entries from the request headers.

    :return:
    """
    headers = self.request.headers
    new_headers = HTTPHeaders()
    # Mixing str and unicode header values can trigger a 422 error.
    for name, value in headers.get_all():
        # Skip hop-by-hop and cache-control headers
        if name in ('Content-Length', 'Connection', 'Pragma',
                    'Cache-Control'):
            pass
        else:
            new_headers.add(name, value)

    return new_headers
def test_copy(self):
    all_pairs = [('A', '1'), ('A', '2'), ('B', 'c')]
    h1 = HTTPHeaders()
    for k, v in all_pairs:
        h1.add(k, v)
    h2 = h1.copy()
    h3 = copy.copy(h1)
    h4 = copy.deepcopy(h1)
    for headers in [h1, h2, h3, h4]:
        # All the copies are identical, no matter how they were
        # constructed.
        self.assertEqual(list(sorted(headers.get_all())), all_pairs)
    for headers in [h2, h3, h4]:
        # Neither the dict nor its member lists are reused.
        self.assertIsNot(headers, h1)
        self.assertIsNot(headers.get_list('A'), h1.get_list('A'))
def test34_request_content_type(self):
    """Test request_content_type method."""
    # no header -> 400
    h = mockedLDPHandler()
    self.assertRaises(HTTPError, h.request_content_type)
    # one ...
    h = mockedLDPHandler(headers={'Content-Type': 'a/b; charset="wierdo"'})
    self.assertEqual(h.request_content_type(), 'a/b')
    h = mockedLDPHandler(headers={'Content-Type': 'c/d'})
    self.assertEqual(h.request_content_type(), 'c/d')
    # multiple -> 400
    hh = HTTPHeaders()
    hh.add('Content-Type', 'a/b')
    hh.add('Content-Type', 'c/d')
    h = mockedLDPHandler(headers=hh)
    self.assertRaises(HTTPError, h.request_content_type)
def __del__(self):
    http_client = HTTPClient()
    headers = HTTPHeaders()
    headers.add(self.SESSTION_LOCATION,
                self.sessions[self.SESSTION_LOCATION])
    headers.add(self.SESSION_X_AUTH_TOKEN,
                self.sessions[self.SESSION_X_AUTH_TOKEN])
    fetch_url = '%s' % self.sessions[self.SESSTION_LOCATION]
    try:
        response = http_client.fetch(
            fetch_url,
            headers=headers,
            method='DELETE',
            validate_cert=False
        )
        return response
    except HTTPError as error:
        raise error
def get_required_headers(self):
    """
    Collect the headers that must be passed through to the backend.

    :return:
    """
    new_headers = HTTPHeaders()
    # Mixing str and unicode header values can trigger a 422 error,
    # so every name/value is coerced through text_type below.
    for name, value in self.request.headers.get_all():
        # These x-api headers must be forwarded to the backend
        required_headers = [HEADER_BACKEND_USER_JSON]
        if name in required_headers:
            new_headers.add(text_type(name), text_type(value))

    # Pass along the app_id
    new_headers[HEADER_BACKEND_APP_ID] = self.client.app_id
    return new_headers
def test_file_upload_special_filenames(
        self, StreamingFormDataParserDelegateMock):
    filenames = [
        'a;b.txt',
        'a"b.txt',
        'a";b.txt',
        'a;"b.txt',
        'a";";.txt',
        'a\\"b.txt',
        'a\\b.txt',
    ]
    headers = HTTPHeaders()
    headers.add("Content-Type", "multipart/form-data; boundary=1234")
    for filename in filenames:
        delegate = StreamingFormDataParserDelegateMock()
        parser = StreamingFormDataParser(delegate, headers)
        data = """\
--1234
Content-Disposition: form-data; name="files"; filename="%s"

Foo
--1234--
""" % filename.replace('\\', '\\\\').replace('"', '\\"')
        data = data.replace("\n", "\r\n").encode()
        parser.data_received(data)
        self.assertTrue(delegate.start_file.called)
        self.assertTrue(delegate.finish_file.called)
        self.assertTrue(delegate.file_data_received.called)
        expected_headers = HTTPHeaders()
        expected_headers.add(
            "Content-Disposition",
            'form-data; name="files"; filename="%s"'
            % filename.replace('\\', '\\\\').replace('"', '\\"'))
        delegate.start_file.assert_has_calls([
            mock.call(expected_headers, {
                "name": "files",
                "filename": filename
            })
        ])
        delegate.file_data_received.assert_called_with(b"Foo")
        delegate.reset_mock()
        StreamingFormDataParserDelegateMock.reset_mock()
def test_line_does_not_end_with_correct_line_break(
        self, StreamingFormDataParserDelegateMock):
    delegate = StreamingFormDataParserDelegateMock()
    headers = HTTPHeaders()
    headers.add("Content-Type", 'multipart/form-data; boundary=1234')
    parser = StreamingFormDataParser(delegate, headers)
    data = b"""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo--1234--
""".replace(b"\n", b"\r\n")
    parser.data_received(data)
    self.assertTrue(delegate.start_file.called)
    self.assertFalse(delegate.finish_file.called)
    self.assertTrue(delegate.file_data_received.called)
def execute(self):
    url = self._make_url('/images/{0}/push'.format(self.name))
    registry, name = resolve_repository_name(self.name)

    headers = HTTPHeaders()
    headers.add('X-Registry-Auth', self._prepare_auth_header_value())
    body = ''

    log.info('Pushing "%s" into "%s"... ', name, registry)
    request = HTTPRequest(url, method='POST', headers=headers, body=body,
                          request_timeout=self.timeout,
                          streaming_callback=self._on_body)
    try:
        yield self._http_client.fetch(request)
        log.info('OK')
    except Exception as err:
        log.error('FAIL - %s', err)
        raise err
def test_missing_headers(self, StreamingFormDataParserDelegateMock):
    delegate = StreamingFormDataParserDelegateMock()
    headers = HTTPHeaders()
    headers.add("Content-Type", 'multipart/form-data; boundary=1234')
    parser = StreamingFormDataParser(delegate, headers)
    data = b"""\
--1234

Foo
--1234--
""".replace(b"\n", b"\r\n")
    with ExpectLog(gen_log, "multipart/form-data missing headers"):
        parser.data_received(data)
    self.assertFalse(delegate.start_file.called)
    self.assertFalse(delegate.finish_file.called)
    self.assertFalse(delegate.file_data_received.called)
def _deserialize_from_cache(self, value):
    data = json.loads(value.decode())
    if not self.cache_meta_data.is_valid(data):
        logging.debug('cache is expired!')
        return None

    try:
        headers = HTTPHeaders()
        for k, v in data['headers']:
            headers.add(k, v)
        response = HTTPResponse(
            HTTPRequest(url=data['url']),
            int(data['code']),
            headers,
            buffer=BytesIO(data['body'].encode()),
        )
    except KeyError:
        return None
    return response
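# A hedged sketch of the matching serializer for _deserialize_from_cache
# above. It emits the JSON shape that method consumes (url, code, header
# pairs, body); the function name is an assumption, since the writer side
# is not shown in the source.
import json


def _serialize_for_cache(response):
    # HTTPHeaders.get_all() preserves repeated headers as (name, value)
    # pairs, which round-trips cleanly through the headers.add() loop
    # on the reader side.
    data = {
        'url': response.request.url,
        'code': response.code,
        'headers': list(response.headers.get_all()),
        'body': response.body.decode(),
    }
    return json.dumps(data).encode()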
def init_headers(self, args):
    headers = HTTPHeaders()
    if args["headers"]:
        for header in args["headers"]:
            values = header.split(':', 1)
            if len(values) == 2:
                headers.add(*values)
            else:  # len(values) == 1
                headers.add(values[0], "")
    # Setting cookies
    if args["cookie"]:
        headers.add("Cookie", args["cookie"])
    return headers
def send_swarm_request(self, url, method="GET", retry=1,
                       content_type="application/json", **kwargs):
    import tornado.httpclient as http
    from tornado.httputil import HTTPHeaders
    from tornado.httpclient import HTTPError

    if not url.startswith(SWARM_API_URL_BASE):
        url = SWARM_API_URL_BASE + url
    http_client = http.HTTPClient()
    header = HTTPHeaders()
    header.add("Content-Type", content_type)
    if hasattr(self, "cookie"):
        # logged in
        header.add("Cookie", self.cookie)
    else:
        # not logged in; read-only
        header.add("Cookie", self.readonly_cookie)
    http_request = http.HTTPRequest(url, method=method, headers=header,
                                    **kwargs)
    logger.info("Sending request to SWARM url: %s, method: %s"
                % (url, method))
    if retry < 0:
        retry = 1
    while retry > 0:
        retry -= 1
        response = http_client.fetch(http_request, raise_error=False)
        logger.info(http_request.method + " | " + http_request.url +
                    " | " + str(response.code))
        if response.code < 300:
            http_client.close()
            if "Set-Cookie" in response.headers.keys():
                cookie = response.headers["Set-Cookie"]
                logger.info("Set cookie: " + cookie)
                self.cookie = cookie
            return response.body
        elif response.code == 599:
            logger.info("Timeout, retry chance: " + str(retry))
            logger.error(response.body)
            if retry > 0:
                continue
            else:
                raise HTTPError(response.code, message=response.body,
                                response=response)
        else:
            raise HTTPError(response.code, message=response.body,
                            response=response)
def decode(pairs):
    headers = HTTPHeaders()
    for name, value in pairs:
        headers.add(name, value)
    return headers
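# Illustrative round trip for decode() above: HTTPHeaders.get_all() is the
# natural inverse encoding, emitting the same (name, value) pairs, including
# repeated names. The sample pairs are made up for this example.
pairs = [("Set-Cookie", "a=b"), ("Set-Cookie", "c=d"),
         ("Host", "example.com")]
headers = decode(pairs)
assert sorted(headers.get_all()) == sorted(pairs)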
class CrowdAuthProvider(AuthProvider):
    """
    AuthProvider implementation supporting an Atlassian Crowd server
    using the REST API.

    The following application settings are used to configure this provider:

    *crowd_auth_provider_url*: root URL of the Crowd API server
    *crowd_auth_provider_username*: username to authenticate with Crowd API
    *crowd_auth_provider_password*: password to authenticate with Crowd API
    """

    _CROWD_AUTH_URL = "/rest/usermanagement/1/authentication"
    _CONFIG_CROWD_URL = "auth_provider_crowd_url"
    _CONFIG_CROWD_USERNAME = "******"
    _CONFIG_CROWD_PASSWORD = "******"

    def __init__(self, application):
        super(CrowdAuthProvider, self).__init__(application)
        settings = self.application.settings
        if self._CONFIG_CROWD_URL in settings:
            self._crowd_url = settings.get(self._CONFIG_CROWD_URL)
        else:
            raise RuntimeError("Settings '" + self._CONFIG_CROWD_URL +
                               "' not found")
        if self._CONFIG_CROWD_USERNAME in settings:
            self._crowd_username = settings.get(self._CONFIG_CROWD_USERNAME)
        else:
            raise RuntimeError("Settings '" + self._CONFIG_CROWD_USERNAME +
                               "' not found")
        if self._CONFIG_CROWD_PASSWORD in settings:
            self._crowd_password = settings.get(self._CONFIG_CROWD_PASSWORD)
        else:
            raise RuntimeError("Settings '" + self._CONFIG_CROWD_PASSWORD +
                               "' not found")
        self._crowd_headers = HTTPHeaders({
            "Accept": "application/json",
            "Content-Type": "application/json"
        })
        self._client = AsyncHTTPClient()

    @coroutine
    def authenticate(self, username, password):
        """
        Authenticate user using the Crowd service API.

        :returns: a Future that must resolve to None or a valid
            AuthUser object.
        """
        auth_url = self._crowd_url
        auth_url += self._CROWD_AUTH_URL
        auth_url += "?username=" + username  # NOTE: masked as "******" in the source
        auth_body = {"value": password}
        request = HTTPRequest(auth_url,
                              method="POST",
                              auth_mode="basic",
                              auth_username=self._crowd_username,
                              auth_password=self._crowd_password,
                              headers=self._crowd_headers,
                              body=json_encode(auth_body))
        fetch_time = time.clock()
        try:
            response = yield self._client.fetch(request)
        except HTTPError as e:
            if e.code == 400:
                # Expected status code from the Crowd API
                # for unsuccessful user authentication.
                body = json_decode(e.response.body)
                _LOGGER.warn("Authentication failure for username: %s: %s",
                             username, body["message"])
                return
            # Re-raise exception
            raise
        fetch_time = (time.clock() - fetch_time) * 1000
        if fetch_time > 100:
            _LOGGER.warn("Authentication request success: %sms", fetch_time)
        else:
            _LOGGER.info("Authentication request success: %sms", fetch_time)
        if "Set-Cookie" in response.headers:
            if "Cookie" in self._crowd_headers:
                del self._crowd_headers["Cookie"]
            for cookie in response.headers.get_list("Set-Cookie"):
                self._crowd_headers.add("Cookie", cookie)
        body = json_decode(response.body)
        if "name" not in body:
            _LOGGER.warn("Missing 'name' attribute in Crowd response")
            return
        user = AuthUser()
        user.username = body["name"]
        raise Return(user)
class Configuration(object):
    """Configuration object for requests at a given URI"""

    HTTP_VERBS = [
        'delete', 'get', 'head', 'options',
        'patch', 'post', 'put', 'trace'
    ]

    FLAVORS = {
        'json': {
            'content-type': 'application/json',
            'accept': 'application/json',
        },
        'xml': {
            'content-type': 'application/xml',
            'accept': 'application/xml',
        },
        'plain': {
            'content-type': 'text/plain',
            'accept': 'text/plain',
        },
        # POST and PUT only flavors
        'form': {
            'content-type': 'application/x-www-form-urlencoded',
        },
        'multipart': {
            'content-type': 'multipart/form-data; boundary=AaB03x',
        },
    }

    # Default tornado timeout
    TIMEOUT = 20

    def __init__(self, uri, flavors=None, chain=None, compress=False,
                 ca_certs=None, connect_timeout=None, request_timeout=None):
        """Initialize the configuration for requests at the given URI"""
        self.uri = uri
        self.headers = HTTPHeaders()
        self.flavors = flavors or ['json', 'xml']
        self.processors = chain or tornado_chain
        self.credentials = {}
        self.verb = None
        self.use_gzip = compress
        self.ca_certs = ca_certs
        # Request extra arguments
        self.progress_callback = None
        self.connect_timeout = connect_timeout or self.TIMEOUT
        self.request_timeout = request_timeout or self.TIMEOUT

    def __iter__(self):
        """Iterate over properties"""
        prop_filter = lambda x: x[0][0] != '_'
        return itertools.ifilter(prop_filter, self.__dict__.iteritems())

    def __getattr__(self, value):
        """
        Perform an HTTP request.

        This method supports calls to the following methods:
        delete, get, head, options, patch, post, put, trace

        Once the HTTP call is performed, a response is returned
        (unless the async method is used).
        """
        if value not in self.HTTP_VERBS:
            raise AttributeError(value)
        # store current verb to be passed to Request
        self.verb = value.upper()
        # set accept if it wasn't set previously
        if 'accept' not in self.headers:
            for flavor in self.flavors:
                if 'accept' in self.FLAVORS[flavor]:
                    self.headers.add('accept',
                                     self.FLAVORS[flavor]['accept'])
        # set form content-type as default if none is present
        verb_allowed = self.verb in ('POST', 'PUT', 'PATCH')
        if verb_allowed and 'content-type' not in self.headers:
            self.headers['content-type'] = \
                self.FLAVORS['form']['content-type']
        # Debug helper
        if __debug__:
            sys.stderr.write("=" * 70)
            sys.stderr.write("\nRequest:{0} {1}".format(self.verb, self.uri))
            sys.stderr.write("\nHeaders:")
            sys.stderr.write("\n  Accept:'{0}'".format(
                self.headers['accept']))
            if 'content-type' in self.headers:
                ctype = self.headers['content-type']
                sys.stderr.write("\n  Content-Type:'{0}'".format(ctype))
            sys.stderr.write("\n  Compressed:'{0}'".format(self.use_gzip))
            if self.uri.startswith("https"):
                sys.stderr.write("\nCerts:'{0}'".format(self.ca_certs))
            sys.stderr.write("\n{0}\n".format("=" * 70))
        return Request(self)

    def use(self, feature):
        """Register a feature (processor) at this configuration"""
        self.processors.insert(0, feature)
        return self

    def secure(self, value=None, port=None, ca_certs=None):
        """Force connection using https protocol at the port specified"""
        if isinstance(value, bool):
            scheme = 'http' if not value else 'https'
            self.uri = _PROT_RE.sub(scheme + r"://\g<url>", self.uri)
        if isinstance(port, int):
            regx_str = r"\g<proto>\g<host>:{0}\g<url>".format(port)
            self.uri = _PORT_RE.sub(regx_str, self.uri)
        if isinstance(ca_certs, basestring):
            self.ca_certs = ca_certs
        return self

    def compress(self, compress=True):
        """Notify server that we will be zipping request"""
        self.use_gzip = bool(compress)
        return self

    def progress(self, progress_callback):
        """
        Allow to define a progress callback about the operation.
        This progress callback takes 2 arguments: the total length,
        if any, and the amount of bytes already transferred.
        """
        self.progress_callback = progress_callback
        return self

    def until(self, connect_timeout=None, request_timeout=None):
        """Set current timeout in seconds for every call"""
        self.connect_timeout = connect_timeout or self.connect_timeout
        self.request_timeout = request_timeout or self.request_timeout
        return self

    def as_(self, flavor):
        """Set up the Content-Type"""
        if flavor is not None:
            # Just use default flavors in case we pass a None param
            if flavor in self.FLAVORS:
                self.headers.update(self.FLAVORS[flavor])
            else:
                self.headers["accept"] = flavor
                self.headers["content-type"] = flavor
        return self

    def accepts(self, flavor):
        """Configure the accepted response format"""
        if flavor is not None:
            if flavor in self.FLAVORS:
                flavor = self.FLAVORS[flavor]['accept']
            self.headers.add('accept', flavor)
        return self

    def auth(self, credentials, path="*", method='plain'):
        """Authentication feature. It does simple HTTP auth"""
        # already defined?
        if path in self.credentials or method is None:
            return self
        # process a regex valid for path
        expr = "%s.*" if path.endswith('*') else "%s$"
        rmatch = re.compile(expr % path.rsplit('*', 1)[0])
        # now store it
        self.credentials[path] = (rmatch, method, credentials,)
        return self
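# A hedged usage sketch of the fluent Configuration API above. Request and
# tornado_chain come from the surrounding (unshown) library, so this only
# illustrates the call pattern rather than a runnable program.
config = Configuration("http://example.com/api/items")
config.accepts('json').until(connect_timeout=5, request_timeout=15)
# Attribute access is intercepted by __getattr__: reading .get records the
# verb, fills in default accept/content-type headers, and returns a Request.
request = config.get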
class ResponseHandler(object):
    timeout = 10
    chunked = False
    length = None
    finished_headers = False
    length_sent = False
    code = message = path = None

    def __init__(self, conn, stream, start_line):
        self.conn = conn
        self.stream = stream
        self.start_time = time.time()
        self.method, self.path, self.version = start_line
        self.version = self.version.rstrip()
        if not self.conn.old_client and self.version == 'HTTP/1.0':
            self.conn.old_client = True
            self.conn.old_client = True
        else:
            assert self.version == 'HTTP/1.1'
        self.headers = HTTPHeaders()
        self.headers.add("Server", "MYOB/1.0")
        self.pending = []  # delay body writes until headers sent

    def date_time_string(self, timestamp=None):
        """Return the current date and time formatted for a message header."""
        if timestamp is None:
            timestamp = time.time()
        if isinstance(timestamp, (int, float)):
            year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
        else:
            year, month, day, hh, mm, ss, wd, y, z = timestamp
        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            self.weekdayname[wd], day, self.monthname[month], year,
            hh, mm, ss)
        return s

    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
    monthname = [None,
                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

    def set_chunked(self):
        assert False, "uuu"
        assert self.length is None, \
            "You can't both be chunked and have a length!"
        self.headers["Transport-Encoding"] = "chunked"
        self.chunked = True
        return self.actually_send_header("Transport-Encoding")

    def set_length(self, length):
        if self.conn.old_client:
            return  # connection terminates at end of data anyway
        assert self.chunked is not True, \
            "You can't specify a length when chunking"
        assert self.code not in {301, 302, 303, 304, 204}, \
            "No length for these codes"
        self.headers['Content-Length'] = denumber(length)
        self.length = length
        note('set the length te', length)
        return self.actually_send_header('Content-Length')

    def check_header(self, name, value):
        if not self.chunked and name == 'Transport-Encoding' \
                and 'chunked' in self.headers[name]:
            assert False, 'wonk'
            self.chunked = True
        elif name == 'Content-Length':
            if self.conn.old_client:
                return True  # connection terminates at end of data anyway
            if not self.length:
                assert self.code not in {204}, "No length for these codes"
                self.length = value

    status_sent = False

    def send_status(self, code, message):
        print('status', code, message)
        self.code = code
        self.message = message
        self.status_sent = True
        return self.stream.write(b'HTTP/1.1 ' + utf8(denumber(code)) +
                                 b' ' + utf8(message) + b'\r\n')

    def send_header(self, name, value=None):
        if value is not None:
            self.headers.add(name, decodeHeader(name, value))
        if self.check_header(name, value):
            del self.headers[name]
        else:
            return self.actually_send_header(name)

    needDate = True

    @gen.coroutine
    def actually_send_header(self, name):
        if self.status_sent is not True:
            if self.code:
                yield self.send_status(self.code, self.message)
            else:
                print("need to send status first!")
                raise RuntimeError('please send status')
        yield send_header(self.stream, name, self.headers[name])
        if name == 'Date':
            self.needDate = False
        del self.headers[name]

    @gen.coroutine
    def end_headers(self):
        if self.finished_headers:
            raise RuntimeError('finished headers already!')
        if not self.conn.old_client:
            self.headers.add("Connection", "keep-alive")
        if self.needDate:
            yield self.send_header('Date', datetime.now())
        for name, normalized_value in self.headers.get_all():
            self.check_header(name, normalized_value)
            yield send_header(self.stream, name, normalized_value)
        if not self.chunked and self.length is None:
            if self.code in {304, 204}:
                # ...?
                assert not self.pending, \
                    "No data for these codes allowed (or length header)"
            else:
                if not self.conn.old_client:
                    length = 0
                    # no reason to chunk, since we got all the body already
                    for chunk in self.pending:
                        length += len(chunk)
                    self.headers.add("Content-Length", denumber(length))
                    yield self.actually_send_header("Content-Length")
                    self.length = length
        yield self.stream.write(b'\r\n')
        self.finished_headers = True
        yield self.flush_pending()

    @gen.coroutine
    def flush_pending(self):
        pending = self.pending
        self.pending = None
        for chunk in pending:
            yield self.write(chunk)

    written = 0

    def write(self, chunk):
        if self.pending is not None:
            self.pending.append(chunk)
            return success
        if self.chunked:
            chunk = self.conn._format_chunk(chunk)
        elif self.length:
            if isinstance(chunk, str):
                chunk = utf8(chunk)
            self.length -= len(chunk)
        elif self.conn.old_client:
            if isinstance(chunk, str):
                chunk = utf8(chunk)
        elif self.length == 0:
            raise RuntimeError("Either tried to send 2 chunks while setting "
                               "a length, or body was supposed to be empty.")
        else:
            raise RuntimeError("Can't add to the body and automatically "
                               "calculate content length. Either set "
                               "chunked, or set the length, or write the "
                               "whole body before ending headers.")
        self.written += len(chunk)
        return self.stream.write(chunk)

    @gen.coroutine
    def respond(self):
        try:
            response = yield self.do()
            note('got response', derpid(self))
            if not self.finished_headers:
                yield self.end_headers()
        except Redirect as e:
            yield self.send_status(e.code, e.message)
            yield self.send_header('Location', e.location)
            yield self.end_headers()
        finally:
            self.recordAccess()

    def redirect(self, location, code=302, message='boink'):
        raise Redirect(self, location, code, message)

    ip = None

    def recordAccess(self):
        print(json.dumps((self.ip or self.conn.address[0], self.method,
                          self.code, self.path, self.written, time.time())))

    def received_headers(self):
        pass

    def received_header(self, name, value):
        "received a header just now; can set up, or raise an error if this is not a good header"
        if name == 'Content-Length':
            note('setting length')
            self.length = int(value)
        elif name == 'Transport-Encoding':
            if 'chunked' in value:
                assert False, 'uhhh'
                self.chunked = True

    def OK(self):
        "Check headers/IP if this request's body is OK to push."
        return True

    def do(self):
        "return a Future for when writing the response is finished."
        "override this to wrap all requests in context"
        return getattr(self, self.method.lower())()

    def abort(self, stage):
        "called when a request was in the process of being received, or waiting to start writing back and the connection dies."
def set_user_cookie(secret=None, contenttype="text/html", author=None):
    headers = HTTPHeaders({"Content-Type": contenttype})
    headers.add("Cookie",
                "user=%s" % get_user_cookie(secret, author=author))
    return headers
def sites_google_site_proxyfree4u(self):
    proxies = []
    url = "https://sites.google.com/site/proxyfree4u/proxy-list?offset="
    # fetch the latest 10 pages
    for i in range(0, 100, 10):
        # print url + str(i)
        soup = BeautifulSoup(self.getpage(url + str(i)))
        http_client = HTTPClient()
        for link in soup.find_all('a'):
            # get the correct URL
            fetch_url = link.get('href')
            if fetch_url is None:
                continue
            if fetch_url.find("&single=true&gid=0&output=txt") != -1:
                request = HTTPRequest(
                    url=fetch_url,
                    connect_timeout=30,
                    request_timeout=30,
                    follow_redirects=False,
                    use_gzip=True,
                    user_agent=Proxy_Miner.User_agent
                )
                # sometimes during tests the response was 599,
                # so re-send the packet up to 4 times
                for times in range(0, 4):
                    try:
                        response = http_client.fetch(request)
                    except HTTPError as e:
                        if e.code in [408, 599]:
                            continue
                        # getting the cookies; in order to get the
                        # proxy list, 2 cookies are needed
                        first_redirect = e.response.headers['Location']
                        cookie = e.response.headers['Set-Cookie']
                        cookie_headers = HTTPHeaders()
                        cookie_headers.add("Cookie", cookie.split(";")[0])
                        req2 = HTTPRequest(
                            url=first_redirect,
                            connect_timeout=30.0,
                            request_timeout=30.0,
                            follow_redirects=False,
                            use_gzip=True,
                            headers=cookie_headers,
                            user_agent=Proxy_Miner.User_agent
                        )
                        try:
                            http_client.fetch(req2)
                        except HTTPError as e2:
                            second_redirect = \
                                e2.response.headers['Location']
                            # get the second cookie
                            cookie2 = e2.response.headers['Set-Cookie']
                            cookie_headers.add("Cookie",
                                               cookie2.split(";")[0])
                            req3 = HTTPRequest(
                                url=second_redirect,
                                connect_timeout=30.0,
                                request_timeout=30.0,
                                follow_redirects=True,
                                use_gzip=True,
                                headers=cookie_headers,
                                user_agent=Proxy_Miner.User_agent
                            )
                            resp3 = http_client.fetch(req3)
                            # print resp3.body
                            lines = resp3.body.split("\n")
                            counter = 0
                            for j in range(1, len(lines)):
                                proxy = lines[j].split(":")
                                if self.check_proxy(proxy):
                                    proxies.append(proxy)
                                # if the list contains non-valid proxies
                                else:
                                    counter += 1
                                    if counter == 15:
                                        break
                        break
    return proxies
def Start(self, Args):
    if str(Args).count(self.fsig) > 1:
        self.Error("Multiple Fuzzing signatures found.\nOnly one"
                   + " fuzzing placeholder is supported.")
    if Args.FUZZING_SIG:
        self.fsig = Args.FUZZING_SIG
    methods = Args.METHOD
    if methods:
        for method in methods:
            if method == "@method@":
                methods.remove(method)
                methods.extend(self.load_payload_file(
                    "./payloads/HTTPmethods/methods.txt"))
                # removing doubles
                methods = list(set(methods))
                break
    else:
        methods = []
        if Args.DATA is None:
            methods.append("GET")  # Autodetect Method
        else:
            methods.append("POST")  # Autodetect Method
    if Args.DATA:
        data = Args.DATA
    else:
        data = ""
    if Args.TARGET:
        target = Args.TARGET
    headers = HTTPHeaders()
    if Args.COOKIE:
        headers.add("Cookie", Args.COOKIE)
    if Args.HEADERS:
        for header in Args.HEADERS:
            values = header.split(':', 1)
            if len(values) == 2:
                headers.add(*values)
            else:  # len(values) == 1
                headers.add(values[0], "")
    if Args.CONTAINS is None and Args.RESP_CODE_DET is None:
        self.Error("You need to specify a detection method")
    if Args.CONTAINS:
        detection_args = []
        detection_args.append(Args.CONTAINS[0])  # detection string
        if "cs" in Args.CONTAINS[1:]:  # if case-sensitive
            detection_args.append("cs")
        if Args.REVERSE:
            detection_args.append("rev")
        self.detection_struct.append(
            ["contains", self.contains, detection_args])
    if Args.RESP_CODE_DET:
        detection_args = []
        detection_args.append(Args.RESP_CODE_DET[0])
        if Args.REVERSE:
            detection_args.append("rev")
        self.detection_struct.append(["Response Code Detection",
                                      self.resp_code_detection,
                                      detection_args])
    #####################################
    if Args.LENGTH:
        print "Scanning mode: Length Detection"
        ch = Args.LENGTH[0][0]
        length = self.find_length(target, methods[0],
                                  self.detection_struct, ch, headers, None)
        print "Allowed Length = " + str(length)
    elif Args.DETECT_ALLOWED_SOURCES:
        print "Scanning mode: Allowed Sources Detection"
        accepted_method = Args.ACCEPTED_METHOD
        param_name = Args.PARAM_NAME
        param_value = Args.ACCEPTED_PARAM_VALUE
        param_source = Args.PARAM_SOURCE
        if accepted_method is None:
            self.Error("--accepted_method is not specified.")
        if param_name is None:
            self.Error("--param_name is not specified.")
        if param_value is None:
            self.Error("--param_value is not specified.")
        if param_source is None:
            self.Error("--param_source is not specified.")
        methods = self.load_payload_file(
            "./payloads/HTTPmethods/methods.txt")
        requests = self.detect_accepted_sources(
            target, data, headers, param_name, param_source,
            param_value, methods, accepted_method)
    else:
        if Args.PAYLOADS:
            payloads = []
            for payload in Args.PAYLOADS:
                payloads += self.load_payload_file(payload)
        else:
            self.Error("Payloads not Specified")
        # HPP check
        hpp_attacking_method = Args.HPP_ATTACKING_METHOD
        if hpp_attacking_method:
            print "Scanning mode: HTTP Parameter Pollution Mode"
            if hpp_attacking_method.upper() == "ASP":
                # ASP HPP code
                source = Args.HPP_SOURCE
                param_name = Args.HPP_PARAM_NAME
                if source is None:
                    self.Error("--hpp_source is not specified")
                elif param_name is None:
                    self.Error("--param_name is not specified")
                else:
                    requests = self.asp_hpp(methods, payloads, param_name,
                                            source, target, headers, data)
        else:
            # Fuzzing using content placeholders loaded from file
            print "Scanning mode: Fuzzing Using placeholders"
            requests = self.create_mal_HTTP_requests(
                methods, target, payloads, headers, data)
    if not Args.LENGTH:
        print "Requests number: " + str(len(requests))
        self.fuzz(requests, self.detection_struct)